diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 6a77a8b8e8e3..678e2c6a3724 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -29,7 +29,7 @@ jobs: permissions: pull-requests: write # for googleapis/code-suggester name: Update API List PR - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # don't run the workflow on forks of googleapis/google-cloud-python if: ${{github.repository == 'googleapis/google-cloud-python'}} steps: diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 54ea713545d4..688814801711 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -71,12 +71,11 @@ RUN tar -xvf Python-3.10.14.tgz RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall -RUN python3.10 -m venv /venv -ENV PATH /venv/bin:$PATH +ENV PATH /usr/local/bin/python3.10:$PATH ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3 /tmp/get-pip.py \ + && python3.10 /tmp/get-pip.py \ && rm /tmp/get-pip.py # Test pip diff --git a/.kokoro/publish-docs-single.sh b/.kokoro/publish-docs-single.sh index fb338fc1c881..8fe16efcdeb4 100755 --- a/.kokoro/publish-docs-single.sh +++ b/.kokoro/publish-docs-single.sh @@ -24,11 +24,11 @@ pwd nox -s docs # create metadata -python3.9 -m docuploader create-metadata \ +python3.10 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ + --version=$(python3.10 setup.py --version) \ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ + --distribution-name=$(python3.10 setup.py --name) \ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' 
.repo-metadata.json) @@ -36,18 +36,18 @@ python3.9 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3.9 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" +python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" # docfx yaml files nox -s docfx # create metadata. -python3.9 -m docuploader create-metadata \ +python3.10 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ + --version=$(python3.10 setup.py --version) \ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ + --distribution-name=$(python3.10 setup.py --name) \ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) @@ -64,4 +64,4 @@ else fi # upload docs -python3.9 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${staging_v2_bucket}" +python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${staging_v2_bucket}" diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index c138c5c1c6b5..0b68b0679bd5 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -36,8 +36,8 @@ RETVAL=0 export PROJECT_ROOT=$(realpath $(dirname "${BASH_SOURCE[0]}")/..) 
# Install nox -python3.9 -m pip install --require-hashes -r "$PROJECT_ROOT/.kokoro/requirements.txt" -python3.9 -m nox --version +python3.10 -m pip install --require-hashes -r "$PROJECT_ROOT/.kokoro/requirements.txt" +python3.10 -m nox --version # A file for publishing docs publish_docs_script="${PROJECT_ROOT}/.kokoro/publish-docs-single.sh" diff --git a/.kokoro/release-single.sh b/.kokoro/release-single.sh index 5665c4828a93..f917f8ef66d0 100755 --- a/.kokoro/release-single.sh +++ b/.kokoro/release-single.sh @@ -21,7 +21,7 @@ set -eo pipefail pwd # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 081537815d4d..d9a6177e1dac 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -18,6 +18,21 @@ # or zero if all commands in the pipeline exit successfully. 
set -eo pipefail +python3 -m pip install --require-hashes -r github/google-cloud-python/.kokoro/requirements-aoss.txt +python3 -m keyring --list-backends + +echo "[distutils] +index-servers = + aoss-1p-python +[aoss-1p-python] +repository: https://us-python.pkg.dev/cloud-aoss-1p/cloud-aoss-1p-python/" >> $HOME/.pypirc + +echo "[install] +index-url = https://us-python.pkg.dev/cloud-aoss-1p/cloud-aoss-1p-python/simple/ +trusted-host = us-python.pkg.dev" >> $HOME/pip.conf + +export PIP_CONFIG_FILE=$HOME/pip.conf + # Start the releasetool reporter python3 -m pip install --require-hashes -r github/google-cloud-python/.kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index c9b8a36f766d..830be65dde19 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -28,7 +28,7 @@ before_action { fetch_keystore { keystore_resource { keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-1" + keyname: "google-cloud-pypi-token-keystore-2" } } } diff --git a/.kokoro/requirements-aoss.in b/.kokoro/requirements-aoss.in new file mode 100644 index 000000000000..bd6769b591d5 --- /dev/null +++ b/.kokoro/requirements-aoss.in @@ -0,0 +1,2 @@ +keyring +keyrings.google-artifactregistry-auth diff --git a/.kokoro/requirements-aoss.txt b/.kokoro/requirements-aoss.txt new file mode 100644 index 000000000000..34b8f631e421 --- /dev/null +++ b/.kokoro/requirements-aoss.txt @@ -0,0 +1,277 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements-aoss.in +# +backports-tarfile==1.2.0 \ + --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ + --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 + # via jaraco-context +cachetools==5.4.0 \ + 
--hash=sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474 \ + --hash=sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827 + # via google-auth +certifi==2024.7.4 \ + --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ + --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 + # via requests +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + 
--hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + 
--hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 + # via cryptography +charset-normalizer==3.3.2 \ + --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ + --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \ + --hash=sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786 \ + --hash=sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8 \ + --hash=sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09 \ + --hash=sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185 \ + --hash=sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574 \ + --hash=sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e \ + --hash=sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519 \ + --hash=sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898 \ + --hash=sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269 \ + --hash=sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3 \ + --hash=sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f \ + --hash=sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6 \ + 
--hash=sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8 \ + --hash=sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a \ + --hash=sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73 \ + --hash=sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714 \ + --hash=sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 \ + --hash=sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc \ + --hash=sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce \ + --hash=sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d \ + --hash=sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e \ + --hash=sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6 \ + --hash=sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269 \ + --hash=sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 \ + --hash=sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d \ + --hash=sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a \ + --hash=sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4 \ + --hash=sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 \ + --hash=sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d \ + --hash=sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0 \ + --hash=sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed \ + --hash=sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068 \ + --hash=sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac \ + --hash=sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25 \ + --hash=sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 \ + --hash=sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab \ + 
--hash=sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26 \ + --hash=sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2 \ + --hash=sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db \ + --hash=sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f \ + --hash=sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5 \ + --hash=sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99 \ + --hash=sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c \ + --hash=sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d \ + --hash=sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811 \ + --hash=sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa \ + --hash=sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a \ + --hash=sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03 \ + --hash=sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b \ + --hash=sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04 \ + --hash=sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c \ + --hash=sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 \ + --hash=sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458 \ + --hash=sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389 \ + --hash=sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99 \ + --hash=sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985 \ + --hash=sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537 \ + --hash=sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238 \ + --hash=sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f \ + --hash=sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d \ + 
--hash=sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796 \ + --hash=sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a \ + --hash=sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143 \ + --hash=sha256:b4d5f7e22c49087407533347e1d580336f05f6b97c032517da7b1c7bad1a296a \ + --hash=sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8 \ + --hash=sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c \ + --hash=sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5 \ + --hash=sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5 \ + --hash=sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711 \ + --hash=sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4 \ + --hash=sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6 \ + --hash=sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c \ + --hash=sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7 \ + --hash=sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4 \ + --hash=sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b \ + --hash=sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae \ + --hash=sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12 \ + --hash=sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c \ + --hash=sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae \ + --hash=sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8 \ + --hash=sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887 \ + --hash=sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b \ + --hash=sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4 \ + --hash=sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f \ + 
--hash=sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5 \ + --hash=sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33 \ + --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ + --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 + # via requests +cryptography==43.0.0 \ + --hash=sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709 \ + --hash=sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069 \ + --hash=sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2 \ + --hash=sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b \ + --hash=sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e \ + --hash=sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70 \ + --hash=sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778 \ + --hash=sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22 \ + --hash=sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895 \ + --hash=sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf \ + --hash=sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431 \ + --hash=sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f \ + --hash=sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947 \ + --hash=sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74 \ + --hash=sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc \ + --hash=sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66 \ + --hash=sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66 \ + --hash=sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf \ + --hash=sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f \ + 
--hash=sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5 \ + --hash=sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e \ + --hash=sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f \ + --hash=sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55 \ + --hash=sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1 \ + --hash=sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47 \ + --hash=sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5 \ + --hash=sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0 + # via secretstorage +google-auth==2.32.0 \ + --hash=sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022 \ + --hash=sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b + # via keyrings-google-artifactregistry-auth +idna==3.7 \ + --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ + --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 + # via requests +importlib-metadata==8.2.0 \ + --hash=sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369 \ + --hash=sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d + # via keyring +jaraco-classes==3.4.0 \ + --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ + --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 + # via keyring +jaraco-context==5.3.0 \ + --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ + --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 + # via keyring +jaraco-functools==4.0.2 \ + --hash=sha256:3460c74cd0d32bf82b9576bbb3527c4364d5b27a21f5158a62aed6c4b42e23f5 \ + --hash=sha256:c9d16a3ed4ccb5a889ad8e0b7a343401ee5b2a71cee6ed192d3f68bc351e94e3 + # via keyring +jeepney==0.8.0 \ + 
--hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ + --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + # via + # keyring + # secretstorage +keyring==25.2.1 \ + --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ + --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b + # via + # -r requirements-aoss.in + # keyrings-google-artifactregistry-auth +keyrings-google-artifactregistry-auth==1.1.2 \ + --hash=sha256:bd6abb72740d2dfeb4a5c03c3b105c6f7dba169caa29dee3959694f1f02c77de \ + --hash=sha256:e3f18b50fa945c786593014dc225810d191671d4f5f8e12d9259e39bad3605a3 + # via -r requirements-aoss.in +more-itertools==10.3.0 \ + --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ + --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 + # via + # jaraco-classes + # jaraco-functools +pluggy==1.5.0 \ + --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ + --hash=sha256:d5783d8a2575b1d2f22c03e92b0a2e18892b45eadc5a8e41625767aa5e6bcc52 + # via keyrings-google-artifactregistry-auth +pyasn1==0.6.0 \ + --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ + --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.0 \ + --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ + --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b + # via google-auth +pycparser==2.22 \ + --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ + --hash=sha256:d47c5472466f7131bb482b7dd186918f73a2e087d05d2a50d88957a8498377e5 + # via cffi +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 + # via 
keyrings-google-artifactregistry-auth +rsa==4.9 \ + --hash=sha256:bbe333816d27ed8355b433f85795665a1e9ecec1b7a022906ec6ab9d60bfcbef \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +secretstorage==3.3.3 \ + --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ + --hash=sha256:7ef3a3c14fd6975684be05cf30cb13cc17936814b3bd02664b6ab8378aaf0c5b + # via keyring +urllib3==2.2.2 \ + --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ + --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 + # via requests +zipp==3.19.2 \ + --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ + --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c + # via importlib-metadata diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 1bbe85fd2ee9..c66fb322589a 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,32 +1,34 @@ { - "packages/google-ads-admanager": "0.1.2", - "packages/google-ai-generativelanguage": "0.6.8", - "packages/google-analytics-admin": "0.22.9", - "packages/google-analytics-data": "0.18.10", + "packages/google-ads-admanager": "0.2.0", + "packages/google-ads-marketingplatform-admin": "0.1.0", + "packages/google-ai-generativelanguage": "0.6.10", + "packages/google-analytics-admin": "0.23.0", + "packages/google-analytics-data": "0.18.12", "packages/google-apps-card": "0.1.4", - "packages/google-apps-chat": "0.1.9", + "packages/google-apps-chat": "0.1.12", "packages/google-apps-events-subscriptions": "0.1.2", "packages/google-apps-meet": "0.1.8", "packages/google-apps-script-type": "0.3.10", "packages/google-area120-tables": "0.11.11", "packages/google-cloud-access-approval": "1.13.5", "packages/google-cloud-advisorynotifications": "0.3.10", - "packages/google-cloud-alloydb": "0.3.12", + "packages/google-cloud-alloydb": "0.3.13", 
"packages/google-cloud-alloydb-connectors": "0.1.6", "packages/google-cloud-api-gateway": "1.9.5", "packages/google-cloud-api-keys": "0.5.11", "packages/google-cloud-apigee-connect": "1.9.5", "packages/google-cloud-apigee-registry": "0.6.11", + "packages/google-cloud-apihub": "0.2.0", "packages/google-cloud-appengine-admin": "1.11.5", "packages/google-cloud-appengine-logging": "1.4.5", "packages/google-cloud-apphub": "0.1.2", "packages/google-cloud-artifact-registry": "1.11.5", - "packages/google-cloud-asset": "3.26.3", + "packages/google-cloud-asset": "3.26.4", "packages/google-cloud-assured-workloads": "1.12.5", "packages/google-cloud-automl": "2.13.5", - "packages/google-cloud-backupdr": "0.1.3", + "packages/google-cloud-backupdr": "0.1.4", "packages/google-cloud-bare-metal-solution": "1.7.5", - "packages/google-cloud-batch": "0.17.23", + "packages/google-cloud-batch": "0.17.29", "packages/google-cloud-beyondcorp-appconnections": "0.4.11", "packages/google-cloud-beyondcorp-appconnectors": "0.4.11", "packages/google-cloud-beyondcorp-appgateways": "0.4.11", @@ -37,88 +39,89 @@ "packages/google-cloud-bigquery-connection": "1.15.5", "packages/google-cloud-bigquery-data-exchange": "0.5.13", "packages/google-cloud-bigquery-datapolicies": "0.6.8", - "packages/google-cloud-bigquery-datatransfer": "3.15.5", + "packages/google-cloud-bigquery-datatransfer": "3.16.0", "packages/google-cloud-bigquery-logging": "1.4.5", "packages/google-cloud-bigquery-migration": "0.11.9", "packages/google-cloud-bigquery-reservation": "1.13.5", "packages/google-cloud-billing": "1.13.6", "packages/google-cloud-billing-budgets": "1.14.5", "packages/google-cloud-binary-authorization": "1.10.5", - "packages/google-cloud-build": "3.24.2", + "packages/google-cloud-build": "3.25.0", "packages/google-cloud-certificate-manager": "1.7.2", - "packages/google-cloud-channel": "1.18.5", - "packages/google-cloud-cloudcontrolspartner": "0.1.3", + "packages/google-cloud-channel": "1.19.0", + 
"packages/google-cloud-cloudcontrolspartner": "0.2.0", "packages/google-cloud-cloudquotas": "0.1.10", - "packages/google-cloud-commerce-consumer-procurement": "0.1.7", + "packages/google-cloud-commerce-consumer-procurement": "0.1.8", "packages/google-cloud-common": "1.3.5", "packages/google-cloud-compute": "1.19.2", "packages/google-cloud-confidentialcomputing": "0.4.11", "packages/google-cloud-config": "0.1.11", - "packages/google-cloud-contact-center-insights": "1.17.5", - "packages/google-cloud-container": "2.50.0", + "packages/google-cloud-contact-center-insights": "1.18.0", + "packages/google-cloud-container": "2.51.0", "packages/google-cloud-containeranalysis": "2.14.5", "packages/google-cloud-contentwarehouse": "0.7.9", "packages/google-cloud-data-fusion": "1.10.5", "packages/google-cloud-data-qna": "0.10.11", - "packages/google-cloud-datacatalog": "3.20.0", + "packages/google-cloud-datacatalog": "3.20.1", "packages/google-cloud-datacatalog-lineage": "0.3.8", "packages/google-cloud-dataflow-client": "0.8.12", "packages/google-cloud-dataform": "0.5.11", "packages/google-cloud-datalabeling": "1.10.5", "packages/google-cloud-dataplex": "2.2.2", - "packages/google-cloud-dataproc": "5.10.2", + "packages/google-cloud-dataproc": "5.13.0", "packages/google-cloud-dataproc-metastore": "1.15.5", "packages/google-cloud-datastream": "1.9.5", - "packages/google-cloud-deploy": "2.0.0", + "packages/google-cloud-deploy": "2.1.0", "packages/google-cloud-developerconnect": "0.1.2", - "packages/google-cloud-dialogflow": "2.30.2", + "packages/google-cloud-dialogflow": "2.33.0", "packages/google-cloud-dialogflow-cx": "1.35.0", - "packages/google-cloud-discoveryengine": "0.12.0", - "packages/google-cloud-dlp": "3.19.0", + "packages/google-cloud-discoveryengine": "0.12.2", + "packages/google-cloud-dlp": "3.23.0", "packages/google-cloud-dms": "1.9.5", - "packages/google-cloud-documentai": "2.29.3", + "packages/google-cloud-documentai": "2.33.0", "packages/google-cloud-domains": 
"1.7.5", "packages/google-cloud-edgecontainer": "0.5.11", - "packages/google-cloud-edgenetwork": "0.1.10", + "packages/google-cloud-edgenetwork": "0.1.11", "packages/google-cloud-enterpriseknowledgegraph": "0.3.11", "packages/google-cloud-essential-contacts": "1.7.5", "packages/google-cloud-eventarc": "1.11.5", "packages/google-cloud-eventarc-publishing": "0.6.11", "packages/google-cloud-filestore": "1.9.5", - "packages/google-cloud-functions": "1.16.5", - "packages/google-cloud-gdchardwaremanagement": "0.1.3", + "packages/google-cloud-functions": "1.17.0", + "packages/google-cloud-gdchardwaremanagement": "0.1.4", "packages/google-cloud-gke-backup": "0.5.11", - "packages/google-cloud-gke-connect-gateway": "0.8.11", + "packages/google-cloud-gke-connect-gateway": "0.9.0", "packages/google-cloud-gke-hub": "1.14.2", - "packages/google-cloud-gke-multicloud": "0.6.12", + "packages/google-cloud-gke-multicloud": "0.6.13", "packages/google-cloud-gsuiteaddons": "0.3.10", "packages/google-cloud-iam": "2.15.2", "packages/google-cloud-iam-logging": "1.3.5", "packages/google-cloud-iap": "1.13.5", "packages/google-cloud-ids": "1.7.5", - "packages/google-cloud-kms": "2.24.2", - "packages/google-cloud-kms-inventory": "0.2.8", + "packages/google-cloud-kms": "3.0.0", + "packages/google-cloud-kms-inventory": "0.2.9", "packages/google-cloud-language": "2.14.0", "packages/google-cloud-life-sciences": "0.9.12", "packages/google-cloud-managed-identities": "1.9.5", - "packages/google-cloud-managedkafka": "0.1.2", + "packages/google-cloud-managedkafka": "0.1.3", "packages/google-cloud-media-translation": "0.11.11", "packages/google-cloud-memcache": "1.9.5", "packages/google-cloud-migrationcenter": "0.1.9", "packages/google-cloud-monitoring": "2.22.2", "packages/google-cloud-monitoring-dashboards": "2.15.3", "packages/google-cloud-monitoring-metrics-scopes": "1.6.5", - "packages/google-cloud-netapp": "0.3.12", + "packages/google-cloud-netapp": "0.3.14", 
"packages/google-cloud-network-connectivity": "2.4.5", - "packages/google-cloud-network-management": "1.17.3", + "packages/google-cloud-network-management": "1.18.0", "packages/google-cloud-network-security": "0.9.11", "packages/google-cloud-network-services": "0.5.14", "packages/google-cloud-notebooks": "1.10.5", "packages/google-cloud-optimization": "1.8.5", - "packages/google-cloud-orchestration-airflow": "1.13.1", + "packages/google-cloud-oracledatabase": "0.1.0", + "packages/google-cloud-orchestration-airflow": "1.14.0", "packages/google-cloud-os-config": "1.17.5", "packages/google-cloud-os-login": "2.14.6", - "packages/google-cloud-parallelstore": "0.2.2", + "packages/google-cloud-parallelstore": "0.2.4", "packages/google-cloud-phishing-protection": "1.11.5", "packages/google-cloud-policy-troubleshooter": "1.11.5", "packages/google-cloud-policysimulator": "0.1.8", @@ -128,7 +131,7 @@ "packages/google-cloud-privilegedaccessmanager": "0.1.1", "packages/google-cloud-public-ca": "0.3.12", "packages/google-cloud-rapidmigrationassessment": "0.1.9", - "packages/google-cloud-recaptcha-enterprise": "1.21.2", + "packages/google-cloud-recaptcha-enterprise": "1.22.1", "packages/google-cloud-recommendations-ai": "0.10.12", "packages/google-cloud-recommender": "2.15.5", "packages/google-cloud-redis": "2.15.5", @@ -136,37 +139,37 @@ "packages/google-cloud-resource-manager": "1.12.5", "packages/google-cloud-resource-settings": "1.9.6", "packages/google-cloud-retail": "1.21.2", - "packages/google-cloud-run": "0.10.8", + "packages/google-cloud-run": "0.10.9", "packages/google-cloud-scheduler": "2.13.5", "packages/google-cloud-secret-manager": "2.20.2", "packages/google-cloud-securesourcemanager": "0.1.8", - "packages/google-cloud-securitycenter": "1.33.1", + "packages/google-cloud-securitycenter": "1.34.0", "packages/google-cloud-securitycentermanagement": "0.1.14", "packages/google-cloud-service-control": "1.12.3", "packages/google-cloud-service-directory": "1.11.6", - 
"packages/google-cloud-service-management": "1.8.5", + "packages/google-cloud-service-management": "1.9.0", "packages/google-cloud-service-usage": "1.10.5", "packages/google-cloud-servicehealth": "0.1.6", "packages/google-cloud-shell": "1.9.5", "packages/google-cloud-source-context": "1.5.5", "packages/google-cloud-speech": "2.27.0", "packages/google-cloud-storage-control": "1.0.3", - "packages/google-cloud-storage-transfer": "1.11.5", + "packages/google-cloud-storage-transfer": "1.12.0", "packages/google-cloud-storageinsights": "0.1.10", "packages/google-cloud-support": "0.1.9", "packages/google-cloud-talent": "2.13.5", "packages/google-cloud-tasks": "2.16.5", "packages/google-cloud-telcoautomation": "0.2.5", - "packages/google-cloud-texttospeech": "2.16.5", + "packages/google-cloud-texttospeech": "2.17.2", "packages/google-cloud-tpu": "1.18.5", "packages/google-cloud-trace": "1.13.5", - "packages/google-cloud-translate": "3.15.5", + "packages/google-cloud-translate": "3.16.0", "packages/google-cloud-video-live-stream": "1.8.1", "packages/google-cloud-video-stitcher": "0.7.12", "packages/google-cloud-video-transcoder": "1.12.5", "packages/google-cloud-videointelligence": "2.13.5", "packages/google-cloud-vision": "3.7.4", - "packages/google-cloud-visionai": "0.1.2", + "packages/google-cloud-visionai": "0.1.3", "packages/google-cloud-vm-migration": "1.8.5", "packages/google-cloud-vmwareengine": "1.5.0", "packages/google-cloud-vpc-access": "1.10.5", @@ -176,17 +179,18 @@ "packages/google-cloud-workstations": "0.5.8", "packages/google-geo-type": "0.3.9", "packages/google-maps-addressvalidation": "0.3.13", + "packages/google-maps-areainsights": "0.1.0", "packages/google-maps-fleetengine": "0.2.2", - "packages/google-maps-fleetengine-delivery": "0.2.3", + "packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", - "packages/google-maps-places": "0.1.17", - "packages/google-maps-routeoptimization": "0.1.2", + 
"packages/google-maps-places": "0.1.18", + "packages/google-maps-routeoptimization": "0.1.4", "packages/google-maps-routing": "0.6.10", "packages/google-maps-solar": "0.1.2", - "packages/google-shopping-css": "0.1.7", - "packages/google-shopping-merchant-accounts": "0.1.3", + "packages/google-shopping-css": "0.1.8", + "packages/google-shopping-merchant-accounts": "0.2.0", "packages/google-shopping-merchant-conversions": "0.1.3", - "packages/google-shopping-merchant-datasources": "0.1.2", + "packages/google-shopping-merchant-datasources": "0.1.3", "packages/google-shopping-merchant-inventories": "0.1.9", "packages/google-shopping-merchant-lfp": "0.1.3", "packages/google-shopping-merchant-notifications": "0.1.2", diff --git a/CHANGELOG.md b/CHANGELOG.md index 6f41cb3e7650..3c4f1ec4af65 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,201 +2,205 @@ Please refer to each API's `CHANGELOG.md` file under the `packages/` directory Changelogs ----- -- [google-ads-admanager==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) -- [google-ai-generativelanguage==0.6.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) -- [google-analytics-admin==0.22.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) -- [google-analytics-data==0.18.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) -- [google-apps-card==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-card/CHANGELOG.md) -- [google-apps-chat==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) -- [google-apps-events-subscriptions==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-events-subscriptions/CHANGELOG.md) -- 
[google-apps-meet==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/CHANGELOG.md) -- [google-apps-script-type==0.3.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) -- [google-area120-tables==0.11.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-area120-tables/CHANGELOG.md) -- [google-cloud-access-approval==1.13.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-approval/CHANGELOG.md) -- [google-cloud-advisorynotifications==0.3.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) -- [google-cloud-alloydb-connectors==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb-connectors/CHANGELOG.md) -- [google-cloud-alloydb==0.3.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) -- [google-cloud-api-gateway==1.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-gateway/CHANGELOG.md) -- [google-cloud-api-keys==0.5.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) -- [google-cloud-apigee-connect==1.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) -- [google-cloud-apigee-registry==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-registry/CHANGELOG.md) -- [google-cloud-appengine-admin==1.11.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-admin/CHANGELOG.md) -- [google-cloud-appengine-logging==1.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) -- 
[google-cloud-apphub==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apphub/CHANGELOG.md) -- [google-cloud-artifact-registry==1.11.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-artifact-registry/CHANGELOG.md) -- [google-cloud-asset==3.26.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) -- [google-cloud-assured-workloads==1.12.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/CHANGELOG.md) -- [google-cloud-automl==2.13.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) -- [google-cloud-backupdr==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) -- [google-cloud-bare-metal-solution==1.7.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.22](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) -- [google-cloud-beyondcorp-appconnections==0.4.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) -- [google-cloud-beyondcorp-appconnectors==0.4.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) -- [google-cloud-beyondcorp-appgateways==0.4.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) -- [google-cloud-beyondcorp-clientconnectorservices==0.4.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md) -- 
[google-cloud-beyondcorp-clientgateways==0.4.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md) -- [google-cloud-bigquery-analyticshub==0.4.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md) -- [google-cloud-bigquery-biglake==0.4.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake/CHANGELOG.md) -- [google-cloud-bigquery-connection==1.15.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) -- [google-cloud-bigquery-data-exchange==0.5.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) -- [google-cloud-bigquery-datapolicies==0.6.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) -- [google-cloud-bigquery-datatransfer==3.15.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) -- [google-cloud-bigquery-logging==1.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) -- [google-cloud-bigquery-migration==0.11.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) -- [google-cloud-bigquery-reservation==1.13.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) -- [google-cloud-billing-budgets==1.14.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) -- [google-cloud-billing==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) -- 
[google-cloud-binary-authorization==1.10.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-binary-authorization/CHANGELOG.md) -- [google-cloud-build==3.24.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) -- [google-cloud-certificate-manager==1.7.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md) -- [google-cloud-channel==1.18.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md) -- [google-cloud-cloudcontrolspartner==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) -- [google-cloud-cloudquotas==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudquotas/CHANGELOG.md) -- [google-cloud-commerce-consumer-procurement==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md) -- [google-cloud-common==1.3.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-common/CHANGELOG.md) -- [google-cloud-compute==1.19.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-compute/CHANGELOG.md) -- [google-cloud-confidentialcomputing==0.4.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) -- [google-cloud-config==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md) -- [google-cloud-contact-center-insights==1.17.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md) -- [google-cloud-container==2.49.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) -- 
[google-cloud-containeranalysis==2.14.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-containeranalysis/CHANGELOG.md) -- [google-cloud-contentwarehouse==0.7.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) -- [google-cloud-data-fusion==1.10.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md) -- [google-cloud-data-qna==0.10.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-qna/CHANGELOG.md) -- [google-cloud-datacatalog-lineage==0.3.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog-lineage/CHANGELOG.md) -- [google-cloud-datacatalog==3.19.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog/CHANGELOG.md) -- [google-cloud-dataflow-client==0.8.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataflow-client/CHANGELOG.md) -- [google-cloud-dataform==0.5.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataform/CHANGELOG.md) -- [google-cloud-datalabeling==1.10.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) -- [google-cloud-dataplex==2.2.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) -- [google-cloud-dataproc-metastore==1.15.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) -- [google-cloud-dataproc==5.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) -- [google-cloud-datastream==1.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) -- 
[google-cloud-deploy==1.19.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) -- [google-cloud-developerconnect==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/CHANGELOG.md) -- [google-cloud-dialogflow-cx==1.34.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) -- [google-cloud-dialogflow==2.30.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) -- [google-cloud-discoveryengine==0.11.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) -- [google-cloud-dlp==3.18.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) -- [google-cloud-dms==1.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) -- [google-cloud-documentai==2.29.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) -- [google-cloud-domains==1.7.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) -- [google-cloud-edgecontainer==0.5.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) -- [google-cloud-edgenetwork==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) -- [google-cloud-enterpriseknowledgegraph==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md) -- [google-cloud-essential-contacts==1.7.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md) -- 
[google-cloud-eventarc-publishing==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md) -- [google-cloud-eventarc==1.11.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) -- [google-cloud-filestore==1.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-filestore/CHANGELOG.md) -- [google-cloud-functions==1.16.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) -- [google-cloud-gdchardwaremanagement==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) -- [google-cloud-gke-backup==0.5.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) -- [google-cloud-gke-connect-gateway==0.8.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) -- [google-cloud-gke-hub==1.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) -- [google-cloud-gke-multicloud==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) -- [google-cloud-gsuiteaddons==0.3.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) -- [google-cloud-iam-logging==1.3.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md) -- [google-cloud-iam==2.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md) -- [google-cloud-iap==1.13.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) -- 
[google-cloud-ids==1.7.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md) -- [google-cloud-kms-inventory==0.2.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md) -- [google-cloud-kms==2.24.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) -- [google-cloud-language==2.13.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) -- [google-cloud-life-sciences==0.9.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) -- [google-cloud-managed-identities==1.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) -- [google-cloud-managedkafka==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managedkafka/CHANGELOG.md) -- [google-cloud-media-translation==0.11.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-media-translation/CHANGELOG.md) -- [google-cloud-memcache==1.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache/CHANGELOG.md) -- [google-cloud-migrationcenter==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-migrationcenter/CHANGELOG.md) -- [google-cloud-monitoring-dashboards==2.15.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-dashboards/CHANGELOG.md) -- [google-cloud-monitoring-metrics-scopes==1.6.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md) -- [google-cloud-monitoring==2.22.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) -- 
[google-cloud-netapp==0.3.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) -- [google-cloud-network-connectivity==2.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) -- [google-cloud-network-management==1.17.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) -- [google-cloud-network-security==0.9.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) -- [google-cloud-network-services==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) -- [google-cloud-notebooks==1.10.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) -- [google-cloud-optimization==1.8.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md) -- [google-cloud-orchestration-airflow==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) -- [google-cloud-os-config==1.17.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-config/CHANGELOG.md) -- [google-cloud-os-login==2.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-login/CHANGELOG.md) -- [google-cloud-parallelstore==0.2.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-parallelstore/CHANGELOG.md) -- [google-cloud-phishing-protection==1.11.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-phishing-protection/CHANGELOG.md) -- [google-cloud-policy-troubleshooter==1.11.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policy-troubleshooter/CHANGELOG.md) -- 
[google-cloud-policysimulator==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policysimulator/CHANGELOG.md) -- [google-cloud-policytroubleshooter-iam==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policytroubleshooter-iam/CHANGELOG.md) -- [google-cloud-private-ca==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-ca/CHANGELOG.md) -- [google-cloud-private-catalog==0.9.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-catalog/CHANGELOG.md) -- [google-cloud-privilegedaccessmanager==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-privilegedaccessmanager/CHANGELOG.md) -- [google-cloud-public-ca==0.3.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) -- [google-cloud-rapidmigrationassessment==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) -- [google-cloud-recaptcha-enterprise==1.21.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) -- [google-cloud-recommendations-ai==0.10.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) -- [google-cloud-recommender==2.15.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) -- [google-cloud-redis-cluster==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) -- [google-cloud-redis==2.15.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis/CHANGELOG.md) -- 
[google-cloud-resource-manager==1.12.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-manager/CHANGELOG.md) -- [google-cloud-resource-settings==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-settings/CHANGELOG.md) -- [google-cloud-retail==1.21.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-retail/CHANGELOG.md) -- [google-cloud-run==0.10.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-run/CHANGELOG.md) -- [google-cloud-scheduler==2.13.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-scheduler/CHANGELOG.md) -- [google-cloud-secret-manager==2.20.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md) -- [google-cloud-securesourcemanager==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securesourcemanager/CHANGELOG.md) -- [google-cloud-securitycenter==1.33.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md) -- [google-cloud-securitycentermanagement==0.1.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycentermanagement/CHANGELOG.md) -- [google-cloud-service-control==1.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) -- [google-cloud-service-directory==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) -- [google-cloud-service-management==1.8.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) -- [google-cloud-service-usage==1.10.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md) -- 
[google-cloud-servicehealth==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-servicehealth/CHANGELOG.md) -- [google-cloud-shell==1.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md) -- [google-cloud-source-context==1.5.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-source-context/CHANGELOG.md) -- [google-cloud-speech==2.26.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-speech/CHANGELOG.md) -- [google-cloud-storage-control==1.0.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-control/CHANGELOG.md) -- [google-cloud-storage-transfer==1.11.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-transfer/CHANGELOG.md) -- [google-cloud-storageinsights==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md) -- [google-cloud-support==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-support/CHANGELOG.md) -- [google-cloud-talent==2.13.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md) -- [google-cloud-tasks==2.16.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tasks/CHANGELOG.md) -- [google-cloud-telcoautomation==0.2.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-telcoautomation/CHANGELOG.md) -- [google-cloud-texttospeech==2.16.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) -- [google-cloud-tpu==1.18.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) -- [google-cloud-trace==1.13.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-trace/CHANGELOG.md) 
-- [google-cloud-translate==3.15.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) -- [google-cloud-video-live-stream==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-live-stream/CHANGELOG.md) -- [google-cloud-video-stitcher==0.7.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-stitcher/CHANGELOG.md) -- [google-cloud-video-transcoder==1.12.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md) -- [google-cloud-videointelligence==2.13.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-videointelligence/CHANGELOG.md) -- [google-cloud-vision==3.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vision/CHANGELOG.md) -- [google-cloud-visionai==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) -- [google-cloud-vm-migration==1.8.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md) -- [google-cloud-vmwareengine==1.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md) -- [google-cloud-vpc-access==1.10.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md) -- [google-cloud-webrisk==1.14.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-webrisk/CHANGELOG.md) -- [google-cloud-websecurityscanner==1.14.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-websecurityscanner/CHANGELOG.md) -- [google-cloud-workflows==1.14.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workflows/CHANGELOG.md) -- 
[google-cloud-workstations==0.5.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) -- [google-geo-type==0.3.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) -- [google-maps-addressvalidation==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) -- [google-maps-fleetengine-delivery==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) -- [google-maps-fleetengine==0.2.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) -- [google-maps-mapsplatformdatasets==0.4.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) -- [google-maps-places==0.1.16](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) -- [google-maps-routeoptimization==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) -- [google-maps-routing==0.6.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) -- [google-maps-solar==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-solar/CHANGELOG.md) -- [google-shopping-css==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) -- [google-shopping-merchant-accounts==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-accounts/CHANGELOG.md) -- [google-shopping-merchant-conversions==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-conversions/CHANGELOG.md) -- 
[google-shopping-merchant-datasources==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-datasources/CHANGELOG.md) -- [google-shopping-merchant-inventories==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-inventories/CHANGELOG.md) -- [google-shopping-merchant-lfp==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-lfp/CHANGELOG.md) -- [google-shopping-merchant-notifications==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-notifications/CHANGELOG.md) -- [google-shopping-merchant-products==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-products/CHANGELOG.md) -- [google-shopping-merchant-promotions==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-promotions/CHANGELOG.md) -- [google-shopping-merchant-quota==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-quota/CHANGELOG.md) -- [google-shopping-merchant-reports==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-reports/CHANGELOG.md) -- [google-shopping-type==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-type/CHANGELOG.md) -- [grafeas==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/grafeas/CHANGELOG.md) +- [google-ads-admanager==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) +- [google-ads-marketingplatform-admin==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/CHANGELOG.md) +- 
[google-ai-generativelanguage==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) +- [google-analytics-admin==0.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) +- [google-analytics-data==0.18.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) +- [google-apps-card==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-card/CHANGELOG.md) +- [google-apps-chat==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) +- [google-apps-events-subscriptions==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-events-subscriptions/CHANGELOG.md) +- [google-apps-meet==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/CHANGELOG.md) +- [google-apps-script-type==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) +- [google-area120-tables==0.11.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-area120-tables/CHANGELOG.md) +- [google-cloud-access-approval==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-approval/CHANGELOG.md) +- [google-cloud-advisorynotifications==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) +- [google-cloud-alloydb-connectors==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb-connectors/CHANGELOG.md) +- [google-cloud-alloydb==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) +- 
[google-cloud-api-gateway==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-gateway/CHANGELOG.md) +- [google-cloud-api-keys==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) +- [google-cloud-apigee-connect==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) +- [google-cloud-apigee-registry==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-registry/CHANGELOG.md) +- [google-cloud-apihub==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub/CHANGELOG.md) +- [google-cloud-appengine-admin==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-admin/CHANGELOG.md) +- [google-cloud-appengine-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) +- [google-cloud-apphub==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apphub/CHANGELOG.md) +- [google-cloud-artifact-registry==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-artifact-registry/CHANGELOG.md) +- [google-cloud-asset==3.26.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) +- [google-cloud-assured-workloads==1.12.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/CHANGELOG.md) +- [google-cloud-automl==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) +- [google-cloud-backupdr==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) +- 
[google-cloud-bare-metal-solution==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) +- [google-cloud-batch==0.17.28](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- [google-cloud-beyondcorp-appconnections==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) +- [google-cloud-beyondcorp-appconnectors==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) +- [google-cloud-beyondcorp-appgateways==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) +- [google-cloud-beyondcorp-clientconnectorservices==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md) +- [google-cloud-beyondcorp-clientgateways==0.4.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md) +- [google-cloud-bigquery-analyticshub==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md) +- [google-cloud-bigquery-biglake==0.4.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake/CHANGELOG.md) +- [google-cloud-bigquery-connection==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) +- [google-cloud-bigquery-data-exchange==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) +- [google-cloud-bigquery-datapolicies==0.6.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) +- 
[google-cloud-bigquery-datatransfer==3.15.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) +- [google-cloud-bigquery-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) +- [google-cloud-bigquery-migration==0.11.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) +- [google-cloud-bigquery-reservation==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) +- [google-cloud-billing-budgets==1.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) +- [google-cloud-billing==1.13.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) +- [google-cloud-binary-authorization==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-binary-authorization/CHANGELOG.md) +- [google-cloud-build==3.25.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) +- [google-cloud-certificate-manager==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md) +- [google-cloud-channel==1.18.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md) +- [google-cloud-cloudcontrolspartner==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) +- [google-cloud-cloudquotas==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudquotas/CHANGELOG.md) +- 
[google-cloud-commerce-consumer-procurement==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md) +- [google-cloud-common==1.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-common/CHANGELOG.md) +- [google-cloud-compute==1.19.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-compute/CHANGELOG.md) +- [google-cloud-confidentialcomputing==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) +- [google-cloud-config==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md) +- [google-cloud-contact-center-insights==1.17.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md) +- [google-cloud-container==2.51.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) +- [google-cloud-containeranalysis==2.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-containeranalysis/CHANGELOG.md) +- [google-cloud-contentwarehouse==0.7.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) +- [google-cloud-data-fusion==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md) +- [google-cloud-data-qna==0.10.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-qna/CHANGELOG.md) +- [google-cloud-datacatalog-lineage==0.3.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog-lineage/CHANGELOG.md) +- [google-cloud-datacatalog==3.20.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog/CHANGELOG.md) +- 
[google-cloud-dataflow-client==0.8.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataflow-client/CHANGELOG.md) +- [google-cloud-dataform==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataform/CHANGELOG.md) +- [google-cloud-datalabeling==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) +- [google-cloud-dataplex==2.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) +- [google-cloud-dataproc-metastore==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) +- [google-cloud-dataproc==5.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) +- [google-cloud-datastream==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) +- [google-cloud-deploy==2.0.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) +- [google-cloud-developerconnect==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/CHANGELOG.md) +- [google-cloud-dialogflow-cx==1.35.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) +- [google-cloud-dialogflow==2.32.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) +- [google-cloud-discoveryengine==0.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) +- [google-cloud-dlp==3.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) +- 
[google-cloud-dms==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) +- [google-cloud-documentai==2.32.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) +- [google-cloud-domains==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) +- [google-cloud-edgecontainer==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) +- [google-cloud-edgenetwork==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) +- [google-cloud-enterpriseknowledgegraph==0.3.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md) +- [google-cloud-essential-contacts==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md) +- [google-cloud-eventarc-publishing==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md) +- [google-cloud-eventarc==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) +- [google-cloud-filestore==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-filestore/CHANGELOG.md) +- [google-cloud-functions==1.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) +- [google-cloud-gdchardwaremanagement==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) +- [google-cloud-gke-backup==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) +- 
[google-cloud-gke-connect-gateway==0.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) +- [google-cloud-gke-hub==1.14.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) +- [google-cloud-gke-multicloud==0.6.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) +- [google-cloud-gsuiteaddons==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) +- [google-cloud-iam-logging==1.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md) +- [google-cloud-iam==2.15.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md) +- [google-cloud-iap==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) +- [google-cloud-ids==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md) +- [google-cloud-kms-inventory==0.2.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md) +- [google-cloud-kms==3.0.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) +- [google-cloud-language==2.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) +- [google-cloud-life-sciences==0.9.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) +- [google-cloud-managed-identities==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) +- 
[google-cloud-managedkafka==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managedkafka/CHANGELOG.md) +- [google-cloud-media-translation==0.11.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-media-translation/CHANGELOG.md) +- [google-cloud-memcache==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache/CHANGELOG.md) +- [google-cloud-migrationcenter==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-migrationcenter/CHANGELOG.md) +- [google-cloud-monitoring-dashboards==2.15.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-dashboards/CHANGELOG.md) +- [google-cloud-monitoring-metrics-scopes==1.6.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md) +- [google-cloud-monitoring==2.22.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) +- [google-cloud-netapp==0.3.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) +- [google-cloud-network-connectivity==2.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) +- [google-cloud-network-management==1.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) +- [google-cloud-network-security==0.9.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) +- [google-cloud-network-services==0.5.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) +- [google-cloud-notebooks==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) +- 
[google-cloud-optimization==1.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md) +- [google-cloud-oracledatabase==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase/CHANGELOG.md) +- [google-cloud-orchestration-airflow==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) +- [google-cloud-os-config==1.17.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-config/CHANGELOG.md) +- [google-cloud-os-login==2.14.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-login/CHANGELOG.md) +- [google-cloud-parallelstore==0.2.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-parallelstore/CHANGELOG.md) +- [google-cloud-phishing-protection==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-phishing-protection/CHANGELOG.md) +- [google-cloud-policy-troubleshooter==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policy-troubleshooter/CHANGELOG.md) +- [google-cloud-policysimulator==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policysimulator/CHANGELOG.md) +- [google-cloud-policytroubleshooter-iam==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policytroubleshooter-iam/CHANGELOG.md) +- [google-cloud-private-ca==1.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-ca/CHANGELOG.md) +- [google-cloud-private-catalog==0.9.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-catalog/CHANGELOG.md) +- 
[google-cloud-privilegedaccessmanager==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-privilegedaccessmanager/CHANGELOG.md) +- [google-cloud-public-ca==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) +- [google-cloud-rapidmigrationassessment==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) +- [google-cloud-recaptcha-enterprise==1.22.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) +- [google-cloud-recommendations-ai==0.10.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) +- [google-cloud-recommender==2.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) +- [google-cloud-redis-cluster==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) +- [google-cloud-redis==2.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis/CHANGELOG.md) +- [google-cloud-resource-manager==1.12.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-manager/CHANGELOG.md) +- [google-cloud-resource-settings==1.9.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-settings/CHANGELOG.md) +- [google-cloud-retail==1.21.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-retail/CHANGELOG.md) +- [google-cloud-run==0.10.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-run/CHANGELOG.md) +- [google-cloud-scheduler==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-scheduler/CHANGELOG.md) +- 
[google-cloud-secret-manager==2.20.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md) +- [google-cloud-securesourcemanager==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securesourcemanager/CHANGELOG.md) +- [google-cloud-securitycenter==1.34.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md) +- [google-cloud-securitycentermanagement==0.1.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycentermanagement/CHANGELOG.md) +- [google-cloud-service-control==1.12.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) +- [google-cloud-service-directory==1.11.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) +- [google-cloud-service-management==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) +- [google-cloud-service-usage==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md) +- [google-cloud-servicehealth==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-servicehealth/CHANGELOG.md) +- [google-cloud-shell==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md) +- [google-cloud-source-context==1.5.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-source-context/CHANGELOG.md) +- [google-cloud-speech==2.27.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-speech/CHANGELOG.md) +- [google-cloud-storage-control==1.0.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-control/CHANGELOG.md) +- 
[google-cloud-storage-transfer==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-transfer/CHANGELOG.md) +- [google-cloud-storageinsights==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md) +- [google-cloud-support==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-support/CHANGELOG.md) +- [google-cloud-talent==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md) +- [google-cloud-tasks==2.16.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tasks/CHANGELOG.md) +- [google-cloud-telcoautomation==0.2.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-telcoautomation/CHANGELOG.md) +- [google-cloud-texttospeech==2.17.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) +- [google-cloud-tpu==1.18.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) +- [google-cloud-trace==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-trace/CHANGELOG.md) +- [google-cloud-translate==3.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) +- [google-cloud-video-live-stream==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-live-stream/CHANGELOG.md) +- [google-cloud-video-stitcher==0.7.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-stitcher/CHANGELOG.md) +- [google-cloud-video-transcoder==1.12.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md) +- 
[google-cloud-videointelligence==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-videointelligence/CHANGELOG.md) +- [google-cloud-vision==3.7.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vision/CHANGELOG.md) +- [google-cloud-visionai==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) +- [google-cloud-vm-migration==1.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md) +- [google-cloud-vmwareengine==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md) +- [google-cloud-vpc-access==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md) +- [google-cloud-webrisk==1.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-webrisk/CHANGELOG.md) +- [google-cloud-websecurityscanner==1.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-websecurityscanner/CHANGELOG.md) +- [google-cloud-workflows==1.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workflows/CHANGELOG.md) +- [google-cloud-workstations==0.5.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) +- [google-geo-type==0.3.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) +- [google-maps-addressvalidation==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) +- [google-maps-areainsights==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-areainsights/CHANGELOG.md) +- 
[google-maps-fleetengine-delivery==0.2.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) +- [google-maps-fleetengine==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) +- [google-maps-mapsplatformdatasets==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) +- [google-maps-places==0.1.18](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) +- [google-maps-routeoptimization==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) +- [google-maps-routing==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) +- [google-maps-solar==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-solar/CHANGELOG.md) +- [google-shopping-css==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) +- [google-shopping-merchant-accounts==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-accounts/CHANGELOG.md) +- [google-shopping-merchant-conversions==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-conversions/CHANGELOG.md) +- [google-shopping-merchant-datasources==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-datasources/CHANGELOG.md) +- [google-shopping-merchant-inventories==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-inventories/CHANGELOG.md) +- [google-shopping-merchant-lfp==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-lfp/CHANGELOG.md) +- 
[google-shopping-merchant-notifications==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-notifications/CHANGELOG.md) +- [google-shopping-merchant-products==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-products/CHANGELOG.md) +- [google-shopping-merchant-promotions==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-promotions/CHANGELOG.md) +- [google-shopping-merchant-quota==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-quota/CHANGELOG.md) +- [google-shopping-merchant-reports==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-reports/CHANGELOG.md) +- [google-shopping-type==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-type/CHANGELOG.md) +- [grafeas==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/grafeas/CHANGELOG.md) diff --git a/packages/google-ads-admanager/CHANGELOG.md b/packages/google-ads-admanager/CHANGELOG.md index 2a855789274e..3a1de1e5b43f 100644 --- a/packages/google-ads-admanager/CHANGELOG.md +++ b/packages/google-ads-admanager/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-ads-admanager-v0.1.2...google-ads-admanager-v0.2.0) (2024-10-08) + + +### ⚠ BREAKING CHANGES + +* Removed closed beta services that had data discrepancies with the SOAP API + +### Features + +* Added support for Interactive Reporting ([6db79dc](https://github.com/googleapis/google-cloud-python/commit/6db79dc964b540f1c9c21d96122e4916aca66d98)) + + +### Bug Fixes + +* Removed closed beta services that had data discrepancies with the SOAP API ([6db79dc](https://github.com/googleapis/google-cloud-python/commit/6db79dc964b540f1c9c21d96122e4916aca66d98)) + ## 
[0.1.2](https://github.com/googleapis/google-cloud-python/compare/google-ads-admanager-v0.1.1...google-ads-admanager-v0.1.2) (2024-07-30) diff --git a/packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst b/packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst deleted file mode 100644 index 7ccc095d3628..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AdPartnerService ----------------------------------- - -.. automodule:: google.ads.admanager_v1.services.ad_partner_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.ad_partner_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/contact_service.rst b/packages/google-ads-admanager/docs/admanager_v1/contact_service.rst deleted file mode 100644 index 478ccc08a803..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/contact_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -ContactService --------------------------------- - -.. automodule:: google.ads.admanager_v1.services.contact_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.contact_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/creative_service.rst b/packages/google-ads-admanager/docs/admanager_v1/creative_service.rst deleted file mode 100644 index 2f4e457a7ab3..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/creative_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -CreativeService ---------------------------------- - -.. automodule:: google.ads.admanager_v1.services.creative_service - :members: - :inherited-members: - -.. 
automodule:: google.ads.admanager_v1.services.creative_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst b/packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst new file mode 100644 index 000000000000..d4e1f7fa5634 --- /dev/null +++ b/packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst @@ -0,0 +1,10 @@ +EntitySignalsMappingService +--------------------------------------------- + +.. automodule:: google.ads.admanager_v1.services.entity_signals_mapping_service + :members: + :inherited-members: + +.. automodule:: google.ads.admanager_v1.services.entity_signals_mapping_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/label_service.rst b/packages/google-ads-admanager/docs/admanager_v1/label_service.rst deleted file mode 100644 index f3408d1767f5..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/label_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -LabelService ------------------------------- - -.. automodule:: google.ads.admanager_v1.services.label_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.label_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst b/packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst deleted file mode 100644 index 6b4388d90085..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -LineItemService ---------------------------------- - -.. automodule:: google.ads.admanager_v1.services.line_item_service - :members: - :inherited-members: - -.. 
automodule:: google.ads.admanager_v1.services.line_item_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/report_service.rst b/packages/google-ads-admanager/docs/admanager_v1/report_service.rst index 96130cad2289..a655ad73d7a3 100644 --- a/packages/google-ads-admanager/docs/admanager_v1/report_service.rst +++ b/packages/google-ads-admanager/docs/admanager_v1/report_service.rst @@ -4,3 +4,7 @@ ReportService .. automodule:: google.ads.admanager_v1.services.report_service :members: :inherited-members: + +.. automodule:: google.ads.admanager_v1.services.report_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/services_.rst b/packages/google-ads-admanager/docs/admanager_v1/services_.rst index a9b93b8a07c2..a1522b62dc40 100644 --- a/packages/google-ads-admanager/docs/admanager_v1/services_.rst +++ b/packages/google-ads-admanager/docs/admanager_v1/services_.rst @@ -3,20 +3,16 @@ Services for Google Ads Admanager v1 API .. toctree:: :maxdepth: 2 - ad_partner_service ad_unit_service company_service - contact_service - creative_service custom_field_service custom_targeting_key_service custom_targeting_value_service - label_service - line_item_service + entity_signals_mapping_service network_service order_service placement_service report_service role_service - team_service + taxonomy_category_service user_service diff --git a/packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst b/packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst new file mode 100644 index 000000000000..61f13e739e19 --- /dev/null +++ b/packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst @@ -0,0 +1,10 @@ +TaxonomyCategoryService +----------------------------------------- + +.. automodule:: google.ads.admanager_v1.services.taxonomy_category_service + :members: + :inherited-members: + +.. 
automodule:: google.ads.admanager_v1.services.taxonomy_category_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/team_service.rst b/packages/google-ads-admanager/docs/admanager_v1/team_service.rst deleted file mode 100644 index 4d3e14c6f6c1..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/team_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -TeamService ------------------------------ - -.. automodule:: google.ads.admanager_v1.services.team_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.team_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/user_service.rst b/packages/google-ads-admanager/docs/admanager_v1/user_service.rst index 9bae86979749..c7be2db4394e 100644 --- a/packages/google-ads-admanager/docs/admanager_v1/user_service.rst +++ b/packages/google-ads-admanager/docs/admanager_v1/user_service.rst @@ -4,7 +4,3 @@ UserService .. automodule:: google.ads.admanager_v1.services.user_service :members: :inherited-members: - -.. 
automodule:: google.ads.admanager_v1.services.user_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/google/ads/admanager/__init__.py b/packages/google-ads-admanager/google/ads/admanager/__init__.py index 39067144e427..9672929d7e9f 100644 --- a/packages/google-ads-admanager/google/ads/admanager/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager/__init__.py @@ -18,15 +18,8 @@ __version__ = package_version.__version__ -from google.ads.admanager_v1.services.ad_partner_service.client import ( - AdPartnerServiceClient, -) from google.ads.admanager_v1.services.ad_unit_service.client import AdUnitServiceClient from google.ads.admanager_v1.services.company_service.client import CompanyServiceClient -from google.ads.admanager_v1.services.contact_service.client import ContactServiceClient -from google.ads.admanager_v1.services.creative_service.client import ( - CreativeServiceClient, -) from google.ads.admanager_v1.services.custom_field_service.client import ( CustomFieldServiceClient, ) @@ -36,9 +29,8 @@ from google.ads.admanager_v1.services.custom_targeting_value_service.client import ( CustomTargetingValueServiceClient, ) -from google.ads.admanager_v1.services.label_service.client import LabelServiceClient -from google.ads.admanager_v1.services.line_item_service.client import ( - LineItemServiceClient, +from google.ads.admanager_v1.services.entity_signals_mapping_service.client import ( + EntitySignalsMappingServiceClient, ) from google.ads.admanager_v1.services.network_service.client import NetworkServiceClient from google.ads.admanager_v1.services.order_service.client import OrderServiceClient @@ -47,76 +39,66 @@ ) from google.ads.admanager_v1.services.report_service.client import ReportServiceClient from google.ads.admanager_v1.services.role_service.client import RoleServiceClient -from google.ads.admanager_v1.services.team_service.client import TeamServiceClient -from 
google.ads.admanager_v1.services.user_service.client import UserServiceClient -from google.ads.admanager_v1.types.ad_partner_declaration import ( - AdPartnerDeclaration, - DeclarationTypeEnum, +from google.ads.admanager_v1.services.taxonomy_category_service.client import ( + TaxonomyCategoryServiceClient, ) -from google.ads.admanager_v1.types.ad_partner_service import ( - AdPartner, - GetAdPartnerRequest, - ListAdPartnersRequest, - ListAdPartnersResponse, +from google.ads.admanager_v1.services.user_service.client import UserServiceClient +from google.ads.admanager_v1.types.ad_unit_enums import ( + AdUnitStatusEnum, + SmartSizeModeEnum, + TargetWindowEnum, ) -from google.ads.admanager_v1.types.ad_unit_enums import AppliedAdsenseEnabledEnum -from google.ads.admanager_v1.types.ad_unit_service import ( +from google.ads.admanager_v1.types.ad_unit_messages import ( AdUnit, AdUnitParent, - GetAdUnitRequest, + AdUnitSize, LabelFrequencyCap, +) +from google.ads.admanager_v1.types.ad_unit_service import ( + GetAdUnitRequest, + ListAdUnitSizesRequest, + ListAdUnitSizesResponse, ListAdUnitsRequest, ListAdUnitsResponse, - SmartSizeModeEnum, - TargetWindowEnum, ) -from google.ads.admanager_v1.types.ad_unit_size import AdUnitSize from google.ads.admanager_v1.types.admanager_error import AdManagerError from google.ads.admanager_v1.types.applied_label import AppliedLabel from google.ads.admanager_v1.types.company_credit_status_enum import ( CompanyCreditStatusEnum, ) +from google.ads.admanager_v1.types.company_messages import Company from google.ads.admanager_v1.types.company_service import ( - Company, GetCompanyRequest, ListCompaniesRequest, ListCompaniesResponse, ) from google.ads.admanager_v1.types.company_type_enum import CompanyTypeEnum -from google.ads.admanager_v1.types.computed_status_enum import ComputedStatusEnum -from google.ads.admanager_v1.types.contact_service import ( - Contact, - GetContactRequest, - ListContactsRequest, - ListContactsResponse, -) -from 
google.ads.admanager_v1.types.creative_placeholder import CreativePlaceholder -from google.ads.admanager_v1.types.creative_service import ( - Creative, - GetCreativeRequest, - ListCreativesRequest, - ListCreativesResponse, -) +from google.ads.admanager_v1.types.contact_messages import Contact from google.ads.admanager_v1.types.custom_field_enums import ( CustomFieldDataTypeEnum, CustomFieldEntityTypeEnum, CustomFieldStatusEnum, CustomFieldVisibilityEnum, ) -from google.ads.admanager_v1.types.custom_field_service import ( +from google.ads.admanager_v1.types.custom_field_messages import ( CustomField, CustomFieldOption, +) +from google.ads.admanager_v1.types.custom_field_service import ( GetCustomFieldRequest, ListCustomFieldsRequest, ListCustomFieldsResponse, ) +from google.ads.admanager_v1.types.custom_field_value import CustomFieldValue from google.ads.admanager_v1.types.custom_targeting_key_enums import ( CustomTargetingKeyReportableTypeEnum, CustomTargetingKeyStatusEnum, CustomTargetingKeyTypeEnum, ) -from google.ads.admanager_v1.types.custom_targeting_key_service import ( +from google.ads.admanager_v1.types.custom_targeting_key_messages import ( CustomTargetingKey, +) +from google.ads.admanager_v1.types.custom_targeting_key_service import ( GetCustomTargetingKeyRequest, ListCustomTargetingKeysRequest, ListCustomTargetingKeysResponse, @@ -125,109 +107,114 @@ CustomTargetingValueMatchTypeEnum, CustomTargetingValueStatusEnum, ) -from google.ads.admanager_v1.types.custom_targeting_value_service import ( +from google.ads.admanager_v1.types.custom_targeting_value_messages import ( CustomTargetingValue, +) +from google.ads.admanager_v1.types.custom_targeting_value_service import ( GetCustomTargetingValueRequest, ListCustomTargetingValuesRequest, ListCustomTargetingValuesResponse, ) -from google.ads.admanager_v1.types.environment_type_enum import EnvironmentTypeEnum -from google.ads.admanager_v1.types.frequency_cap import FrequencyCap, TimeUnitEnum -from 
google.ads.admanager_v1.types.goal import Goal, GoalTypeEnum, UnitTypeEnum -from google.ads.admanager_v1.types.label_service import ( - GetLabelRequest, - Label, - ListLabelsRequest, - ListLabelsResponse, +from google.ads.admanager_v1.types.entity_signals_mapping_messages import ( + EntitySignalsMapping, ) -from google.ads.admanager_v1.types.line_item_enums import ( - CreativeRotationTypeEnum, - DeliveryRateTypeEnum, - LineItemCostTypeEnum, - LineItemDiscountTypeEnum, - LineItemTypeEnum, - ReservationStatusEnum, +from google.ads.admanager_v1.types.entity_signals_mapping_service import ( + BatchCreateEntitySignalsMappingsRequest, + BatchCreateEntitySignalsMappingsResponse, + BatchUpdateEntitySignalsMappingsRequest, + BatchUpdateEntitySignalsMappingsResponse, + CreateEntitySignalsMappingRequest, + GetEntitySignalsMappingRequest, + ListEntitySignalsMappingsRequest, + ListEntitySignalsMappingsResponse, + UpdateEntitySignalsMappingRequest, ) -from google.ads.admanager_v1.types.line_item_service import ( - GetLineItemRequest, - LineItem, - ListLineItemsRequest, - ListLineItemsResponse, +from google.ads.admanager_v1.types.environment_type_enum import EnvironmentTypeEnum +from google.ads.admanager_v1.types.frequency_cap import FrequencyCap +from google.ads.admanager_v1.types.label_messages import Label +from google.ads.admanager_v1.types.network_messages import Network +from google.ads.admanager_v1.types.network_service import ( + GetNetworkRequest, + ListNetworksRequest, + ListNetworksResponse, ) -from google.ads.admanager_v1.types.network_service import GetNetworkRequest, Network +from google.ads.admanager_v1.types.order_enums import OrderStatusEnum +from google.ads.admanager_v1.types.order_messages import Order from google.ads.admanager_v1.types.order_service import ( GetOrderRequest, ListOrdersRequest, ListOrdersResponse, - Order, ) from google.ads.admanager_v1.types.placement_enums import PlacementStatusEnum +from google.ads.admanager_v1.types.placement_messages 
import Placement from google.ads.admanager_v1.types.placement_service import ( GetPlacementRequest, ListPlacementsRequest, ListPlacementsResponse, - Placement, ) from google.ads.admanager_v1.types.report_service import ( - ExportSavedReportMetadata, - ExportSavedReportRequest, - ExportSavedReportResponse, + CreateReportRequest, + FetchReportResultRowsRequest, + FetchReportResultRowsResponse, + GetReportRequest, + ListReportsRequest, + ListReportsResponse, Report, + ReportDefinition, + RunReportMetadata, + RunReportRequest, + RunReportResponse, + Schedule, + ScheduleOptions, + UpdateReportRequest, ) +from google.ads.admanager_v1.types.role_enums import RoleStatusEnum +from google.ads.admanager_v1.types.role_messages import Role from google.ads.admanager_v1.types.role_service import ( GetRoleRequest, ListRolesRequest, ListRolesResponse, - Role, -) -from google.ads.admanager_v1.types.size import Size, SizeTypeEnum -from google.ads.admanager_v1.types.team_service import ( - GetTeamRequest, - ListTeamsRequest, - ListTeamsResponse, - Team, ) -from google.ads.admanager_v1.types.user_service import ( - GetUserRequest, - ListUsersRequest, - ListUsersResponse, - User, +from google.ads.admanager_v1.types.size import Size +from google.ads.admanager_v1.types.size_type_enum import SizeTypeEnum +from google.ads.admanager_v1.types.taxonomy_category_messages import TaxonomyCategory +from google.ads.admanager_v1.types.taxonomy_category_service import ( + GetTaxonomyCategoryRequest, + ListTaxonomyCategoriesRequest, + ListTaxonomyCategoriesResponse, ) +from google.ads.admanager_v1.types.taxonomy_type_enum import TaxonomyTypeEnum +from google.ads.admanager_v1.types.team_messages import Team +from google.ads.admanager_v1.types.time_unit_enum import TimeUnitEnum +from google.ads.admanager_v1.types.user_messages import User +from google.ads.admanager_v1.types.user_service import GetUserRequest __all__ = ( - "AdPartnerServiceClient", "AdUnitServiceClient", "CompanyServiceClient", - 
"ContactServiceClient", - "CreativeServiceClient", "CustomFieldServiceClient", "CustomTargetingKeyServiceClient", "CustomTargetingValueServiceClient", - "LabelServiceClient", - "LineItemServiceClient", + "EntitySignalsMappingServiceClient", "NetworkServiceClient", "OrderServiceClient", "PlacementServiceClient", "ReportServiceClient", "RoleServiceClient", - "TeamServiceClient", + "TaxonomyCategoryServiceClient", "UserServiceClient", - "AdPartnerDeclaration", - "DeclarationTypeEnum", - "AdPartner", - "GetAdPartnerRequest", - "ListAdPartnersRequest", - "ListAdPartnersResponse", - "AppliedAdsenseEnabledEnum", + "AdUnitStatusEnum", + "SmartSizeModeEnum", + "TargetWindowEnum", "AdUnit", "AdUnitParent", - "GetAdUnitRequest", + "AdUnitSize", "LabelFrequencyCap", + "GetAdUnitRequest", + "ListAdUnitSizesRequest", + "ListAdUnitSizesResponse", "ListAdUnitsRequest", "ListAdUnitsResponse", - "SmartSizeModeEnum", - "TargetWindowEnum", - "AdUnitSize", "AdManagerError", "AppliedLabel", "CompanyCreditStatusEnum", @@ -236,16 +223,7 @@ "ListCompaniesRequest", "ListCompaniesResponse", "CompanyTypeEnum", - "ComputedStatusEnum", "Contact", - "GetContactRequest", - "ListContactsRequest", - "ListContactsResponse", - "CreativePlaceholder", - "Creative", - "GetCreativeRequest", - "ListCreativesRequest", - "ListCreativesResponse", "CustomFieldDataTypeEnum", "CustomFieldEntityTypeEnum", "CustomFieldStatusEnum", @@ -255,6 +233,7 @@ "GetCustomFieldRequest", "ListCustomFieldsRequest", "ListCustomFieldsResponse", + "CustomFieldValue", "CustomTargetingKeyReportableTypeEnum", "CustomTargetingKeyStatusEnum", "CustomTargetingKeyTypeEnum", @@ -268,53 +247,61 @@ "GetCustomTargetingValueRequest", "ListCustomTargetingValuesRequest", "ListCustomTargetingValuesResponse", + "EntitySignalsMapping", + "BatchCreateEntitySignalsMappingsRequest", + "BatchCreateEntitySignalsMappingsResponse", + "BatchUpdateEntitySignalsMappingsRequest", + "BatchUpdateEntitySignalsMappingsResponse", + 
"CreateEntitySignalsMappingRequest", + "GetEntitySignalsMappingRequest", + "ListEntitySignalsMappingsRequest", + "ListEntitySignalsMappingsResponse", + "UpdateEntitySignalsMappingRequest", "EnvironmentTypeEnum", "FrequencyCap", - "TimeUnitEnum", - "Goal", - "GoalTypeEnum", - "UnitTypeEnum", - "GetLabelRequest", "Label", - "ListLabelsRequest", - "ListLabelsResponse", - "CreativeRotationTypeEnum", - "DeliveryRateTypeEnum", - "LineItemCostTypeEnum", - "LineItemDiscountTypeEnum", - "LineItemTypeEnum", - "ReservationStatusEnum", - "GetLineItemRequest", - "LineItem", - "ListLineItemsRequest", - "ListLineItemsResponse", - "GetNetworkRequest", "Network", + "GetNetworkRequest", + "ListNetworksRequest", + "ListNetworksResponse", + "OrderStatusEnum", + "Order", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", - "Order", "PlacementStatusEnum", + "Placement", "GetPlacementRequest", "ListPlacementsRequest", "ListPlacementsResponse", - "Placement", - "ExportSavedReportMetadata", - "ExportSavedReportRequest", - "ExportSavedReportResponse", + "CreateReportRequest", + "FetchReportResultRowsRequest", + "FetchReportResultRowsResponse", + "GetReportRequest", + "ListReportsRequest", + "ListReportsResponse", "Report", + "ReportDefinition", + "RunReportMetadata", + "RunReportRequest", + "RunReportResponse", + "Schedule", + "ScheduleOptions", + "UpdateReportRequest", + "RoleStatusEnum", + "Role", "GetRoleRequest", "ListRolesRequest", "ListRolesResponse", - "Role", "Size", "SizeTypeEnum", - "GetTeamRequest", - "ListTeamsRequest", - "ListTeamsResponse", + "TaxonomyCategory", + "GetTaxonomyCategoryRequest", + "ListTaxonomyCategoriesRequest", + "ListTaxonomyCategoriesResponse", + "TaxonomyTypeEnum", "Team", - "GetUserRequest", - "ListUsersRequest", - "ListUsersResponse", + "TimeUnitEnum", "User", + "GetUserRequest", ) diff --git a/packages/google-ads-admanager/google/ads/admanager/gapic_version.py b/packages/google-ads-admanager/google/ads/admanager/gapic_version.py index 
3b0a9d9a8d43..364164ddb134 100644 --- a/packages/google-ads-admanager/google/ads/admanager/gapic_version.py +++ b/packages/google-ads-admanager/google/ads/admanager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py index b13eac5f2835..e2d73bf488ba 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py @@ -18,86 +18,59 @@ __version__ = package_version.__version__ -from .services.ad_partner_service import AdPartnerServiceClient from .services.ad_unit_service import AdUnitServiceClient from .services.company_service import CompanyServiceClient -from .services.contact_service import ContactServiceClient -from .services.creative_service import CreativeServiceClient from .services.custom_field_service import CustomFieldServiceClient from .services.custom_targeting_key_service import CustomTargetingKeyServiceClient from .services.custom_targeting_value_service import CustomTargetingValueServiceClient -from .services.label_service import LabelServiceClient -from .services.line_item_service import LineItemServiceClient +from .services.entity_signals_mapping_service import EntitySignalsMappingServiceClient from .services.network_service import NetworkServiceClient from .services.order_service import OrderServiceClient from .services.placement_service import PlacementServiceClient from .services.report_service import ReportServiceClient from .services.role_service import RoleServiceClient -from .services.team_service import TeamServiceClient +from .services.taxonomy_category_service import TaxonomyCategoryServiceClient from .services.user_service 
import UserServiceClient -from .types.ad_partner_declaration import AdPartnerDeclaration, DeclarationTypeEnum -from .types.ad_partner_service import ( - AdPartner, - GetAdPartnerRequest, - ListAdPartnersRequest, - ListAdPartnersResponse, -) -from .types.ad_unit_enums import AppliedAdsenseEnabledEnum +from .types.ad_unit_enums import AdUnitStatusEnum, SmartSizeModeEnum, TargetWindowEnum +from .types.ad_unit_messages import AdUnit, AdUnitParent, AdUnitSize, LabelFrequencyCap from .types.ad_unit_service import ( - AdUnit, - AdUnitParent, GetAdUnitRequest, - LabelFrequencyCap, + ListAdUnitSizesRequest, + ListAdUnitSizesResponse, ListAdUnitsRequest, ListAdUnitsResponse, - SmartSizeModeEnum, - TargetWindowEnum, ) -from .types.ad_unit_size import AdUnitSize from .types.admanager_error import AdManagerError from .types.applied_label import AppliedLabel from .types.company_credit_status_enum import CompanyCreditStatusEnum +from .types.company_messages import Company from .types.company_service import ( - Company, GetCompanyRequest, ListCompaniesRequest, ListCompaniesResponse, ) from .types.company_type_enum import CompanyTypeEnum -from .types.computed_status_enum import ComputedStatusEnum -from .types.contact_service import ( - Contact, - GetContactRequest, - ListContactsRequest, - ListContactsResponse, -) -from .types.creative_placeholder import CreativePlaceholder -from .types.creative_service import ( - Creative, - GetCreativeRequest, - ListCreativesRequest, - ListCreativesResponse, -) +from .types.contact_messages import Contact from .types.custom_field_enums import ( CustomFieldDataTypeEnum, CustomFieldEntityTypeEnum, CustomFieldStatusEnum, CustomFieldVisibilityEnum, ) +from .types.custom_field_messages import CustomField, CustomFieldOption from .types.custom_field_service import ( - CustomField, - CustomFieldOption, GetCustomFieldRequest, ListCustomFieldsRequest, ListCustomFieldsResponse, ) +from .types.custom_field_value import CustomFieldValue from 
.types.custom_targeting_key_enums import ( CustomTargetingKeyReportableTypeEnum, CustomTargetingKeyStatusEnum, CustomTargetingKeyTypeEnum, ) +from .types.custom_targeting_key_messages import CustomTargetingKey from .types.custom_targeting_key_service import ( - CustomTargetingKey, GetCustomTargetingKeyRequest, ListCustomTargetingKeysRequest, ListCustomTargetingKeysResponse, @@ -106,103 +79,102 @@ CustomTargetingValueMatchTypeEnum, CustomTargetingValueStatusEnum, ) +from .types.custom_targeting_value_messages import CustomTargetingValue from .types.custom_targeting_value_service import ( - CustomTargetingValue, GetCustomTargetingValueRequest, ListCustomTargetingValuesRequest, ListCustomTargetingValuesResponse, ) -from .types.environment_type_enum import EnvironmentTypeEnum -from .types.frequency_cap import FrequencyCap, TimeUnitEnum -from .types.goal import Goal, GoalTypeEnum, UnitTypeEnum -from .types.label_service import ( - GetLabelRequest, - Label, - ListLabelsRequest, - ListLabelsResponse, -) -from .types.line_item_enums import ( - CreativeRotationTypeEnum, - DeliveryRateTypeEnum, - LineItemCostTypeEnum, - LineItemDiscountTypeEnum, - LineItemTypeEnum, - ReservationStatusEnum, +from .types.entity_signals_mapping_messages import EntitySignalsMapping +from .types.entity_signals_mapping_service import ( + BatchCreateEntitySignalsMappingsRequest, + BatchCreateEntitySignalsMappingsResponse, + BatchUpdateEntitySignalsMappingsRequest, + BatchUpdateEntitySignalsMappingsResponse, + CreateEntitySignalsMappingRequest, + GetEntitySignalsMappingRequest, + ListEntitySignalsMappingsRequest, + ListEntitySignalsMappingsResponse, + UpdateEntitySignalsMappingRequest, ) -from .types.line_item_service import ( - GetLineItemRequest, - LineItem, - ListLineItemsRequest, - ListLineItemsResponse, -) -from .types.network_service import GetNetworkRequest, Network -from .types.order_service import ( - GetOrderRequest, - ListOrdersRequest, - ListOrdersResponse, - Order, +from 
.types.environment_type_enum import EnvironmentTypeEnum +from .types.frequency_cap import FrequencyCap +from .types.label_messages import Label +from .types.network_messages import Network +from .types.network_service import ( + GetNetworkRequest, + ListNetworksRequest, + ListNetworksResponse, ) +from .types.order_enums import OrderStatusEnum +from .types.order_messages import Order +from .types.order_service import GetOrderRequest, ListOrdersRequest, ListOrdersResponse from .types.placement_enums import PlacementStatusEnum +from .types.placement_messages import Placement from .types.placement_service import ( GetPlacementRequest, ListPlacementsRequest, ListPlacementsResponse, - Placement, ) from .types.report_service import ( - ExportSavedReportMetadata, - ExportSavedReportRequest, - ExportSavedReportResponse, + CreateReportRequest, + FetchReportResultRowsRequest, + FetchReportResultRowsResponse, + GetReportRequest, + ListReportsRequest, + ListReportsResponse, Report, + ReportDefinition, + RunReportMetadata, + RunReportRequest, + RunReportResponse, + Schedule, + ScheduleOptions, + UpdateReportRequest, ) -from .types.role_service import ( - GetRoleRequest, - ListRolesRequest, - ListRolesResponse, - Role, -) -from .types.size import Size, SizeTypeEnum -from .types.team_service import ( - GetTeamRequest, - ListTeamsRequest, - ListTeamsResponse, - Team, -) -from .types.user_service import ( - GetUserRequest, - ListUsersRequest, - ListUsersResponse, - User, +from .types.role_enums import RoleStatusEnum +from .types.role_messages import Role +from .types.role_service import GetRoleRequest, ListRolesRequest, ListRolesResponse +from .types.size import Size +from .types.size_type_enum import SizeTypeEnum +from .types.taxonomy_category_messages import TaxonomyCategory +from .types.taxonomy_category_service import ( + GetTaxonomyCategoryRequest, + ListTaxonomyCategoriesRequest, + ListTaxonomyCategoriesResponse, ) +from .types.taxonomy_type_enum import TaxonomyTypeEnum +from 
.types.team_messages import Team +from .types.time_unit_enum import TimeUnitEnum +from .types.user_messages import User +from .types.user_service import GetUserRequest __all__ = ( "AdManagerError", - "AdPartner", - "AdPartnerDeclaration", - "AdPartnerServiceClient", "AdUnit", "AdUnitParent", "AdUnitServiceClient", "AdUnitSize", - "AppliedAdsenseEnabledEnum", + "AdUnitStatusEnum", "AppliedLabel", + "BatchCreateEntitySignalsMappingsRequest", + "BatchCreateEntitySignalsMappingsResponse", + "BatchUpdateEntitySignalsMappingsRequest", + "BatchUpdateEntitySignalsMappingsResponse", "Company", "CompanyCreditStatusEnum", "CompanyServiceClient", "CompanyTypeEnum", - "ComputedStatusEnum", "Contact", - "ContactServiceClient", - "Creative", - "CreativePlaceholder", - "CreativeRotationTypeEnum", - "CreativeServiceClient", + "CreateEntitySignalsMappingRequest", + "CreateReportRequest", "CustomField", "CustomFieldDataTypeEnum", "CustomFieldEntityTypeEnum", "CustomFieldOption", "CustomFieldServiceClient", "CustomFieldStatusEnum", + "CustomFieldValue", "CustomFieldVisibilityEnum", "CustomTargetingKey", "CustomTargetingKeyReportableTypeEnum", @@ -213,89 +185,83 @@ "CustomTargetingValueMatchTypeEnum", "CustomTargetingValueServiceClient", "CustomTargetingValueStatusEnum", - "DeclarationTypeEnum", - "DeliveryRateTypeEnum", + "EntitySignalsMapping", + "EntitySignalsMappingServiceClient", "EnvironmentTypeEnum", - "ExportSavedReportMetadata", - "ExportSavedReportRequest", - "ExportSavedReportResponse", + "FetchReportResultRowsRequest", + "FetchReportResultRowsResponse", "FrequencyCap", - "GetAdPartnerRequest", "GetAdUnitRequest", "GetCompanyRequest", - "GetContactRequest", - "GetCreativeRequest", "GetCustomFieldRequest", "GetCustomTargetingKeyRequest", "GetCustomTargetingValueRequest", - "GetLabelRequest", - "GetLineItemRequest", + "GetEntitySignalsMappingRequest", "GetNetworkRequest", "GetOrderRequest", "GetPlacementRequest", + "GetReportRequest", "GetRoleRequest", - "GetTeamRequest", + 
"GetTaxonomyCategoryRequest", "GetUserRequest", - "Goal", - "GoalTypeEnum", "Label", "LabelFrequencyCap", - "LabelServiceClient", - "LineItem", - "LineItemCostTypeEnum", - "LineItemDiscountTypeEnum", - "LineItemServiceClient", - "LineItemTypeEnum", - "ListAdPartnersRequest", - "ListAdPartnersResponse", + "ListAdUnitSizesRequest", + "ListAdUnitSizesResponse", "ListAdUnitsRequest", "ListAdUnitsResponse", "ListCompaniesRequest", "ListCompaniesResponse", - "ListContactsRequest", - "ListContactsResponse", - "ListCreativesRequest", - "ListCreativesResponse", "ListCustomFieldsRequest", "ListCustomFieldsResponse", "ListCustomTargetingKeysRequest", "ListCustomTargetingKeysResponse", "ListCustomTargetingValuesRequest", "ListCustomTargetingValuesResponse", - "ListLabelsRequest", - "ListLabelsResponse", - "ListLineItemsRequest", - "ListLineItemsResponse", + "ListEntitySignalsMappingsRequest", + "ListEntitySignalsMappingsResponse", + "ListNetworksRequest", + "ListNetworksResponse", "ListOrdersRequest", "ListOrdersResponse", "ListPlacementsRequest", "ListPlacementsResponse", + "ListReportsRequest", + "ListReportsResponse", "ListRolesRequest", "ListRolesResponse", - "ListTeamsRequest", - "ListTeamsResponse", - "ListUsersRequest", - "ListUsersResponse", + "ListTaxonomyCategoriesRequest", + "ListTaxonomyCategoriesResponse", "Network", "NetworkServiceClient", "Order", "OrderServiceClient", + "OrderStatusEnum", "Placement", "PlacementServiceClient", "PlacementStatusEnum", "Report", + "ReportDefinition", "ReportServiceClient", - "ReservationStatusEnum", "Role", "RoleServiceClient", + "RoleStatusEnum", + "RunReportMetadata", + "RunReportRequest", + "RunReportResponse", + "Schedule", + "ScheduleOptions", "Size", "SizeTypeEnum", "SmartSizeModeEnum", "TargetWindowEnum", + "TaxonomyCategory", + "TaxonomyCategoryServiceClient", + "TaxonomyTypeEnum", "Team", - "TeamServiceClient", "TimeUnitEnum", - "UnitTypeEnum", + "UpdateEntitySignalsMappingRequest", + "UpdateReportRequest", "User", 
"UserServiceClient", ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json index 67680096a5d9..aa173a3cf11e 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json +++ b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json @@ -5,25 +5,6 @@ "protoPackage": "google.ads.admanager.v1", "schema": "1.0", "services": { - "AdPartnerService": { - "clients": { - "rest": { - "libraryClient": "AdPartnerServiceClient", - "rpcs": { - "GetAdPartner": { - "methods": [ - "get_ad_partner" - ] - }, - "ListAdPartners": { - "methods": [ - "list_ad_partners" - ] - } - } - } - } - }, "AdUnitService": { "clients": { "rest": { @@ -34,6 +15,11 @@ "get_ad_unit" ] }, + "ListAdUnitSizes": { + "methods": [ + "list_ad_unit_sizes" + ] + }, "ListAdUnits": { "methods": [ "list_ad_units" @@ -62,44 +48,6 @@ } } }, - "ContactService": { - "clients": { - "rest": { - "libraryClient": "ContactServiceClient", - "rpcs": { - "GetContact": { - "methods": [ - "get_contact" - ] - }, - "ListContacts": { - "methods": [ - "list_contacts" - ] - } - } - } - } - }, - "CreativeService": { - "clients": { - "rest": { - "libraryClient": "CreativeServiceClient", - "rpcs": { - "GetCreative": { - "methods": [ - "get_creative" - ] - }, - "ListCreatives": { - "methods": [ - "list_creatives" - ] - } - } - } - } - }, "CustomFieldService": { "clients": { "rest": { @@ -157,38 +105,39 @@ } } }, - "LabelService": { + "EntitySignalsMappingService": { "clients": { "rest": { - "libraryClient": "LabelServiceClient", + "libraryClient": "EntitySignalsMappingServiceClient", "rpcs": { - "GetLabel": { + "BatchCreateEntitySignalsMappings": { "methods": [ - "get_label" + "batch_create_entity_signals_mappings" ] }, - "ListLabels": { + "BatchUpdateEntitySignalsMappings": { "methods": [ - "list_labels" + "batch_update_entity_signals_mappings" ] - } - } - } - } - }, - 
"LineItemService": { - "clients": { - "rest": { - "libraryClient": "LineItemServiceClient", - "rpcs": { - "GetLineItem": { + }, + "CreateEntitySignalsMapping": { + "methods": [ + "create_entity_signals_mapping" + ] + }, + "GetEntitySignalsMapping": { "methods": [ - "get_line_item" + "get_entity_signals_mapping" ] }, - "ListLineItems": { + "ListEntitySignalsMappings": { "methods": [ - "list_line_items" + "list_entity_signals_mappings" + ] + }, + "UpdateEntitySignalsMapping": { + "methods": [ + "update_entity_signals_mapping" ] } } @@ -204,6 +153,11 @@ "methods": [ "get_network" ] + }, + "ListNetworks": { + "methods": [ + "list_networks" + ] } } } @@ -252,9 +206,34 @@ "rest": { "libraryClient": "ReportServiceClient", "rpcs": { - "ExportSavedReport": { + "CreateReport": { + "methods": [ + "create_report" + ] + }, + "FetchReportResultRows": { + "methods": [ + "fetch_report_result_rows" + ] + }, + "GetReport": { + "methods": [ + "get_report" + ] + }, + "ListReports": { "methods": [ - "export_saved_report" + "list_reports" + ] + }, + "RunReport": { + "methods": [ + "run_report" + ] + }, + "UpdateReport": { + "methods": [ + "update_report" ] } } @@ -280,19 +259,19 @@ } } }, - "TeamService": { + "TaxonomyCategoryService": { "clients": { "rest": { - "libraryClient": "TeamServiceClient", + "libraryClient": "TaxonomyCategoryServiceClient", "rpcs": { - "GetTeam": { + "GetTaxonomyCategory": { "methods": [ - "get_team" + "get_taxonomy_category" ] }, - "ListTeams": { + "ListTaxonomyCategories": { "methods": [ - "list_teams" + "list_taxonomy_categories" ] } } @@ -308,11 +287,6 @@ "methods": [ "get_user" ] - }, - "ListUsers": { - "methods": [ - "list_users" - ] } } } diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py index 3b0a9d9a8d43..364164ddb134 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py +++ 
b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py index e92cfbf83525..c8c6cb4564df 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py @@ -55,8 +55,8 @@ from google.ads.admanager_v1.services.ad_unit_service import pagers from google.ads.admanager_v1.types import ( ad_unit_enums, + ad_unit_messages, ad_unit_service, - ad_unit_size, applied_label, ) @@ -707,7 +707,7 @@ def __init__( transport_init: Union[ Type[AdUnitServiceTransport], Callable[..., AdUnitServiceTransport] ] = ( - type(self).get_transport_class(transport) + AdUnitServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AdUnitServiceTransport], transport) ) @@ -732,7 +732,7 @@ def get_ad_unit( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_unit_service.AdUnit: + ) -> ad_unit_messages.AdUnit: r"""API to retrieve an AdUnit object. .. code-block:: python @@ -942,6 +942,124 @@ def sample_list_ad_units(): # Done; return the response. 
return response + def list_ad_unit_sizes( + self, + request: Optional[Union[ad_unit_service.ListAdUnitSizesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdUnitSizesPager: + r"""API to retrieve a list of AdUnitSize objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_list_ad_unit_sizes(): + # Create a client + client = admanager_v1.AdUnitServiceClient() + + # Initialize request argument(s) + request = admanager_v1.ListAdUnitSizesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_ad_unit_sizes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.ListAdUnitSizesRequest, dict]): + The request object. Request object for ListAdUnitSizes + method. + parent (str): + Required. The parent, which owns this collection of + AdUnitSizes. Format: ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.admanager_v1.services.ad_unit_service.pagers.ListAdUnitSizesPager: + Response object for + ListAdUnitSizesRequest containing + matching AdUnitSizes. Iterating over + this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, ad_unit_service.ListAdUnitSizesRequest): + request = ad_unit_service.ListAdUnitSizesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_ad_unit_sizes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAdUnitSizesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "AdUnitServiceClient": return self diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py index 2ae957b1dea0..ebf38bec7995 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import ad_unit_service +from google.ads.admanager_v1.types import ad_unit_messages, ad_unit_service class ListAdUnitsPager: @@ -107,9 +107,83 @@ def pages(self) -> Iterator[ad_unit_service.ListAdUnitsResponse]: ) yield self._response - def __iter__(self) -> Iterator[ad_unit_service.AdUnit]: + def __iter__(self) -> Iterator[ad_unit_messages.AdUnit]: for page in self.pages: yield from page.ad_units def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAdUnitSizesPager: + """A pager for iterating through ``list_ad_unit_sizes`` requests. + + This class thinly wraps an initial + :class:`google.ads.admanager_v1.types.ListAdUnitSizesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``ad_unit_sizes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAdUnitSizes`` requests and continue to iterate + through the ``ad_unit_sizes`` field on the + corresponding responses. + + All the usual :class:`google.ads.admanager_v1.types.ListAdUnitSizesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., ad_unit_service.ListAdUnitSizesResponse], + request: ad_unit_service.ListAdUnitSizesRequest, + response: ad_unit_service.ListAdUnitSizesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.admanager_v1.types.ListAdUnitSizesRequest): + The initial request object. + response (google.ads.admanager_v1.types.ListAdUnitSizesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = ad_unit_service.ListAdUnitSizesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[ad_unit_service.ListAdUnitSizesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[ad_unit_messages.AdUnitSize]: + for page in self.pages: + yield from page.ad_unit_sizes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py 
b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py index 948cad87abb1..7852b164a55c 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import ad_unit_service +from google.ads.admanager_v1.types import ad_unit_messages, ad_unit_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -139,6 +139,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_ad_unit_sizes: gapic_v1.method.wrap_method( + self.list_ad_unit_sizes, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -155,7 +160,7 @@ def get_ad_unit( self, ) -> Callable[ [ad_unit_service.GetAdUnitRequest], - Union[ad_unit_service.AdUnit, Awaitable[ad_unit_service.AdUnit]], + Union[ad_unit_messages.AdUnit, Awaitable[ad_unit_messages.AdUnit]], ]: raise NotImplementedError() @@ -171,6 +176,18 @@ def list_ad_units( ]: raise NotImplementedError() + @property + def list_ad_unit_sizes( + self, + ) -> Callable[ + [ad_unit_service.ListAdUnitSizesRequest], + Union[ + ad_unit_service.ListAdUnitSizesResponse, + Awaitable[ad_unit_service.ListAdUnitSizesResponse], + ], + ]: + raise NotImplementedError() + @property def get_operation( self, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py index c6dd9d86e533..2c1ecebf5b66 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py +++ 
b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import ad_unit_service +from google.ads.admanager_v1.types import ad_unit_messages, ad_unit_service from .base import AdUnitServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -81,6 +81,14 @@ def post_list_ad_units(self, response): logging.log(f"Received response: {response}") return response + def pre_list_ad_unit_sizes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_ad_unit_sizes(self, response): + logging.log(f"Received response: {response}") + return response + transport = AdUnitServiceRestTransport(interceptor=MyCustomAdUnitServiceInterceptor()) client = AdUnitServiceClient(transport=transport) @@ -100,8 +108,8 @@ def pre_get_ad_unit( return request, metadata def post_get_ad_unit( - self, response: ad_unit_service.AdUnit - ) -> ad_unit_service.AdUnit: + self, response: ad_unit_messages.AdUnit + ) -> ad_unit_messages.AdUnit: """Post-rpc interceptor for get_ad_unit Override in a subclass to manipulate the response @@ -133,6 +141,29 @@ def post_list_ad_units( """ return response + def pre_list_ad_unit_sizes( + self, + request: ad_unit_service.ListAdUnitSizesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ad_unit_service.ListAdUnitSizesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_ad_unit_sizes + + Override in a subclass to manipulate the request or metadata + before they are sent to the AdUnitService server. 
+ """ + return request, metadata + + def post_list_ad_unit_sizes( + self, response: ad_unit_service.ListAdUnitSizesResponse + ) -> ad_unit_service.ListAdUnitSizesResponse: + """Post-rpc interceptor for list_ad_unit_sizes + + Override in a subclass to manipulate the response + after it is returned by the AdUnitService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -274,7 +305,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_unit_service.AdUnit: + ) -> ad_unit_messages.AdUnit: r"""Call the get ad unit method over HTTP. Args: @@ -287,7 +318,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.ad_unit_service.AdUnit: + ~.ad_unit_messages.AdUnit: The AdUnit resource. """ @@ -331,8 +362,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = ad_unit_service.AdUnit() - pb_resp = ad_unit_service.AdUnit.pb(resp) + resp = ad_unit_messages.AdUnit() + pb_resp = ad_unit_messages.AdUnit.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_ad_unit(resp) @@ -427,10 +458,101 @@ def __call__( resp = self._interceptor.post_list_ad_units(resp) return resp + class _ListAdUnitSizes(AdUnitServiceRestStub): + def __hash__(self): + return hash("ListAdUnitSizes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ad_unit_service.ListAdUnitSizesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> ad_unit_service.ListAdUnitSizesResponse: + r"""Call 
the list ad unit sizes method over HTTP. + + Args: + request (~.ad_unit_service.ListAdUnitSizesRequest): + The request object. Request object for ListAdUnitSizes + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.ad_unit_service.ListAdUnitSizesResponse: + Response object for + ListAdUnitSizesRequest containing + matching AdUnitSizes. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=networks/*}/adUnitSizes", + }, + ] + request, metadata = self._interceptor.pre_list_ad_unit_sizes( + request, metadata + ) + pb_request = ad_unit_service.ListAdUnitSizesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ad_unit_service.ListAdUnitSizesResponse() + pb_resp = ad_unit_service.ListAdUnitSizesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_ad_unit_sizes(resp) + return resp + @property def get_ad_unit( self, - ) -> Callable[[ad_unit_service.GetAdUnitRequest], ad_unit_service.AdUnit]: + ) -> Callable[[ad_unit_service.GetAdUnitRequest], ad_unit_messages.AdUnit]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetAdUnit(self._session, self._host, self._interceptor) # type: ignore @@ -445,6 +567,17 @@ def list_ad_units( # In C++ this would require a dynamic_cast return self._ListAdUnits(self._session, self._host, self._interceptor) # type: ignore + @property + def list_ad_unit_sizes( + self, + ) -> Callable[ + [ad_unit_service.ListAdUnitSizesRequest], + ad_unit_service.ListAdUnitSizesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAdUnitSizes(self._session, self._host, self._interceptor) # type: ignore + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -476,11 +609,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py index dfffec532f1b..85b49f138017 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py @@ -49,11 +49,13 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.ads.admanager_v1.services.company_service import pagers from google.ads.admanager_v1.types import ( applied_label, company_credit_status_enum, + company_messages, company_service, company_type_enum, ) @@ -728,7 +730,7 @@ def __init__( transport_init: Union[ Type[CompanyServiceTransport], Callable[..., CompanyServiceTransport] ] = ( - type(self).get_transport_class(transport) + CompanyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CompanyServiceTransport], transport) ) @@ -753,7 +755,7 @@ def get_company( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - 
) -> company_service.Company: + ) -> company_messages.Company: r"""API to retrieve a ``Company`` object. .. code-block:: python @@ -902,7 +904,7 @@ def sample_list_companies(): Returns: google.ads.admanager_v1.services.company_service.pagers.ListCompaniesPager: Response object for ListCompaniesRequest containing matching Company - resources. + objects. Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py index 8dd003e78650..7a1c65b16259 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import company_service +from google.ads.admanager_v1.types import company_messages, company_service class ListCompaniesPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[company_service.ListCompaniesResponse]: ) yield self._response - def __iter__(self) -> Iterator[company_service.Company]: + def __iter__(self) -> Iterator[company_messages.Company]: for page in self.pages: yield from page.companies diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py index 0415f3d70be7..3304a05b29c2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # 
type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import company_service +from google.ads.admanager_v1.types import company_messages, company_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -155,7 +155,7 @@ def get_company( self, ) -> Callable[ [company_service.GetCompanyRequest], - Union[company_service.Company, Awaitable[company_service.Company]], + Union[company_messages.Company, Awaitable[company_messages.Company]], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py index 3692c88b6fde..604ec04faf5a 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import company_service +from google.ads.admanager_v1.types import company_messages, company_service from .base import CompanyServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -100,8 +100,8 @@ def pre_get_company( return request, metadata def post_get_company( - self, response: company_service.Company - ) -> company_service.Company: + self, response: company_messages.Company + ) -> company_messages.Company: """Post-rpc interceptor for get_company Override in a subclass to manipulate the response @@ -274,7 +274,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> company_service.Company: + ) -> company_messages.Company: r"""Call the get company method over HTTP. 
Args: @@ -287,7 +287,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.company_service.Company: + ~.company_messages.Company: The ``Company`` resource. """ @@ -331,8 +331,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = company_service.Company() - pb_resp = company_service.Company.pb(resp) + resp = company_messages.Company() + pb_resp = company_messages.Company.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_company(resp) @@ -374,7 +374,7 @@ def __call__( Returns: ~.company_service.ListCompaniesResponse: Response object for ``ListCompaniesRequest`` containing - matching ``Company`` resources. + matching ``Company`` objects. """ @@ -428,7 +428,7 @@ def __call__( @property def get_company( self, - ) -> Callable[[company_service.GetCompanyRequest], company_service.Company]: + ) -> Callable[[company_service.GetCompanyRequest], company_messages.Company]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast return self._GetCompany(self._session, self._host, self._interceptor) # type: ignore @@ -474,11 +474,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py deleted file mode 100644 index 52a4f962b295..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py +++ /dev/null @@ -1,526 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import dataclasses -import json # type: ignore -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.protobuf import json_format -import grpc # type: ignore -from requests import __version__ as requests_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.types import contact_service - -from .base import ContactServiceTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class ContactServiceRestInterceptor: - """Interceptor for ContactService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the ContactServiceRestTransport. - - .. 
code-block:: python - class MyCustomContactServiceInterceptor(ContactServiceRestInterceptor): - def pre_get_contact(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_contact(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_contacts(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_contacts(self, response): - logging.log(f"Received response: {response}") - return response - - transport = ContactServiceRestTransport(interceptor=MyCustomContactServiceInterceptor()) - client = ContactServiceClient(transport=transport) - - - """ - - def pre_get_contact( - self, - request: contact_service.GetContactRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[contact_service.GetContactRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_contact - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContactService server. - """ - return request, metadata - - def post_get_contact( - self, response: contact_service.Contact - ) -> contact_service.Contact: - """Post-rpc interceptor for get_contact - - Override in a subclass to manipulate the response - after it is returned by the ContactService server but before - it is returned to user code. - """ - return response - - def pre_list_contacts( - self, - request: contact_service.ListContactsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[contact_service.ListContactsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_contacts - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContactService server. 
- """ - return request, metadata - - def post_list_contacts( - self, response: contact_service.ListContactsResponse - ) -> contact_service.ListContactsResponse: - """Post-rpc interceptor for list_contacts - - Override in a subclass to manipulate the response - after it is returned by the ContactService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContactService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the ContactService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class ContactServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: ContactServiceRestInterceptor - - -class ContactServiceRestTransport(ContactServiceTransport): - """REST backend transport for ContactService. - - Provides methods for handling Contact objects. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__( - self, - *, - host: str = "admanager.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[ContactServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or ContactServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _GetContact(ContactServiceRestStub): - def __hash__(self): - return hash("GetContact") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: contact_service.GetContactRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> contact_service.Contact: - r"""Call the get contact method over HTTP. - - Args: - request (~.contact_service.GetContactRequest): - The request object. Request object for GetContact method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.contact_service.Contact: - The Contact resource. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/contacts/*}", - }, - ] - request, metadata = self._interceptor.pre_get_contact(request, metadata) - pb_request = contact_service.GetContactRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = contact_service.Contact() - pb_resp = contact_service.Contact.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_contact(resp) - return resp - - class _ListContacts(ContactServiceRestStub): - def __hash__(self): - return hash("ListContacts") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: contact_service.ListContactsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> contact_service.ListContactsResponse: - r"""Call the list contacts method over HTTP. - - Args: - request (~.contact_service.ListContactsRequest): - The request object. Request object for ListContacts - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.contact_service.ListContactsResponse: - Response object for - ListContactsRequest containing matching - Contact resources. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=networks/*}/contacts", - }, - ] - request, metadata = self._interceptor.pre_list_contacts(request, metadata) - pb_request = contact_service.ListContactsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = contact_service.ListContactsResponse() - pb_resp = contact_service.ListContactsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_contacts(resp) - return resp - - @property - def get_contact( - self, - ) -> Callable[[contact_service.GetContactRequest], contact_service.Contact]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetContact(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_contacts( - self, - ) -> Callable[ - [contact_service.ListContactsRequest], contact_service.ListContactsResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListContacts(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(ContactServiceRestStub): - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", - }, - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("ContactServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py deleted file mode 100644 index 33aa7085f6e5..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py +++ /dev/null @@ -1,527 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import dataclasses -import json # type: ignore -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.protobuf import json_format -import grpc # type: ignore -from requests import __version__ as requests_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.types import creative_service - -from .base import CreativeServiceTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class CreativeServiceRestInterceptor: - """Interceptor for CreativeService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the CreativeServiceRestTransport. - - .. 
code-block:: python - class MyCustomCreativeServiceInterceptor(CreativeServiceRestInterceptor): - def pre_get_creative(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_creative(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_creatives(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_creatives(self, response): - logging.log(f"Received response: {response}") - return response - - transport = CreativeServiceRestTransport(interceptor=MyCustomCreativeServiceInterceptor()) - client = CreativeServiceClient(transport=transport) - - - """ - - def pre_get_creative( - self, - request: creative_service.GetCreativeRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[creative_service.GetCreativeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_creative - - Override in a subclass to manipulate the request or metadata - before they are sent to the CreativeService server. - """ - return request, metadata - - def post_get_creative( - self, response: creative_service.Creative - ) -> creative_service.Creative: - """Post-rpc interceptor for get_creative - - Override in a subclass to manipulate the response - after it is returned by the CreativeService server but before - it is returned to user code. - """ - return response - - def pre_list_creatives( - self, - request: creative_service.ListCreativesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[creative_service.ListCreativesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_creatives - - Override in a subclass to manipulate the request or metadata - before they are sent to the CreativeService server. 
- """ - return request, metadata - - def post_list_creatives( - self, response: creative_service.ListCreativesResponse - ) -> creative_service.ListCreativesResponse: - """Post-rpc interceptor for list_creatives - - Override in a subclass to manipulate the response - after it is returned by the CreativeService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CreativeService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the CreativeService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class CreativeServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: CreativeServiceRestInterceptor - - -class CreativeServiceRestTransport(CreativeServiceTransport): - """REST backend transport for CreativeService. - - Provides methods for handling Creative objects. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__( - self, - *, - host: str = "admanager.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[CreativeServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or CreativeServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _GetCreative(CreativeServiceRestStub): - def __hash__(self): - return hash("GetCreative") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: creative_service.GetCreativeRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> creative_service.Creative: - r"""Call the get creative method over HTTP. - - Args: - request (~.creative_service.GetCreativeRequest): - The request object. Request object for GetCreative - method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.creative_service.Creative: - The Creative resource. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/creatives/*}", - }, - ] - request, metadata = self._interceptor.pre_get_creative(request, metadata) - pb_request = creative_service.GetCreativeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = creative_service.Creative() - pb_resp = creative_service.Creative.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_creative(resp) - return resp - - class _ListCreatives(CreativeServiceRestStub): - def __hash__(self): - return hash("ListCreatives") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: creative_service.ListCreativesRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> creative_service.ListCreativesResponse: - r"""Call the list creatives method over HTTP. - - Args: - request (~.creative_service.ListCreativesRequest): - The request object. Request object for ListCreatives - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.creative_service.ListCreativesResponse: - Response object for - ListCreativesRequest containing matching - Creative resources. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=networks/*}/creatives", - }, - ] - request, metadata = self._interceptor.pre_list_creatives(request, metadata) - pb_request = creative_service.ListCreativesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = creative_service.ListCreativesResponse() - pb_resp = creative_service.ListCreativesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_creatives(resp) - return resp - - @property - def get_creative( - self, - ) -> Callable[[creative_service.GetCreativeRequest], creative_service.Creative]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetCreative(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_creatives( - self, - ) -> Callable[ - [creative_service.ListCreativesRequest], creative_service.ListCreativesResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListCreatives(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(CreativeServiceRestStub): - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", - }, - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("CreativeServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py index 37de0b69d4f3..986a135d17c5 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py @@ -51,7 +51,11 @@ from google.longrunning import operations_pb2 # type: ignore from google.ads.admanager_v1.services.custom_field_service import pagers -from google.ads.admanager_v1.types import custom_field_enums, custom_field_service +from google.ads.admanager_v1.types import ( + custom_field_enums, + custom_field_messages, + custom_field_service, +) from .transports.base import DEFAULT_CLIENT_INFO, CustomFieldServiceTransport from .transports.rest import CustomFieldServiceRestTransport @@ -675,7 +679,7 @@ def __init__( Type[CustomFieldServiceTransport], Callable[..., CustomFieldServiceTransport], ] = ( - type(self).get_transport_class(transport) + CustomFieldServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CustomFieldServiceTransport], transport) ) @@ -702,7 +706,7 @@ def get_custom_field( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_field_service.CustomField: + ) -> custom_field_messages.CustomField: r"""API to retrieve a ``CustomField`` 
object. .. code-block:: python @@ -749,7 +753,9 @@ def sample_get_custom_field(): Returns: google.ads.admanager_v1.types.CustomField: - The CustomField resource. + An additional, user-created field on + an entity. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py index f0a4e63f1c52..b11c6be336cc 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import custom_field_service +from google.ads.admanager_v1.types import custom_field_messages, custom_field_service class ListCustomFieldsPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[custom_field_service.ListCustomFieldsResponse]: ) yield self._response - def __iter__(self) -> Iterator[custom_field_service.CustomField]: + def __iter__(self) -> Iterator[custom_field_messages.CustomField]: for page in self.pages: yield from page.custom_fields diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py index 3578065cdf3f..97f76ac909c3 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from 
google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import custom_field_service +from google.ads.admanager_v1.types import custom_field_messages, custom_field_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -156,8 +156,8 @@ def get_custom_field( ) -> Callable[ [custom_field_service.GetCustomFieldRequest], Union[ - custom_field_service.CustomField, - Awaitable[custom_field_service.CustomField], + custom_field_messages.CustomField, + Awaitable[custom_field_messages.CustomField], ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py index 0da154858e92..4994a3e75121 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import custom_field_service +from google.ads.admanager_v1.types import custom_field_messages, custom_field_service from .base import CustomFieldServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -100,8 +100,8 @@ def pre_get_custom_field( return request, metadata def post_get_custom_field( - self, response: custom_field_service.CustomField - ) -> custom_field_service.CustomField: + self, response: custom_field_messages.CustomField + ) -> custom_field_messages.CustomField: """Post-rpc interceptor for get_custom_field Override in a subclass to manipulate the response @@ -274,7 +274,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> 
custom_field_service.CustomField: + ) -> custom_field_messages.CustomField: r"""Call the get custom field method over HTTP. Args: @@ -287,8 +287,10 @@ def __call__( sent along with the request as metadata. Returns: - ~.custom_field_service.CustomField: - The ``CustomField`` resource. + ~.custom_field_messages.CustomField: + An additional, user-created field on + an entity. + """ http_options: List[Dict[str, str]] = [ @@ -333,8 +335,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = custom_field_service.CustomField() - pb_resp = custom_field_service.CustomField.pb(resp) + resp = custom_field_messages.CustomField() + pb_resp = custom_field_messages.CustomField.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_field(resp) @@ -433,7 +435,7 @@ def __call__( def get_custom_field( self, ) -> Callable[ - [custom_field_service.GetCustomFieldRequest], custom_field_service.CustomField + [custom_field_service.GetCustomFieldRequest], custom_field_messages.CustomField ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast @@ -481,11 +483,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py index f2e5a41459cf..63992825ffe4 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py @@ -53,6 +53,7 @@ from google.ads.admanager_v1.services.custom_targeting_key_service import pagers from google.ads.admanager_v1.types import ( custom_targeting_key_enums, + custom_targeting_key_messages, custom_targeting_key_service, ) @@ -685,7 +686,7 @@ def __init__( Type[CustomTargetingKeyServiceTransport], Callable[..., CustomTargetingKeyServiceTransport], ] = ( - type(self).get_transport_class(transport) + CustomTargetingKeyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CustomTargetingKeyServiceTransport], transport) ) @@ -712,7 +713,7 @@ def get_custom_targeting_key( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_key_service.CustomTargetingKey: + ) -> custom_targeting_key_messages.CustomTargetingKey: r"""API to retrieve a ``CustomTargetingKey`` object. .. 
code-block:: python diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py index e56ff58da48c..88953ea7950c 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py @@ -38,7 +38,10 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import custom_targeting_key_service +from google.ads.admanager_v1.types import ( + custom_targeting_key_messages, + custom_targeting_key_service, +) class ListCustomTargetingKeysPager: @@ -113,7 +116,7 @@ def pages( ) yield self._response - def __iter__(self) -> Iterator[custom_targeting_key_service.CustomTargetingKey]: + def __iter__(self) -> Iterator[custom_targeting_key_messages.CustomTargetingKey]: for page in self.pages: yield from page.custom_targeting_keys diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py index a55f7a97d634..7e4925dd049f 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py @@ -26,7 +26,10 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import custom_targeting_key_service +from google.ads.admanager_v1.types import ( + custom_targeting_key_messages, + 
custom_targeting_key_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -156,8 +159,8 @@ def get_custom_targeting_key( ) -> Callable[ [custom_targeting_key_service.GetCustomTargetingKeyRequest], Union[ - custom_targeting_key_service.CustomTargetingKey, - Awaitable[custom_targeting_key_service.CustomTargetingKey], + custom_targeting_key_messages.CustomTargetingKey, + Awaitable[custom_targeting_key_messages.CustomTargetingKey], ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py index 5ea81bb49e6c..6b9540dc0b60 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py @@ -38,7 +38,10 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import custom_targeting_key_service +from google.ads.admanager_v1.types import ( + custom_targeting_key_messages, + custom_targeting_key_service, +) from .base import CustomTargetingKeyServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -103,8 +106,8 @@ def pre_get_custom_targeting_key( return request, metadata def post_get_custom_targeting_key( - self, response: custom_targeting_key_service.CustomTargetingKey - ) -> custom_targeting_key_service.CustomTargetingKey: + self, response: custom_targeting_key_messages.CustomTargetingKey + ) -> custom_targeting_key_messages.CustomTargetingKey: """Post-rpc interceptor for get_custom_targeting_key Override in a subclass to manipulate the response @@ -280,7 +283,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, 
metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_key_service.CustomTargetingKey: + ) -> custom_targeting_key_messages.CustomTargetingKey: r"""Call the get custom targeting key method over HTTP. Args: @@ -293,7 +296,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.custom_targeting_key_service.CustomTargetingKey: + ~.custom_targeting_key_messages.CustomTargetingKey: The ``CustomTargetingKey`` resource. """ @@ -341,8 +344,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = custom_targeting_key_service.CustomTargetingKey() - pb_resp = custom_targeting_key_service.CustomTargetingKey.pb(resp) + resp = custom_targeting_key_messages.CustomTargetingKey() + pb_resp = custom_targeting_key_messages.CustomTargetingKey.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_targeting_key(resp) @@ -447,7 +450,7 @@ def get_custom_targeting_key( self, ) -> Callable[ [custom_targeting_key_service.GetCustomTargetingKeyRequest], - custom_targeting_key_service.CustomTargetingKey, + custom_targeting_key_messages.CustomTargetingKey, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast @@ -495,11 +498,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py index f05268ef0763..6c03f1fb4c53 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py @@ -53,6 +53,7 @@ from google.ads.admanager_v1.services.custom_targeting_value_service import pagers from google.ads.admanager_v1.types import ( custom_targeting_value_enums, + custom_targeting_value_messages, custom_targeting_value_service, ) @@ -694,7 +695,7 @@ def __init__( Type[CustomTargetingValueServiceTransport], Callable[..., CustomTargetingValueServiceTransport], ] = ( - type(self).get_transport_class(transport) + CustomTargetingValueServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., CustomTargetingValueServiceTransport], transport @@ -723,7 +724,7 @@ def get_custom_targeting_value( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_value_service.CustomTargetingValue: + ) -> custom_targeting_value_messages.CustomTargetingValue: r"""API to retrieve a ``CustomTargetingValue`` object. .. 
code-block:: python diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py index 09ef836cdb72..214d53becdec 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py @@ -38,7 +38,10 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import custom_targeting_value_service +from google.ads.admanager_v1.types import ( + custom_targeting_value_messages, + custom_targeting_value_service, +) class ListCustomTargetingValuesPager: @@ -113,7 +116,9 @@ def pages( ) yield self._response - def __iter__(self) -> Iterator[custom_targeting_value_service.CustomTargetingValue]: + def __iter__( + self, + ) -> Iterator[custom_targeting_value_messages.CustomTargetingValue]: for page in self.pages: yield from page.custom_targeting_values diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py index a9d002f465f7..0ed99f654001 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py @@ -26,7 +26,10 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import custom_targeting_value_service +from google.ads.admanager_v1.types import ( + 
custom_targeting_value_messages, + custom_targeting_value_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -156,8 +159,8 @@ def get_custom_targeting_value( ) -> Callable[ [custom_targeting_value_service.GetCustomTargetingValueRequest], Union[ - custom_targeting_value_service.CustomTargetingValue, - Awaitable[custom_targeting_value_service.CustomTargetingValue], + custom_targeting_value_messages.CustomTargetingValue, + Awaitable[custom_targeting_value_messages.CustomTargetingValue], ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py index 1154a1bebe68..4706f5043211 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py @@ -38,7 +38,10 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import custom_targeting_value_service +from google.ads.admanager_v1.types import ( + custom_targeting_value_messages, + custom_targeting_value_service, +) from .base import CustomTargetingValueServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -103,8 +106,8 @@ def pre_get_custom_targeting_value( return request, metadata def post_get_custom_targeting_value( - self, response: custom_targeting_value_service.CustomTargetingValue - ) -> custom_targeting_value_service.CustomTargetingValue: + self, response: custom_targeting_value_messages.CustomTargetingValue + ) -> custom_targeting_value_messages.CustomTargetingValue: """Post-rpc interceptor for get_custom_targeting_value Override in a subclass to manipulate the response @@ -280,7 +283,7 @@ 
def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_value_service.CustomTargetingValue: + ) -> custom_targeting_value_messages.CustomTargetingValue: r"""Call the get custom targeting value method over HTTP. @@ -294,7 +297,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.custom_targeting_value_service.CustomTargetingValue: + ~.custom_targeting_value_messages.CustomTargetingValue: The ``CustomTargetingValue`` resource. """ @@ -344,8 +347,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = custom_targeting_value_service.CustomTargetingValue() - pb_resp = custom_targeting_value_service.CustomTargetingValue.pb(resp) + resp = custom_targeting_value_messages.CustomTargetingValue() + pb_resp = custom_targeting_value_messages.CustomTargetingValue.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_targeting_value(resp) @@ -454,7 +457,7 @@ def get_custom_targeting_value( self, ) -> Callable[ [custom_targeting_value_service.GetCustomTargetingValueRequest], - custom_targeting_value_service.CustomTargetingValue, + custom_targeting_value_messages.CustomTargetingValue, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast @@ -502,11 +505,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/__init__.py new file mode 100644 index 000000000000..3b03f6d3f9dc --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import EntitySignalsMappingServiceClient + +__all__ = ("EntitySignalsMappingServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py new file mode 100644 index 000000000000..fe94c89d85d2 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py @@ -0,0 +1,1559 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ads.admanager_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.ads.admanager_v1.services.entity_signals_mapping_service import pagers +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) + +from .transports.base import DEFAULT_CLIENT_INFO, EntitySignalsMappingServiceTransport +from .transports.rest import EntitySignalsMappingServiceRestTransport + + +class EntitySignalsMappingServiceClientMeta(type): + """Metaclass for the EntitySignalsMappingService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[EntitySignalsMappingServiceTransport]] + _transport_registry["rest"] = EntitySignalsMappingServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[EntitySignalsMappingServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class EntitySignalsMappingServiceClient( + metaclass=EntitySignalsMappingServiceClientMeta +): + """Provides methods for handling ``EntitySignalsMapping`` objects.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "admanager.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "admanager.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EntitySignalsMappingServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EntitySignalsMappingServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> EntitySignalsMappingServiceTransport: + """Returns the transport used by the client instance. + + Returns: + EntitySignalsMappingServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def entity_signals_mapping_path( + network_code: str, + entity_signals_mapping: str, + ) -> str: + """Returns a fully-qualified entity_signals_mapping string.""" + return "networks/{network_code}/entitySignalsMappings/{entity_signals_mapping}".format( + network_code=network_code, + entity_signals_mapping=entity_signals_mapping, + ) + + @staticmethod + def parse_entity_signals_mapping_path(path: str) -> Dict[str, str]: + """Parses a entity_signals_mapping path into its component segments.""" + m = re.match( + r"^networks/(?P.+?)/entitySignalsMappings/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def network_path( + network_code: str, + ) -> str: + """Returns a fully-qualified network string.""" + return "networks/{network_code}".format( + network_code=network_code, + ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str, str]: + """Parses a network path into its component segments.""" + m = re.match(r"^networks/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + 
organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = EntitySignalsMappingServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + EntitySignalsMappingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. 
+ + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or EntitySignalsMappingServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + EntitySignalsMappingServiceTransport, + Callable[..., EntitySignalsMappingServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the entity signals mapping service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,EntitySignalsMappingServiceTransport,Callable[..., EntitySignalsMappingServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the EntitySignalsMappingServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = EntitySignalsMappingServiceClient._read_environment_variables() + self._client_cert_source = ( + EntitySignalsMappingServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = EntitySignalsMappingServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, EntitySignalsMappingServiceTransport) + if transport_provided: + # transport is a EntitySignalsMappingServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(EntitySignalsMappingServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or EntitySignalsMappingServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[EntitySignalsMappingServiceTransport], + Callable[..., EntitySignalsMappingServiceTransport], + ] = ( + EntitySignalsMappingServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., EntitySignalsMappingServiceTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_entity_signals_mapping( + self, + request: Optional[ + Union[entity_signals_mapping_service.GetEntitySignalsMappingRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""API to retrieve a ``EntitySignalsMapping`` object. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_get_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + request = admanager_v1.GetEntitySignalsMappingRequest( + name="name_value", + ) + + # Make the request + response = client.get_entity_signals_mapping(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.GetEntitySignalsMappingRequest, dict]): + The request object. Request object for ``GetEntitySignalsMapping`` method. + name (str): + Required. The resource name of the EntitySignalsMapping. + Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.EntitySignalsMapping: + The EntitySignalsMapping resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, entity_signals_mapping_service.GetEntitySignalsMappingRequest + ): + request = entity_signals_mapping_service.GetEntitySignalsMappingRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_entity_signals_mapping + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_entity_signals_mappings( + self, + request: Optional[ + Union[entity_signals_mapping_service.ListEntitySignalsMappingsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntitySignalsMappingsPager: + r"""API to retrieve a list of ``EntitySignalsMapping`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_list_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + request = admanager_v1.ListEntitySignalsMappingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entity_signals_mappings(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.ListEntitySignalsMappingsRequest, dict]): + The request object. Request object for ``ListEntitySignalsMappings`` method. + parent (str): + Required. The parent, which owns this collection of + EntitySignalsMappings. Format: + ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.services.entity_signals_mapping_service.pagers.ListEntitySignalsMappingsPager: + Response object for ListEntitySignalsMappingsRequest containing matching + EntitySignalsMapping resources. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, entity_signals_mapping_service.ListEntitySignalsMappingsRequest + ): + request = entity_signals_mapping_service.ListEntitySignalsMappingsRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_entity_signals_mappings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntitySignalsMappingsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_entity_signals_mapping( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest, dict + ] + ] = None, + *, + parent: Optional[str] = None, + entity_signals_mapping: Optional[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""API to create an ``EntitySignalsMapping`` object. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_create_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + entity_signals_mapping = admanager_v1.EntitySignalsMapping() + entity_signals_mapping.audience_segment_id = 1980 + entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.CreateEntitySignalsMappingRequest( + parent="parent_value", + entity_signals_mapping=entity_signals_mapping, + ) + + # Make the request + response = client.create_entity_signals_mapping(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest, dict]): + The request object. Request object for + 'CreateEntitySignalsMapping' method. + parent (str): + Required. The parent resource where this + EntitySignalsMapping will be created. 
Format: + ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity_signals_mapping (google.ads.admanager_v1.types.EntitySignalsMapping): + Required. The EntitySignalsMapping + object to create. + + This corresponds to the ``entity_signals_mapping`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.EntitySignalsMapping: + The EntitySignalsMapping resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entity_signals_mapping]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, entity_signals_mapping_service.CreateEntitySignalsMappingRequest + ): + request = entity_signals_mapping_service.CreateEntitySignalsMappingRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entity_signals_mapping is not None: + request.entity_signals_mapping = entity_signals_mapping + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.create_entity_signals_mapping + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_entity_signals_mapping( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, dict + ] + ] = None, + *, + entity_signals_mapping: Optional[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""API to update an ``EntitySignalsMapping`` object. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_update_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + entity_signals_mapping = admanager_v1.EntitySignalsMapping() + entity_signals_mapping.audience_segment_id = 1980 + entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.UpdateEntitySignalsMappingRequest( + entity_signals_mapping=entity_signals_mapping, + ) + + # Make the request + response = client.update_entity_signals_mapping(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest, dict]): + The request object. Request object for + 'UpdateEntitySignalsMapping' method. + entity_signals_mapping (google.ads.admanager_v1.types.EntitySignalsMapping): + Required. The ``EntitySignalsMapping`` to update. + + The EntitySignalsMapping's name is used to identify the + EntitySignalsMapping to update. Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping}`` + + This corresponds to the ``entity_signals_mapping`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.admanager_v1.types.EntitySignalsMapping: + The EntitySignalsMapping resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entity_signals_mapping, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, entity_signals_mapping_service.UpdateEntitySignalsMappingRequest + ): + request = entity_signals_mapping_service.UpdateEntitySignalsMappingRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entity_signals_mapping is not None: + request.entity_signals_mapping = entity_signals_mapping + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_entity_signals_mapping + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("entity_signals_mapping.name", request.entity_signals_mapping.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def batch_create_entity_signals_mappings( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest + ] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + r"""API to batch create ``EntitySignalsMapping`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_batch_create_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + requests = admanager_v1.CreateEntitySignalsMappingRequest() + requests.parent = "parent_value" + requests.entity_signals_mapping.audience_segment_id = 1980 + requests.entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.BatchCreateEntitySignalsMappingsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_create_entity_signals_mappings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.BatchCreateEntitySignalsMappingsRequest, dict]): + The request object. Request object for ``BatchCreateEntitySignalsMappings`` + method. + parent (str): + Required. 
The parent resource where + ``EntitySignalsMappings`` will be created. Format: + ``networks/{network_code}`` The parent field in the + CreateEntitySignalsMappingRequest must match this field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest]): + Required. The ``EntitySignalsMapping`` objects to + create. A maximum of 100 objects can be created in a + batch. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.BatchCreateEntitySignalsMappingsResponse: + Response object for BatchCreateEntitySignalsMappings + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + ): + request = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_create_entity_signals_mappings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_update_entity_signals_mappings( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest + ] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + r"""API to batch update ``EntitySignalsMapping`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_batch_update_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + requests = admanager_v1.UpdateEntitySignalsMappingRequest() + requests.entity_signals_mapping.audience_segment_id = 1980 + requests.entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.BatchUpdateEntitySignalsMappingsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_update_entity_signals_mappings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.BatchUpdateEntitySignalsMappingsRequest, dict]): + The request object. Request object for ``BatchUpdateEntitySignalsMappings`` + method. + parent (str): + Required. The parent resource where + ``EntitySignalsMappings`` will be updated. Format: + ``networks/{network_code}`` The parent field in the + UpdateEntitySignalsMappingRequest must match this field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest]): + Required. The ``EntitySignalsMapping`` objects to + update. A maximum of 100 objects can be updated in a + batch. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.admanager_v1.types.BatchUpdateEntitySignalsMappingsResponse: + Response object for BatchUpdateEntitySignalsMappings + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + ): + request = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_update_entity_signals_mappings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "EntitySignalsMappingServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. 
warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("EntitySignalsMappingServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/pagers.py similarity index 69% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/pagers.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/pagers.py index fda9c7f48045..464c0fe8d515 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/pagers.py @@ -38,32 +38,37 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import creative_service +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) -class ListCreativesPager: - """A pager for iterating through ``list_creatives`` requests. +class ListEntitySignalsMappingsPager: + """A pager for iterating through ``list_entity_signals_mappings`` requests. This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListCreativesResponse` object, and + :class:`google.ads.admanager_v1.types.ListEntitySignalsMappingsResponse` object, and provides an ``__iter__`` method to iterate through its - ``creatives`` field. + ``entity_signals_mappings`` field. 
If there are more pages, the ``__iter__`` method will make additional - ``ListCreatives`` requests and continue to iterate - through the ``creatives`` field on the + ``ListEntitySignalsMappings`` requests and continue to iterate + through the ``entity_signals_mappings`` field on the corresponding responses. - All the usual :class:`google.ads.admanager_v1.types.ListCreativesResponse` + All the usual :class:`google.ads.admanager_v1.types.ListEntitySignalsMappingsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., creative_service.ListCreativesResponse], - request: creative_service.ListCreativesRequest, - response: creative_service.ListCreativesResponse, + method: Callable[ + ..., entity_signals_mapping_service.ListEntitySignalsMappingsResponse + ], + request: entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + response: entity_signals_mapping_service.ListEntitySignalsMappingsResponse, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -74,9 +79,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.ads.admanager_v1.types.ListCreativesRequest): + request (google.ads.admanager_v1.types.ListEntitySignalsMappingsRequest): The initial request object. - response (google.ads.admanager_v1.types.ListCreativesResponse): + response (google.ads.admanager_v1.types.ListEntitySignalsMappingsResponse): The initial response object. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -85,7 +90,9 @@ def __init__( sent along with the request as metadata. 
""" self._method = method - self._request = creative_service.ListCreativesRequest(request) + self._request = entity_signals_mapping_service.ListEntitySignalsMappingsRequest( + request + ) self._response = response self._retry = retry self._timeout = timeout @@ -95,7 +102,9 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[creative_service.ListCreativesResponse]: + def pages( + self, + ) -> Iterator[entity_signals_mapping_service.ListEntitySignalsMappingsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -107,9 +116,11 @@ def pages(self) -> Iterator[creative_service.ListCreativesResponse]: ) yield self._response - def __iter__(self) -> Iterator[creative_service.Creative]: + def __iter__( + self, + ) -> Iterator[entity_signals_mapping_messages.EntitySignalsMapping]: for page in self.pages: - yield from page.creatives + yield from page.entity_signals_mappings def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/__init__.py new file mode 100644 index 000000000000..a842b7667625 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import EntitySignalsMappingServiceTransport +from .rest import ( + EntitySignalsMappingServiceRestInterceptor, + EntitySignalsMappingServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[EntitySignalsMappingServiceTransport]] +_transport_registry["rest"] = EntitySignalsMappingServiceRestTransport + +__all__ = ( + "EntitySignalsMappingServiceTransport", + "EntitySignalsMappingServiceRestTransport", + "EntitySignalsMappingServiceRestInterceptor", +) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/base.py new file mode 100644 index 000000000000..cc29ed2e8641 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/base.py @@ -0,0 +1,266 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ads.admanager_v1 import gapic_version as package_version +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class EntitySignalsMappingServiceTransport(abc.ABC): + """Abstract transport class for EntitySignalsMappingService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "admanager.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'admanager.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_entity_signals_mapping: gapic_v1.method.wrap_method( + self.get_entity_signals_mapping, + default_timeout=None, + client_info=client_info, + ), + self.list_entity_signals_mappings: gapic_v1.method.wrap_method( + self.list_entity_signals_mappings, + default_timeout=None, + client_info=client_info, + ), + self.create_entity_signals_mapping: gapic_v1.method.wrap_method( + self.create_entity_signals_mapping, + default_timeout=None, + client_info=client_info, + ), + self.update_entity_signals_mapping: gapic_v1.method.wrap_method( + self.update_entity_signals_mapping, + default_timeout=None, + client_info=client_info, + ), + self.batch_create_entity_signals_mappings: gapic_v1.method.wrap_method( + self.batch_create_entity_signals_mappings, + default_timeout=None, + client_info=client_info, + ), + self.batch_update_entity_signals_mappings: gapic_v1.method.wrap_method( + self.batch_update_entity_signals_mappings, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.GetEntitySignalsMappingRequest], + Union[ + entity_signals_mapping_messages.EntitySignalsMapping, + Awaitable[entity_signals_mapping_messages.EntitySignalsMapping], + ], + ]: + raise NotImplementedError() + + @property + def list_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.ListEntitySignalsMappingsRequest], + Union[ + entity_signals_mapping_service.ListEntitySignalsMappingsResponse, + Awaitable[entity_signals_mapping_service.ListEntitySignalsMappingsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.CreateEntitySignalsMappingRequest], + Union[ + entity_signals_mapping_messages.EntitySignalsMapping, + Awaitable[entity_signals_mapping_messages.EntitySignalsMapping], + ], + ]: + raise NotImplementedError() + + @property + def update_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.UpdateEntitySignalsMappingRequest], + Union[ + entity_signals_mapping_messages.EntitySignalsMapping, + Awaitable[entity_signals_mapping_messages.EntitySignalsMapping], + ], + ]: + raise NotImplementedError() + + @property + def batch_create_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest], + Union[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + Awaitable[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse + ], + ], + ]: + raise NotImplementedError() + + @property + def batch_update_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest], + Union[ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + Awaitable[ + 
entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse + ], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("EntitySignalsMappingServiceTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py new file mode 100644 index 000000000000..a6e1199ef6ef --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py @@ -0,0 +1,1153 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import EntitySignalsMappingServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class EntitySignalsMappingServiceRestInterceptor: + """Interceptor for EntitySignalsMappingService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the EntitySignalsMappingServiceRestTransport. + + .. 
code-block:: python + class MyCustomEntitySignalsMappingServiceInterceptor(EntitySignalsMappingServiceRestInterceptor): + def pre_batch_create_entity_signals_mappings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_entity_signals_mappings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_update_entity_signals_mappings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_update_entity_signals_mappings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_entity_signals_mapping(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_entity_signals_mapping(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_entity_signals_mapping(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_entity_signals_mapping(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_entity_signals_mappings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_entity_signals_mappings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_entity_signals_mapping(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_entity_signals_mapping(self, response): + logging.log(f"Received response: {response}") + return response + + transport = EntitySignalsMappingServiceRestTransport(interceptor=MyCustomEntitySignalsMappingServiceInterceptor()) + client = EntitySignalsMappingServiceClient(transport=transport) + + + """ + + def pre_batch_create_entity_signals_mappings( + self, + 
request: entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for batch_create_entity_signals_mappings + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_batch_create_entity_signals_mappings( + self, + response: entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + ) -> entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + """Post-rpc interceptor for batch_create_entity_signals_mappings + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_batch_update_entity_signals_mappings( + self, + request: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for batch_update_entity_signals_mappings + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_batch_update_entity_signals_mappings( + self, + response: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + ) -> entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + """Post-rpc interceptor for batch_update_entity_signals_mappings + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. 
+ """ + return response + + def pre_create_entity_signals_mapping( + self, + request: entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_entity_signals_mapping + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_create_entity_signals_mapping( + self, response: entity_signals_mapping_messages.EntitySignalsMapping + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + """Post-rpc interceptor for create_entity_signals_mapping + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_get_entity_signals_mapping( + self, + request: entity_signals_mapping_service.GetEntitySignalsMappingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.GetEntitySignalsMappingRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for get_entity_signals_mapping + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_get_entity_signals_mapping( + self, response: entity_signals_mapping_messages.EntitySignalsMapping + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + """Post-rpc interceptor for get_entity_signals_mapping + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_entity_signals_mappings( + self, + request: entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_entity_signals_mappings + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_list_entity_signals_mappings( + self, response: entity_signals_mapping_service.ListEntitySignalsMappingsResponse + ) -> entity_signals_mapping_service.ListEntitySignalsMappingsResponse: + """Post-rpc interceptor for list_entity_signals_mappings + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_update_entity_signals_mapping( + self, + request: entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for update_entity_signals_mapping + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_update_entity_signals_mapping( + self, response: entity_signals_mapping_messages.EntitySignalsMapping + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + """Post-rpc interceptor for update_entity_signals_mapping + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class EntitySignalsMappingServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: EntitySignalsMappingServiceRestInterceptor + + +class EntitySignalsMappingServiceRestTransport(EntitySignalsMappingServiceTransport): + """REST backend transport for EntitySignalsMappingService. + + Provides methods for handling ``EntitySignalsMapping`` objects. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "admanager.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[EntitySignalsMappingServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'admanager.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or EntitySignalsMappingServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _BatchCreateEntitySignalsMappings(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("BatchCreateEntitySignalsMappings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + r"""Call the batch 
create entity + signals mappings method over HTTP. + + Args: + request (~.entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest): + The request object. Request object for ``BatchCreateEntitySignalsMappings`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + Response object for ``BatchCreateEntitySignalsMappings`` + method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings:batchCreate", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_batch_create_entity_signals_mappings( + request, metadata + ) + pb_request = entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError 
exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + pb_resp = entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_entity_signals_mappings(resp) + return resp + + class _BatchUpdateEntitySignalsMappings(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("BatchUpdateEntitySignalsMappings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + r"""Call the batch update entity + signals mappings method over HTTP. + + Args: + request (~.entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest): + The request object. Request object for ``BatchUpdateEntitySignalsMappings`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + Response object for ``BatchUpdateEntitySignalsMappings`` + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings:batchUpdate", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_batch_update_entity_signals_mappings( + request, metadata + ) + pb_request = entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + pb_resp = entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_update_entity_signals_mappings(resp) + return resp + + class _CreateEntitySignalsMapping(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("CreateEntitySignalsMapping") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""Call the create entity signals + mapping method over HTTP. + + Args: + request (~.entity_signals_mapping_service.CreateEntitySignalsMappingRequest): + The request object. Request object for + 'CreateEntitySignalsMapping' method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_messages.EntitySignalsMapping: + The ``EntitySignalsMapping`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings", + "body": "entity_signals_mapping", + }, + ] + request, metadata = self._interceptor.pre_create_entity_signals_mapping( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.CreateEntitySignalsMappingRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_messages.EntitySignalsMapping() + pb_resp = entity_signals_mapping_messages.EntitySignalsMapping.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_entity_signals_mapping(resp) + return resp + + class _GetEntitySignalsMapping(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("GetEntitySignalsMapping") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.GetEntitySignalsMappingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""Call the get entity signals + mapping method over HTTP. + + Args: + request (~.entity_signals_mapping_service.GetEntitySignalsMappingRequest): + The request object. Request object for ``GetEntitySignalsMapping`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_messages.EntitySignalsMapping: + The ``EntitySignalsMapping`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=networks/*/entitySignalsMappings/*}", + }, + ] + request, metadata = self._interceptor.pre_get_entity_signals_mapping( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.GetEntitySignalsMappingRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_messages.EntitySignalsMapping() + pb_resp = entity_signals_mapping_messages.EntitySignalsMapping.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_entity_signals_mapping(resp) + return resp + + class _ListEntitySignalsMappings(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("ListEntitySignalsMappings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.ListEntitySignalsMappingsResponse: + r"""Call the list entity signals + mappings method over HTTP. + + Args: + request (~.entity_signals_mapping_service.ListEntitySignalsMappingsRequest): + The request object. Request object for ``ListEntitySignalsMappings`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_service.ListEntitySignalsMappingsResponse: + Response object for ``ListEntitySignalsMappingsRequest`` + containing matching ``EntitySignalsMapping`` resources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings", + }, + ] + request, metadata = self._interceptor.pre_list_entity_signals_mappings( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.ListEntitySignalsMappingsRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_service.ListEntitySignalsMappingsResponse() + pb_resp = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.pb( + resp + ) + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_entity_signals_mappings(resp) + return resp + + class _UpdateEntitySignalsMapping(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("UpdateEntitySignalsMapping") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""Call the update entity signals + mapping method over HTTP. + + Args: + request (~.entity_signals_mapping_service.UpdateEntitySignalsMappingRequest): + The request object. Request object for + 'UpdateEntitySignalsMapping' method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_messages.EntitySignalsMapping: + The ``EntitySignalsMapping`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{entity_signals_mapping.name=networks/*/entitySignalsMappings/*}", + "body": "entity_signals_mapping", + }, + ] + request, metadata = self._interceptor.pre_update_entity_signals_mapping( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_messages.EntitySignalsMapping() + pb_resp = entity_signals_mapping_messages.EntitySignalsMapping.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_entity_signals_mapping(resp) + return resp + + @property + def batch_create_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest], + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateEntitySignalsMappings(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_update_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest], + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchUpdateEntitySignalsMappings(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.CreateEntitySignalsMappingRequest], + entity_signals_mapping_messages.EntitySignalsMapping, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateEntitySignalsMapping(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.GetEntitySignalsMappingRequest], + entity_signals_mapping_messages.EntitySignalsMapping, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEntitySignalsMapping(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.ListEntitySignalsMappingsRequest], + entity_signals_mapping_service.ListEntitySignalsMappingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEntitySignalsMappings(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.UpdateEntitySignalsMappingRequest], + entity_signals_mapping_messages.EntitySignalsMapping, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateEntitySignalsMapping(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(EntitySignalsMappingServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + }, + { + "method": "get", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("EntitySignalsMappingServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/rest.py deleted file mode 100644 index 3bcda39fe9ab..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/rest.py +++ /dev/null @@ -1,520 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import dataclasses -import json # type: ignore -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.protobuf import json_format -import grpc # type: ignore -from requests import __version__ as requests_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.types import label_service - -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from .base import LabelServiceTransport - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class LabelServiceRestInterceptor: - """Interceptor for LabelService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the LabelServiceRestTransport. - - .. 
code-block:: python - class MyCustomLabelServiceInterceptor(LabelServiceRestInterceptor): - def pre_get_label(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_label(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_labels(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_labels(self, response): - logging.log(f"Received response: {response}") - return response - - transport = LabelServiceRestTransport(interceptor=MyCustomLabelServiceInterceptor()) - client = LabelServiceClient(transport=transport) - - - """ - - def pre_get_label( - self, - request: label_service.GetLabelRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[label_service.GetLabelRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_label - - Override in a subclass to manipulate the request or metadata - before they are sent to the LabelService server. - """ - return request, metadata - - def post_get_label(self, response: label_service.Label) -> label_service.Label: - """Post-rpc interceptor for get_label - - Override in a subclass to manipulate the response - after it is returned by the LabelService server but before - it is returned to user code. - """ - return response - - def pre_list_labels( - self, - request: label_service.ListLabelsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[label_service.ListLabelsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_labels - - Override in a subclass to manipulate the request or metadata - before they are sent to the LabelService server. 
- """ - return request, metadata - - def post_list_labels( - self, response: label_service.ListLabelsResponse - ) -> label_service.ListLabelsResponse: - """Post-rpc interceptor for list_labels - - Override in a subclass to manipulate the response - after it is returned by the LabelService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the LabelService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the LabelService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class LabelServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: LabelServiceRestInterceptor - - -class LabelServiceRestTransport(LabelServiceTransport): - """REST backend transport for LabelService. - - Provides methods for handling Label objects. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__( - self, - *, - host: str = "admanager.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[LabelServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or LabelServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _GetLabel(LabelServiceRestStub): - def __hash__(self): - return hash("GetLabel") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: label_service.GetLabelRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> label_service.Label: - r"""Call the get label method over HTTP. - - Args: - request (~.label_service.GetLabelRequest): - The request object. Request object for GetLabel method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.label_service.Label: - The Label resource. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/labels/*}", - }, - ] - request, metadata = self._interceptor.pre_get_label(request, metadata) - pb_request = label_service.GetLabelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = label_service.Label() - pb_resp = label_service.Label.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_label(resp) - return resp - - class _ListLabels(LabelServiceRestStub): - def __hash__(self): - return hash("ListLabels") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: label_service.ListLabelsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> label_service.ListLabelsResponse: - r"""Call the list labels method over HTTP. - - Args: - request (~.label_service.ListLabelsRequest): - The request object. Request object for ListLabels method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.label_service.ListLabelsResponse: - Response object for ListLabelsRequest - containing matching Label resources. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=networks/*}/labels", - }, - ] - request, metadata = self._interceptor.pre_list_labels(request, metadata) - pb_request = label_service.ListLabelsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = label_service.ListLabelsResponse() - pb_resp = label_service.ListLabelsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_labels(resp) - return resp - - @property - def get_label( - self, - ) -> Callable[[label_service.GetLabelRequest], label_service.Label]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetLabel(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_labels( - self, - ) -> Callable[[label_service.ListLabelsRequest], label_service.ListLabelsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListLabels(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(LabelServiceRestStub): - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", - }, - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("LabelServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/__init__.py deleted file mode 100644 index 7efb117bbdc9..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import LineItemServiceClient - -__all__ = ("LineItemServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/base.py deleted file mode 100644 index c893d9789ad1..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/base.py +++ /dev/null @@ -1,188 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import line_item_service - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -class LineItemServiceTransport(abc.ABC): - """Abstract transport class for LineItemService.""" - - AUTH_SCOPES = () - - DEFAULT_HOST: str = "admanager.googleapis.com" - - def __init__( - self, - *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. 
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs( - "'credentials_file' and 'credentials' are mutually exclusive" - ) - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, **scopes_kwargs, quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default( - **scopes_kwargs, quota_project_id=quota_project_id - ) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience( - api_audience if api_audience else host - ) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if ( - always_use_jwt_access - and isinstance(credentials, service_account.Credentials) - and hasattr(service_account.Credentials, "with_always_use_jwt_access") - ): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ":" not in host: - host += ":443" - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.get_line_item: gapic_v1.method.wrap_method( - self.get_line_item, - default_timeout=None, - client_info=client_info, - ), - self.list_line_items: gapic_v1.method.wrap_method( - self.list_line_items, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def get_line_item( - self, - ) -> Callable[ - [line_item_service.GetLineItemRequest], - Union[line_item_service.LineItem, Awaitable[line_item_service.LineItem]], - ]: - raise NotImplementedError() - - @property - def list_line_items( - self, - ) -> Callable[ - [line_item_service.ListLineItemsRequest], - Union[ - line_item_service.ListLineItemsResponse, - Awaitable[line_item_service.ListLineItemsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ("LineItemServiceTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py index 708dd42e3e90..b863f446461e 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py @@ -50,7 +50,7 @@ from google.longrunning import 
operations_pb2 # type: ignore -from google.ads.admanager_v1.types import network_service +from google.ads.admanager_v1.types import network_messages, network_service from .transports.base import DEFAULT_CLIENT_INFO, NetworkServiceTransport from .transports.rest import NetworkServiceRestTransport @@ -667,7 +667,7 @@ def __init__( transport_init: Union[ Type[NetworkServiceTransport], Callable[..., NetworkServiceTransport] ] = ( - type(self).get_transport_class(transport) + NetworkServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetworkServiceTransport], transport) ) @@ -692,7 +692,7 @@ def get_network( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> network_service.Network: + ) -> network_messages.Network: r"""API to retrieve a Network object. .. code-block:: python @@ -784,6 +784,79 @@ def sample_get_network(): # Done; return the response. return response + def list_networks( + self, + request: Optional[Union[network_service.ListNetworksRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> network_service.ListNetworksResponse: + r"""API to retrieve all the networks the current user has + access to. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_list_networks(): + # Create a client + client = admanager_v1.NetworkServiceClient() + + # Initialize request argument(s) + request = admanager_v1.ListNetworksRequest( + ) + + # Make the request + response = client.list_networks(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.ListNetworksRequest, dict]): + The request object. Request object for ``ListNetworks`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.ListNetworksResponse: + Response object for ListNetworks method. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, network_service.ListNetworksRequest): + request = network_service.ListNetworksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_networks] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "NetworkServiceClient": return self diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/base.py index ae9d00df7932..6f0ecf9e8ef9 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import network_service +from google.ads.admanager_v1.types import network_messages, network_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -134,6 +134,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_networks: gapic_v1.method.wrap_method( + self.list_networks, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -150,7 +155,19 @@ def get_network( self, ) -> Callable[ [network_service.GetNetworkRequest], - Union[network_service.Network, Awaitable[network_service.Network]], + Union[network_messages.Network, Awaitable[network_messages.Network]], + ]: + raise NotImplementedError() + + @property + def list_networks( + self, + ) -> Callable[ + [network_service.ListNetworksRequest], + Union[ + network_service.ListNetworksResponse, + Awaitable[network_service.ListNetworksResponse], + ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/rest.py index 0bb7e4dcf9de..e5165660eb1c 100644 --- 
a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import network_service +from google.ads.admanager_v1.types import network_messages, network_service from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import NetworkServiceTransport @@ -73,6 +73,14 @@ def post_get_network(self, response): logging.log(f"Received response: {response}") return response + def pre_list_networks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_networks(self, response): + logging.log(f"Received response: {response}") + return response + transport = NetworkServiceRestTransport(interceptor=MyCustomNetworkServiceInterceptor()) client = NetworkServiceClient(transport=transport) @@ -92,8 +100,8 @@ def pre_get_network( return request, metadata def post_get_network( - self, response: network_service.Network - ) -> network_service.Network: + self, response: network_messages.Network + ) -> network_messages.Network: """Post-rpc interceptor for get_network Override in a subclass to manipulate the response @@ -102,6 +110,29 @@ def post_get_network( """ return response + def pre_list_networks( + self, + request: network_service.ListNetworksRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[network_service.ListNetworksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_networks + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkService server. 
+ """ + return request, metadata + + def post_list_networks( + self, response: network_service.ListNetworksResponse + ) -> network_service.ListNetworksResponse: + """Post-rpc interceptor for list_networks + + Override in a subclass to manipulate the response + after it is returned by the NetworkService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -243,7 +274,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> network_service.Network: + ) -> network_messages.Network: r"""Call the get network method over HTTP. Args: @@ -256,7 +287,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.network_service.Network: + ~.network_messages.Network: The Network resource. """ @@ -300,21 +331,105 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = network_service.Network() - pb_resp = network_service.Network.pb(resp) + resp = network_messages.Network() + pb_resp = network_messages.Network.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_network(resp) return resp + class _ListNetworks(NetworkServiceRestStub): + def __hash__(self): + return hash("ListNetworks") + + def __call__( + self, + request: network_service.ListNetworksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> network_service.ListNetworksResponse: + r"""Call the list networks method over HTTP. + + Args: + request (~.network_service.ListNetworksRequest): + The request object. Request object for ``ListNetworks`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.network_service.ListNetworksResponse: + Response object for ``ListNetworks`` method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/networks", + }, + ] + request, metadata = self._interceptor.pre_list_networks(request, metadata) + pb_request = network_service.ListNetworksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = network_service.ListNetworksResponse() + pb_resp = network_service.ListNetworksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_networks(resp) + return resp + @property def get_network( self, - ) -> Callable[[network_service.GetNetworkRequest], network_service.Network]: + ) -> Callable[[network_service.GetNetworkRequest], network_messages.Network]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast return self._GetNetwork(self._session, self._host, self._interceptor) # type: ignore + @property + def list_networks( + self, + ) -> Callable[ + [network_service.ListNetworksRequest], network_service.ListNetworksResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListNetworks(self._session, self._host, self._interceptor) # type: ignore + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -346,11 +461,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py index 09ad3e675c14..018b9add869d 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py @@ -52,7 +52,13 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.ads.admanager_v1.services.order_service import pagers -from google.ads.admanager_v1.types import applied_label, order_service +from google.ads.admanager_v1.types import ( + applied_label, + custom_field_value, + order_enums, + order_messages, + order_service, +) from .transports.base import DEFAULT_CLIENT_INFO, OrderServiceTransport from .transports.rest import OrderServiceRestTransport @@ -218,6 +224,25 @@ def parse_contact_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + 
@staticmethod + def custom_field_path( + network_code: str, + custom_field: str, + ) -> str: + """Returns a fully-qualified custom_field string.""" + return "networks/{network_code}/customFields/{custom_field}".format( + network_code=network_code, + custom_field=custom_field, + ) + + @staticmethod + def parse_custom_field_path(path: str) -> Dict[str, str]: + """Parses a custom_field path into its component segments.""" + m = re.match( + r"^networks/(?P.+?)/customFields/(?P.+?)$", path + ) + return m.groupdict() if m else {} + @staticmethod def label_path( network_code: str, @@ -753,7 +778,7 @@ def __init__( transport_init: Union[ Type[OrderServiceTransport], Callable[..., OrderServiceTransport] ] = ( - type(self).get_transport_class(transport) + OrderServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., OrderServiceTransport], transport) ) @@ -778,7 +803,7 @@ def get_order( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> order_service.Order: + ) -> order_messages.Order: r"""API to retrieve an Order object. .. 
code-block:: python diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/pagers.py index e4a153d12691..6990818382ad 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import order_service +from google.ads.admanager_v1.types import order_messages, order_service class ListOrdersPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[order_service.ListOrdersResponse]: ) yield self._response - def __iter__(self) -> Iterator[order_service.Order]: + def __iter__(self) -> Iterator[order_messages.Order]: for page in self.pages: yield from page.orders diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/base.py index 1c19f3691180..f2a3399ce763 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import order_service +from google.ads.admanager_v1.types import order_messages, order_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -155,7 +155,7 @@ def get_order( self, ) -> Callable[ [order_service.GetOrderRequest], - Union[order_service.Order, Awaitable[order_service.Order]], + 
Union[order_messages.Order, Awaitable[order_messages.Order]], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/rest.py index bbcd76b93d43..ee56c744e7a9 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import order_service +from google.ads.admanager_v1.types import order_messages, order_service from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import OrderServiceTransport @@ -99,7 +99,7 @@ def pre_get_order( """ return request, metadata - def post_get_order(self, response: order_service.Order) -> order_service.Order: + def post_get_order(self, response: order_messages.Order) -> order_messages.Order: """Post-rpc interceptor for get_order Override in a subclass to manipulate the response @@ -272,7 +272,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> order_service.Order: + ) -> order_messages.Order: r"""Call the get order method over HTTP. Args: @@ -285,7 +285,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.order_service.Order: + ~.order_messages.Order: The ``Order`` resource. 
""" @@ -329,8 +329,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = order_service.Order() - pb_resp = order_service.Order.pb(resp) + resp = order_messages.Order() + pb_resp = order_messages.Order.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_order(resp) @@ -426,7 +426,7 @@ def __call__( @property def get_order( self, - ) -> Callable[[order_service.GetOrderRequest], order_service.Order]: + ) -> Callable[[order_service.GetOrderRequest], order_messages.Order]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetOrder(self._session, self._host, self._interceptor) # type: ignore @@ -470,11 +470,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py index 575189aaea65..0b662d3c0e41 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py @@ -52,7 +52,11 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.ads.admanager_v1.services.placement_service import pagers -from google.ads.admanager_v1.types import placement_enums, placement_service +from google.ads.admanager_v1.types import ( + placement_enums, + placement_messages, + placement_service, +) from .transports.base import DEFAULT_CLIENT_INFO, 
PlacementServiceTransport from .transports.rest import PlacementServiceRestTransport @@ -691,7 +695,7 @@ def __init__( Type[PlacementServiceTransport], Callable[..., PlacementServiceTransport], ] = ( - type(self).get_transport_class(transport) + PlacementServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PlacementServiceTransport], transport) ) @@ -716,7 +720,7 @@ def get_placement( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> placement_service.Placement: + ) -> placement_messages.Placement: r"""API to retrieve a ``Placement`` object. .. code-block:: python diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/pagers.py index cff0babfac9a..8dfe5d886b93 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import placement_service +from google.ads.admanager_v1.types import placement_messages, placement_service class ListPlacementsPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[placement_service.ListPlacementsResponse]: ) yield self._response - def __iter__(self) -> Iterator[placement_service.Placement]: + def __iter__(self) -> Iterator[placement_messages.Placement]: for page in self.pages: yield from page.placements diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/base.py 
b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/base.py index 0012313340aa..5a18c192fe76 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import placement_service +from google.ads.admanager_v1.types import placement_messages, placement_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -155,7 +155,7 @@ def get_placement( self, ) -> Callable[ [placement_service.GetPlacementRequest], - Union[placement_service.Placement, Awaitable[placement_service.Placement]], + Union[placement_messages.Placement, Awaitable[placement_messages.Placement]], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/rest.py index 319d89f6a961..65d93682cb02 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import placement_service +from google.ads.admanager_v1.types import placement_messages, placement_service from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import PlacementServiceTransport @@ -100,8 +100,8 @@ def pre_get_placement( return request, metadata def post_get_placement( - self, response: placement_service.Placement - ) -> placement_service.Placement: 
+ self, response: placement_messages.Placement + ) -> placement_messages.Placement: """Post-rpc interceptor for get_placement Override in a subclass to manipulate the response @@ -274,7 +274,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> placement_service.Placement: + ) -> placement_messages.Placement: r"""Call the get placement method over HTTP. Args: @@ -287,7 +287,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.placement_service.Placement: + ~.placement_messages.Placement: The ``Placement`` resource. """ @@ -331,8 +331,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = placement_service.Placement() - pb_resp = placement_service.Placement.pb(resp) + resp = placement_messages.Placement() + pb_resp = placement_messages.Placement.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_placement(resp) @@ -428,7 +428,9 @@ def __call__( @property def get_placement( self, - ) -> Callable[[placement_service.GetPlacementRequest], placement_service.Placement]: + ) -> Callable[ + [placement_service.GetPlacementRequest], placement_messages.Placement + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast return self._GetPlacement(self._session, self._host, self._interceptor) # type: ignore @@ -475,11 +477,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py index df84cf7c5295..ebe8fb88cd0d 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py @@ -51,7 +51,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.ads.admanager_v1.services.report_service import pagers from google.ads.admanager_v1.types import report_service from .transports.base import DEFAULT_CLIENT_INFO, ReportServiceTransport @@ -92,7 +95,7 @@ def get_transport_class( class ReportServiceClient(metaclass=ReportServiceClientMeta): - """Provides methods for interacting with Reports.""" + """Provides methods for interacting with reports.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -180,6 +183,21 @@ def transport(self) -> ReportServiceTransport: """ return self._transport + @staticmethod + def network_path( + network_code: str, + ) -> str: + """Returns a fully-qualified network string.""" + return "networks/{network_code}".format( + network_code=network_code, + ) + + 
@staticmethod + def parse_network_path(path: str) -> Dict[str, str]: + """Parses a network path into its component segments.""" + m = re.match(r"^networks/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def report_path( network_code: str, @@ -652,7 +670,7 @@ def __init__( transport_init: Union[ Type[ReportServiceTransport], Callable[..., ReportServiceTransport] ] = ( - type(self).get_transport_class(transport) + ReportServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ReportServiceTransport], transport) ) @@ -669,22 +687,16 @@ def __init__( api_audience=self._client_options.api_audience, ) - def export_saved_report( + def get_report( self, - request: Optional[Union[report_service.ExportSavedReportRequest, dict]] = None, + request: Optional[Union[report_service.GetReportRequest, dict]] = None, *, - report: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Initiates the execution and export of a report - asynchronously. Users can get the report by polling this - operation via OperationsService.GetOperation. - Intervals of at least 2 seconds are recommended, with an - exponential backoff. Once a report is complete, the - operation will contain a ExportSavedReportResponse in - its response field. + ) -> report_service.Report: + r"""API to retrieve a ``Report`` object. .. 
code-block:: python @@ -697,39 +709,489 @@ def export_saved_report( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.ads import admanager_v1 - def sample_export_saved_report(): + def sample_get_report(): # Create a client client = admanager_v1.ReportServiceClient() # Initialize request argument(s) - request = admanager_v1.ExportSavedReportRequest( - format_="XML", + request = admanager_v1.GetReportRequest( + name="name_value", ) # Make the request - operation = client.export_saved_report(request=request) + response = client.get_report(request=request) - print("Waiting for operation to complete...") + # Handle the response + print(response) - response = operation.result() + Args: + request (Union[google.ads.admanager_v1.types.GetReportRequest, dict]): + The request object. Request object for ``GetReport`` method. + name (str): + Required. The resource name of the report. Format: + ``networks/{network_code}/reports/{report_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.Report: + The Report resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, report_service.GetReportRequest): + request = report_service.GetReportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_reports( + self, + request: Optional[Union[report_service.ListReportsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListReportsPager: + r"""API to retrieve a list of ``Report`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_list_reports(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + request = admanager_v1.ListReportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_reports(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.ListReportsRequest, dict]): + The request object. Request object for ``ListReports`` method. + parent (str): + Required. The parent, which owns this collection of + reports. Format: ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.services.report_service.pagers.ListReportsPager: + Response object for ListReportsResponse containing matching Report + objects. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, report_service.ListReportsRequest): + request = report_service.ListReportsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_reports] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListReportsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_report( + self, + request: Optional[Union[report_service.CreateReportRequest, dict]] = None, + *, + parent: Optional[str] = None, + report: Optional[report_service.Report] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.Report: + r"""API to create a ``Report`` object. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_create_report(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + report = admanager_v1.Report() + report.report_definition.dimensions = ['CUSTOM_DIMENSION_9_VALUE'] + report.report_definition.metrics = ['YIELD_GROUP_MEDIATION_THIRD_PARTY_ECPM'] + report.report_definition.report_type = "HISTORICAL" + + request = admanager_v1.CreateReportRequest( + parent="parent_value", + report=report, + ) + + # Make the request + response = client.create_report(request=request) # Handle the response print(response) Args: - request (Union[google.ads.admanager_v1.types.ExportSavedReportRequest, dict]): - The request object. Request proto for the configuration - of a report run. - report (str): - The name of a particular saved report resource. + request (Union[google.ads.admanager_v1.types.CreateReportRequest, dict]): + The request object. Request object for ``CreateReport`` method. + parent (str): + Required. The parent resource where this ``Report`` will + be created. Format: ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + report (google.ads.admanager_v1.types.Report): + Required. The ``Report`` to create. + This corresponds to the ``report`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.Report: + The Report resource. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, report]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, report_service.CreateReportRequest): + request = report_service.CreateReportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if report is not None: + request.report = report - A report will be run based on the specification of this - saved report. It must have the format of - "networks/{network_code}/reports/{report_id}" + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_report] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_report( + self, + request: Optional[Union[report_service.UpdateReportRequest, dict]] = None, + *, + report: Optional[report_service.Report] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.Report: + r"""API to update a ``Report`` object. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_update_report(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + report = admanager_v1.Report() + report.report_definition.dimensions = ['CUSTOM_DIMENSION_9_VALUE'] + report.report_definition.metrics = ['YIELD_GROUP_MEDIATION_THIRD_PARTY_ECPM'] + report.report_definition.report_type = "HISTORICAL" + + request = admanager_v1.UpdateReportRequest( + report=report, + ) + + # Make the request + response = client.update_report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.UpdateReportRequest, dict]): + The request object. Request object for ``UpdateReport`` method. + report (google.ads.admanager_v1.types.Report): + Required. The ``Report`` to update. This corresponds to the ``report`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.Report: + The Report resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([report, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, report_service.UpdateReportRequest): + request = report_service.UpdateReportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if report is not None: + request.report = report + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("report.name", request.report.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def run_report( + self, + request: Optional[Union[report_service.RunReportRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Initiates the execution of an existing report asynchronously. + Users can get the report by polling this operation via + ``OperationsService.GetOperation``. Poll every 5 seconds + initially, with an exponential backoff. Once a report is + complete, the operation will contain a ``RunReportResponse`` in + its response field containing a report_result that can be passed + to the ``FetchReportResultRows`` method to retrieve the report + data. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_run_report(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + request = admanager_v1.RunReportRequest( + name="name_value", + ) + + # Make the request + operation = client.run_report(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.RunReportRequest, dict]): + The request object. Request message for a running a + report. + name (str): + Required. The report to run. 
Format: + ``networks/{network_code}/reports/{report_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -740,14 +1202,15 @@ def sample_export_saved_report(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.ads.admanager_v1.types.ExportSavedReportResponse` Message included in the longrunning Operation result.response field when - the report completes successfully. + The result type for the operation will be + :class:`google.ads.admanager_v1.types.RunReportResponse` + Response message for a completed RunReport operation. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([report]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -756,21 +1219,21 @@ def sample_export_saved_report(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, report_service.ExportSavedReportRequest): - request = report_service.ExportSavedReportRequest(request) + if not isinstance(request, report_service.RunReportRequest): + request = report_service.RunReportRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if report is not None: - request.report = report + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.export_saved_report] + rpc = self._transport._wrapped_methods[self._transport.run_report] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("report", request.report),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -788,8 +1251,131 @@ def sample_export_saved_report(): response = operation.from_gapic( response, self._transport.operations_client, - report_service.ExportSavedReportResponse, - metadata_type=report_service.ExportSavedReportMetadata, + report_service.RunReportResponse, + metadata_type=report_service.RunReportMetadata, + ) + + # Done; return the response. + return response + + def fetch_report_result_rows( + self, + request: Optional[ + Union[report_service.FetchReportResultRowsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchReportResultRowsPager: + r"""Returns the result rows from a completed report. The caller must + have previously called ``RunReport`` and waited for that + operation to complete. The rows will be returned according to + the order specified by the ``sorts`` member of the report + definition. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_fetch_report_result_rows(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + request = admanager_v1.FetchReportResultRowsRequest( + ) + + # Make the request + page_result = client.fetch_report_result_rows(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.FetchReportResultRowsRequest, dict]): + The request object. The request message for the fetch + report result rows endpoint. + name (str): + The report result being fetched. Format: + ``networks/{network_code}/reports/{report_id}/results/{report_result_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.services.report_service.pagers.FetchReportResultRowsPager: + The response message for the fetch + report result rows endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, report_service.FetchReportResultRowsRequest): + request = report_service.FetchReportResultRowsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_report_result_rows] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchReportResultRowsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, ) # Done; return the response. diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/pagers.py new file mode 100644 index 000000000000..5299974427f5 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/pagers.py @@ -0,0 +1,189 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.ads.admanager_v1.types import report_service + + +class ListReportsPager: + """A pager for iterating through ``list_reports`` requests. + + This class thinly wraps an initial + :class:`google.ads.admanager_v1.types.ListReportsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``reports`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListReports`` requests and continue to iterate + through the ``reports`` field on the + corresponding responses. + + All the usual :class:`google.ads.admanager_v1.types.ListReportsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., report_service.ListReportsResponse], + request: report_service.ListReportsRequest, + response: report_service.ListReportsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.admanager_v1.types.ListReportsRequest): + The initial request object. + response (google.ads.admanager_v1.types.ListReportsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = report_service.ListReportsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[report_service.ListReportsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[report_service.Report]: + for page in self.pages: + yield from page.reports + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchReportResultRowsPager: + """A pager for iterating through ``fetch_report_result_rows`` requests. 
+ + This class thinly wraps an initial + :class:`google.ads.admanager_v1.types.FetchReportResultRowsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``rows`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchReportResultRows`` requests and continue to iterate + through the ``rows`` field on the + corresponding responses. + + All the usual :class:`google.ads.admanager_v1.types.FetchReportResultRowsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., report_service.FetchReportResultRowsResponse], + request: report_service.FetchReportResultRowsRequest, + response: report_service.FetchReportResultRowsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.admanager_v1.types.FetchReportResultRowsRequest): + The initial request object. + response (google.ads.admanager_v1.types.FetchReportResultRowsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = report_service.FetchReportResultRowsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[report_service.FetchReportResultRowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[report_service.Report.DataTable.Row]: + for page in self.pages: + yield from page.rows + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/base.py index fe244f5d7b89..1fac274ed16e 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/base.py @@ -129,8 +129,33 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { - self.export_saved_report: gapic_v1.method.wrap_method( - self.export_saved_report, + self.get_report: gapic_v1.method.wrap_method( + self.get_report, + default_timeout=None, + client_info=client_info, + ), + self.list_reports: gapic_v1.method.wrap_method( + self.list_reports, + default_timeout=None, + client_info=client_info, + ), + self.create_report: gapic_v1.method.wrap_method( + self.create_report, + default_timeout=None, + client_info=client_info, + ), + self.update_report: gapic_v1.method.wrap_method( + self.update_report, + default_timeout=None, + client_info=client_info, + ), + self.run_report: gapic_v1.method.wrap_method( + self.run_report, + default_timeout=None, + client_info=client_info, + ), + self.fetch_report_result_rows: gapic_v1.method.wrap_method( + self.fetch_report_result_rows, default_timeout=None, client_info=client_info, ), @@ -151,14 +176,65 @@ def operations_client(self): raise NotImplementedError() @property - def export_saved_report( + def get_report( self, ) -> Callable[ - [report_service.ExportSavedReportRequest], + [report_service.GetReportRequest], + Union[report_service.Report, Awaitable[report_service.Report]], + ]: + raise NotImplementedError() + + @property + def list_reports( + self, + ) -> Callable[ + [report_service.ListReportsRequest], + Union[ + report_service.ListReportsResponse, + Awaitable[report_service.ListReportsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_report( + self, + ) -> Callable[ + [report_service.CreateReportRequest], + Union[report_service.Report, Awaitable[report_service.Report]], + ]: + raise NotImplementedError() + + @property + def update_report( + self, + ) -> Callable[ + [report_service.UpdateReportRequest], + Union[report_service.Report, Awaitable[report_service.Report]], + ]: + raise NotImplementedError() + + @property + def run_report( + self, + ) -> Callable[ + [report_service.RunReportRequest], Union[operations_pb2.Operation, 
Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() + @property + def fetch_report_result_rows( + self, + ) -> Callable[ + [report_service.FetchReportResultRowsRequest], + Union[ + report_service.FetchReportResultRowsResponse, + Awaitable[report_service.FetchReportResultRowsResponse], + ], + ]: + raise NotImplementedError() + @property def get_operation( self, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/rest.py index 9b51c99ce9b2..c529442126ea 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/rest.py @@ -71,11 +71,51 @@ class ReportServiceRestInterceptor: .. code-block:: python class MyCustomReportServiceInterceptor(ReportServiceRestInterceptor): - def pre_export_saved_report(self, request, metadata): + def pre_create_report(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_export_saved_report(self, response): + def post_create_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_report_result_rows(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_report_result_rows(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_reports(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_reports(self, response): + logging.log(f"Received 
response: {response}") + return response + + def pre_run_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_report(self, response): logging.log(f"Received response: {response}") return response @@ -85,22 +125,135 @@ def post_export_saved_report(self, response): """ - def pre_export_saved_report( + def pre_create_report( + self, + request: report_service.CreateReportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[report_service.CreateReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReportService server. + """ + return request, metadata + + def post_create_report( + self, response: report_service.Report + ) -> report_service.Report: + """Post-rpc interceptor for create_report + + Override in a subclass to manipulate the response + after it is returned by the ReportService server but before + it is returned to user code. + """ + return response + + def pre_fetch_report_result_rows( + self, + request: report_service.FetchReportResultRowsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[report_service.FetchReportResultRowsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_report_result_rows + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReportService server. 
+ """ + return request, metadata + + def post_fetch_report_result_rows( + self, response: report_service.FetchReportResultRowsResponse + ) -> report_service.FetchReportResultRowsResponse: + """Post-rpc interceptor for fetch_report_result_rows + + Override in a subclass to manipulate the response + after it is returned by the ReportService server but before + it is returned to user code. + """ + return response + + def pre_get_report( + self, + request: report_service.GetReportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[report_service.GetReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReportService server. + """ + return request, metadata + + def post_get_report(self, response: report_service.Report) -> report_service.Report: + """Post-rpc interceptor for get_report + + Override in a subclass to manipulate the response + after it is returned by the ReportService server but before + it is returned to user code. + """ + return response + + def pre_list_reports( self, - request: report_service.ExportSavedReportRequest, + request: report_service.ListReportsRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[report_service.ExportSavedReportRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for export_saved_report + ) -> Tuple[report_service.ListReportsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_reports Override in a subclass to manipulate the request or metadata before they are sent to the ReportService server. """ return request, metadata - def post_export_saved_report( + def post_list_reports( + self, response: report_service.ListReportsResponse + ) -> report_service.ListReportsResponse: + """Post-rpc interceptor for list_reports + + Override in a subclass to manipulate the response + after it is returned by the ReportService server but before + it is returned to user code. 
+ """ + return response + + def pre_run_report( + self, + request: report_service.RunReportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[report_service.RunReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReportService server. + """ + return request, metadata + + def post_run_report( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for export_saved_report + """Post-rpc interceptor for run_report + + Override in a subclass to manipulate the response + after it is returned by the ReportService server but before + it is returned to user code. + """ + return response + + def pre_update_report( + self, + request: report_service.UpdateReportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[report_service.UpdateReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReportService server. + """ + return request, metadata + + def post_update_report( + self, response: report_service.Report + ) -> report_service.Report: + """Post-rpc interceptor for update_report Override in a subclass to manipulate the response after it is returned by the ReportService server but before @@ -142,7 +295,7 @@ class ReportServiceRestStub: class ReportServiceRestTransport(ReportServiceTransport): """REST backend transport for ReportService. - Provides methods for interacting with Reports. + Provides methods for interacting with reports. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -242,11 +395,11 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "google.longrunning.Operations.GetOperation": [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ], } @@ -267,9 +420,352 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client - class _ExportSavedReport(ReportServiceRestStub): + class _CreateReport(ReportServiceRestStub): + def __hash__(self): + return hash("CreateReport") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: report_service.CreateReportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.Report: + r"""Call the create report method over HTTP. + + Args: + request (~.report_service.CreateReportRequest): + The request object. Request object for ``CreateReport`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.report_service.Report: + The ``Report`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/reports", + "body": "report", + }, + ] + request, metadata = self._interceptor.pre_create_report(request, metadata) + pb_request = report_service.CreateReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = report_service.Report() + pb_resp = report_service.Report.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_report(resp) + return resp + + class _FetchReportResultRows(ReportServiceRestStub): + def __hash__(self): + return hash("FetchReportResultRows") + + def __call__( + self, + request: report_service.FetchReportResultRowsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.FetchReportResultRowsResponse: + r"""Call the fetch report result rows method over HTTP. + + Args: + request (~.report_service.FetchReportResultRowsRequest): + The request object. The request message for the fetch + report result rows endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.report_service.FetchReportResultRowsResponse: + The response message for the fetch + report result rows endpoint. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=networks/*/reports/*/results/*}:fetchRows", + }, + ] + request, metadata = self._interceptor.pre_fetch_report_result_rows( + request, metadata + ) + pb_request = report_service.FetchReportResultRowsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = report_service.FetchReportResultRowsResponse() + pb_resp = report_service.FetchReportResultRowsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_report_result_rows(resp) + return resp + + class _GetReport(ReportServiceRestStub): + def __hash__(self): + return hash("GetReport") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: report_service.GetReportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.Report: + r"""Call the get report method over HTTP. + + Args: + request (~.report_service.GetReportRequest): + The request object. Request object for ``GetReport`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.report_service.Report: + The ``Report`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=networks/*/reports/*}", + }, + ] + request, metadata = self._interceptor.pre_get_report(request, metadata) + pb_request = report_service.GetReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = report_service.Report() + pb_resp = report_service.Report.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_report(resp) + return resp + + class _ListReports(ReportServiceRestStub): + def __hash__(self): + return hash("ListReports") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: report_service.ListReportsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.ListReportsResponse: + r"""Call the list reports method over HTTP. + + Args: + request (~.report_service.ListReportsRequest): + The request object. Request object for ``ListReports`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.report_service.ListReportsResponse: + Response object for ``ListReportsResponse`` containing + matching ``Report`` objects. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=networks/*}/reports", + }, + ] + request, metadata = self._interceptor.pre_list_reports(request, metadata) + pb_request = report_service.ListReportsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = report_service.ListReportsResponse() + pb_resp = report_service.ListReportsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_reports(resp) + return resp + + class _RunReport(ReportServiceRestStub): def __hash__(self): - return hash("ExportSavedReport") + return hash("RunReport") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -283,18 +779,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: report_service.ExportSavedReportRequest, + request: report_service.RunReportRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the export saved report method over HTTP. + r"""Call the run report method over HTTP. Args: - request (~.report_service.ExportSavedReportRequest): - The request object. Request proto for the configuration - of a report run. + request (~.report_service.RunReportRequest): + The request object. Request message for a running a + report. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -312,14 +808,12 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{report=networks/*/reports/*}:exportSavedReport", + "uri": "/v1/{name=networks/*/reports/*}:run", "body": "*", }, ] - request, metadata = self._interceptor.pre_export_saved_report( - request, metadata - ) - pb_request = report_service.ExportSavedReportRequest.pb(request) + request, metadata = self._interceptor.pre_run_report(request, metadata) + pb_request = report_service.RunReportRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -360,16 +854,155 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_export_saved_report(resp) + resp = self._interceptor.post_run_report(resp) + return resp + + class _UpdateReport(ReportServiceRestStub): + def __hash__(self): + return hash("UpdateReport") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: report_service.UpdateReportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.Report: + r"""Call the update report method over HTTP. + + Args: + request (~.report_service.UpdateReportRequest): + The request object. Request object for ``UpdateReport`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.report_service.Report: + The ``Report`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{report.name=networks/*/reports/*}", + "body": "report", + }, + ] + request, metadata = self._interceptor.pre_update_report(request, metadata) + pb_request = report_service.UpdateReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = report_service.Report() + pb_resp = report_service.Report.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_report(resp) return resp @property - def export_saved_report( + def create_report( + self, + ) -> Callable[[report_service.CreateReportRequest], report_service.Report]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_report_result_rows( + self, + ) -> Callable[ + [report_service.FetchReportResultRowsRequest], + report_service.FetchReportResultRowsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchReportResultRows(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_report( + self, + ) -> Callable[[report_service.GetReportRequest], report_service.Report]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_reports( + self, + ) -> Callable[ + [report_service.ListReportsRequest], report_service.ListReportsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListReports(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_report( self, - ) -> Callable[[report_service.ExportSavedReportRequest], operations_pb2.Operation]: + ) -> Callable[[report_service.RunReportRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ExportSavedReport(self._session, self._host, self._interceptor) # type: ignore + return self._RunReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_report( + self, + ) -> Callable[[report_service.UpdateReportRequest], report_service.Report]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateReport(self._session, self._host, self._interceptor) # type: ignore @property def get_operation(self): @@ -402,11 +1035,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py index db9528e1a581..2b7295b48ef8 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py @@ -51,7 +51,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.ads.admanager_v1.services.role_service import pagers -from google.ads.admanager_v1.types import role_service +from google.ads.admanager_v1.types import role_enums, role_messages, role_service from .transports.base import DEFAULT_CLIENT_INFO, RoleServiceTransport from .transports.rest import RoleServiceRestTransport @@ -91,7 +91,7 @@ def get_transport_class( class RoleServiceClient(metaclass=RoleServiceClientMeta): - """Provides methods for handling Role objects.""" + """Provides methods for handling ``Role`` 
objects.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -663,7 +663,7 @@ def __init__( transport_init: Union[ Type[RoleServiceTransport], Callable[..., RoleServiceTransport] ] = ( - type(self).get_transport_class(transport) + RoleServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RoleServiceTransport], transport) ) @@ -688,8 +688,8 @@ def get_role( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> role_service.Role: - r"""API to retrieve a Role object. + ) -> role_messages.Role: + r"""API to retrieve a ``Role`` object. .. code-block:: python @@ -719,7 +719,7 @@ def sample_get_role(): Args: request (Union[google.ads.admanager_v1.types.GetRoleRequest, dict]): - The request object. Request object for GetRole method. + The request object. Request object for ``GetRole`` method. name (str): Required. The resource name of the Role. Format: ``networks/{network_code}/roles/{role_id}`` @@ -789,7 +789,7 @@ def list_roles( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListRolesPager: - r"""API to retrieve a list of Role objects. + r"""API to retrieve a list of ``Role`` objects. .. code-block:: python @@ -820,7 +820,7 @@ def sample_list_roles(): Args: request (Union[google.ads.admanager_v1.types.ListRolesRequest, dict]): - The request object. Request object for ListRoles method. + The request object. Request object for ``ListRoles`` method. parent (str): Required. The parent, which owns this collection of Roles. Format: ``networks/{network_code}`` @@ -836,12 +836,11 @@ def sample_list_roles(): Returns: google.ads.admanager_v1.services.role_service.pagers.ListRolesPager: - Response object for ListRolesRequest - containing matching Role resources. + Response object for ListRolesRequest containing matching + Role objects. 
- Iterating over this object will yield - results and resolve additional pages - automatically. + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/pagers.py index 93a125b6f0cc..be9ac9755dd2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import role_service +from google.ads.admanager_v1.types import role_messages, role_service class ListRolesPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[role_service.ListRolesResponse]: ) yield self._response - def __iter__(self) -> Iterator[role_service.Role]: + def __iter__(self) -> Iterator[role_messages.Role]: for page in self.pages: yield from page.roles diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/base.py index c90135acb761..527ea039fdf5 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import role_service +from google.ads.admanager_v1.types import role_messages, role_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( 
gapic_version=package_version.__version__ @@ -155,7 +155,7 @@ def get_role( self, ) -> Callable[ [role_service.GetRoleRequest], - Union[role_service.Role, Awaitable[role_service.Role]], + Union[role_messages.Role, Awaitable[role_messages.Role]], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/rest.py index c34a643c70d7..c729f35e3b0b 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import role_service +from google.ads.admanager_v1.types import role_messages, role_service from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import RoleServiceTransport @@ -97,7 +97,7 @@ def pre_get_role( """ return request, metadata - def post_get_role(self, response: role_service.Role) -> role_service.Role: + def post_get_role(self, response: role_messages.Role) -> role_messages.Role: """Post-rpc interceptor for get_role Override in a subclass to manipulate the response @@ -163,7 +163,7 @@ class RoleServiceRestStub: class RoleServiceRestTransport(RoleServiceTransport): """REST backend transport for RoleService. - Provides methods for handling Role objects. + Provides methods for handling ``Role`` objects. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -270,12 +270,12 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> role_service.Role: + ) -> role_messages.Role: r"""Call the get role method over HTTP. 
Args: request (~.role_service.GetRoleRequest): - The request object. Request object for GetRole method. + The request object. Request object for ``GetRole`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -283,8 +283,8 @@ def __call__( sent along with the request as metadata. Returns: - ~.role_service.Role: - The Role resource. + ~.role_messages.Role: + The ``Role`` resource. """ http_options: List[Dict[str, str]] = [ @@ -327,8 +327,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = role_service.Role() - pb_resp = role_service.Role.pb(resp) + resp = role_messages.Role() + pb_resp = role_messages.Role.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_role(resp) @@ -360,7 +360,7 @@ def __call__( Args: request (~.role_service.ListRolesRequest): - The request object. Request object for ListRoles method. + The request object. Request object for ``ListRoles`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -369,8 +369,8 @@ def __call__( Returns: ~.role_service.ListRolesResponse: - Response object for ListRolesRequest - containing matching Role resources. + Response object for ``ListRolesRequest`` containing + matching ``Role`` objects. """ @@ -422,7 +422,7 @@ def __call__( return resp @property - def get_role(self) -> Callable[[role_service.GetRoleRequest], role_service.Role]: + def get_role(self) -> Callable[[role_service.GetRoleRequest], role_messages.Role]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast return self._GetRole(self._session, self._host, self._interceptor) # type: ignore @@ -466,11 +466,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/__init__.py new file mode 100644 index 000000000000..8d84490b8655 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import TaxonomyCategoryServiceClient + +__all__ = ("TaxonomyCategoryServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py similarity index 84% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/client.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py index 0b5e0860c1e1..d40f3a38712a 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py @@ -50,15 +50,19 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.services.ad_partner_service import pagers -from google.ads.admanager_v1.types import ad_partner_service +from google.ads.admanager_v1.services.taxonomy_category_service import pagers +from google.ads.admanager_v1.types import ( + taxonomy_category_messages, + taxonomy_category_service, + taxonomy_type_enum, +) -from .transports.base import DEFAULT_CLIENT_INFO, AdPartnerServiceTransport -from .transports.rest import AdPartnerServiceRestTransport +from .transports.base import DEFAULT_CLIENT_INFO, TaxonomyCategoryServiceTransport +from .transports.rest import TaxonomyCategoryServiceRestTransport -class AdPartnerServiceClientMeta(type): - """Metaclass for the AdPartnerService client. +class TaxonomyCategoryServiceClientMeta(type): + """Metaclass for the TaxonomyCategoryService client. This provides class-level methods for building and retrieving support objects (e.g. 
transport) without polluting the client instance @@ -67,13 +71,13 @@ class AdPartnerServiceClientMeta(type): _transport_registry = ( OrderedDict() - ) # type: Dict[str, Type[AdPartnerServiceTransport]] - _transport_registry["rest"] = AdPartnerServiceRestTransport + ) # type: Dict[str, Type[TaxonomyCategoryServiceTransport]] + _transport_registry["rest"] = TaxonomyCategoryServiceRestTransport def get_transport_class( cls, label: Optional[str] = None, - ) -> Type[AdPartnerServiceTransport]: + ) -> Type[TaxonomyCategoryServiceTransport]: """Returns an appropriate transport class. Args: @@ -92,8 +96,8 @@ def get_transport_class( return next(iter(cls._transport_registry.values())) -class AdPartnerServiceClient(metaclass=AdPartnerServiceClientMeta): - """Provides methods for handling AdPartner objects.""" +class TaxonomyCategoryServiceClient(metaclass=TaxonomyCategoryServiceClientMeta): + """Provides methods for handling ``TaxonomyCategory`` objects.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -145,7 +149,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - AdPartnerServiceClient: The constructed client. + TaxonomyCategoryServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials @@ -163,7 +167,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - AdPartnerServiceClient: The constructed client. + TaxonomyCategoryServiceClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -172,47 +176,48 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @property - def transport(self) -> AdPartnerServiceTransport: + def transport(self) -> TaxonomyCategoryServiceTransport: """Returns the transport used by the client instance. Returns: - AdPartnerServiceTransport: The transport used by the client + TaxonomyCategoryServiceTransport: The transport used by the client instance. """ return self._transport @staticmethod - def ad_partner_path( + def network_path( network_code: str, - ad_partner: str, ) -> str: - """Returns a fully-qualified ad_partner string.""" - return "networks/{network_code}/adPartners/{ad_partner}".format( + """Returns a fully-qualified network string.""" + return "networks/{network_code}".format( network_code=network_code, - ad_partner=ad_partner, ) @staticmethod - def parse_ad_partner_path(path: str) -> Dict[str, str]: - """Parses a ad_partner path into its component segments.""" - m = re.match( - r"^networks/(?P.+?)/adPartners/(?P.+?)$", path - ) + def parse_network_path(path: str) -> Dict[str, str]: + """Parses a network path into its component segments.""" + m = re.match(r"^networks/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def network_path( + def taxonomy_category_path( network_code: str, + taxonomy_category: str, ) -> str: - """Returns a fully-qualified network string.""" - return "networks/{network_code}".format( + """Returns a fully-qualified taxonomy_category string.""" + return "networks/{network_code}/taxonomyCategories/{taxonomy_category}".format( network_code=network_code, + taxonomy_category=taxonomy_category, ) @staticmethod - def parse_network_path(path: str) -> Dict[str, str]: - """Parses a network path into its component segments.""" - m = re.match(r"^networks/(?P.+?)$", path) + def 
parse_taxonomy_category_path(path: str) -> Dict[str, str]: + """Parses a taxonomy_category path into its component segments.""" + m = re.match( + r"^networks/(?P.+?)/taxonomyCategories/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod @@ -434,15 +439,17 @@ def _get_api_endpoint( elif use_mtls_endpoint == "always" or ( use_mtls_endpoint == "auto" and client_cert_source ): - _default_universe = AdPartnerServiceClient._DEFAULT_UNIVERSE + _default_universe = TaxonomyCategoryServiceClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: raise MutualTLSChannelError( f"mTLS is not supported in any universe other than {_default_universe}." ) - api_endpoint = AdPartnerServiceClient.DEFAULT_MTLS_ENDPOINT + api_endpoint = TaxonomyCategoryServiceClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = AdPartnerServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=universe_domain + api_endpoint = ( + TaxonomyCategoryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) ) return api_endpoint @@ -462,7 +469,7 @@ def _get_universe_domain( Raises: ValueError: If the universe domain is an empty string. """ - universe_domain = AdPartnerServiceClient._DEFAULT_UNIVERSE + universe_domain = TaxonomyCategoryServiceClient._DEFAULT_UNIVERSE if client_universe_domain is not None: universe_domain = client_universe_domain elif universe_domain_env is not None: @@ -488,7 +495,7 @@ def _compare_universes( ValueError: when client_universe does not match the universe in credentials. 
""" - default_universe = AdPartnerServiceClient._DEFAULT_UNIVERSE + default_universe = TaxonomyCategoryServiceClient._DEFAULT_UNIVERSE credentials_universe = getattr(credentials, "universe_domain", default_universe) if client_universe != credentials_universe: @@ -512,7 +519,7 @@ def _validate_universe_domain(self): """ self._is_universe_domain_valid = ( self._is_universe_domain_valid - or AdPartnerServiceClient._compare_universes( + or TaxonomyCategoryServiceClient._compare_universes( self.universe_domain, self.transport._credentials ) ) @@ -542,13 +549,15 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[ Union[ - str, AdPartnerServiceTransport, Callable[..., AdPartnerServiceTransport] + str, + TaxonomyCategoryServiceTransport, + Callable[..., TaxonomyCategoryServiceTransport], ] ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the ad partner service client. + """Instantiates the taxonomy category service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -556,10 +565,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Optional[Union[str,AdPartnerServiceTransport,Callable[..., AdPartnerServiceTransport]]]): + transport (Optional[Union[str,TaxonomyCategoryServiceTransport,Callable[..., TaxonomyCategoryServiceTransport]]]): The transport to use, or a Callable that constructs and returns a new transport. If a Callable is given, it will be called with the same set of initialization - arguments as used in the AdPartnerServiceTransport constructor. + arguments as used in the TaxonomyCategoryServiceTransport constructor. If set to None, a transport is chosen automatically. 
client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -612,11 +621,13 @@ def __init__( self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env, - ) = AdPartnerServiceClient._read_environment_variables() - self._client_cert_source = AdPartnerServiceClient._get_client_cert_source( - self._client_options.client_cert_source, self._use_client_cert + ) = TaxonomyCategoryServiceClient._read_environment_variables() + self._client_cert_source = ( + TaxonomyCategoryServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) ) - self._universe_domain = AdPartnerServiceClient._get_universe_domain( + self._universe_domain = TaxonomyCategoryServiceClient._get_universe_domain( universe_domain_opt, self._universe_domain_env ) self._api_endpoint = None # updated below, depending on `transport` @@ -633,9 +644,9 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, AdPartnerServiceTransport) + transport_provided = isinstance(transport, TaxonomyCategoryServiceTransport) if transport_provided: - # transport is a AdPartnerServiceTransport instance. + # transport is a TaxonomyCategoryServiceTransport instance. if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " @@ -646,12 +657,12 @@ def __init__( "When providing a transport instance, provide its scopes " "directly." 
) - self._transport = cast(AdPartnerServiceTransport, transport) + self._transport = cast(TaxonomyCategoryServiceTransport, transport) self._api_endpoint = self._transport.host self._api_endpoint = ( self._api_endpoint - or AdPartnerServiceClient._get_api_endpoint( + or TaxonomyCategoryServiceClient._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, @@ -670,12 +681,12 @@ def __init__( ) transport_init: Union[ - Type[AdPartnerServiceTransport], - Callable[..., AdPartnerServiceTransport], + Type[TaxonomyCategoryServiceTransport], + Callable[..., TaxonomyCategoryServiceTransport], ] = ( - type(self).get_transport_class(transport) + TaxonomyCategoryServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None - else cast(Callable[..., AdPartnerServiceTransport], transport) + else cast(Callable[..., TaxonomyCategoryServiceTransport], transport) ) # initialize with the provided callable or the passed in class self._transport = transport_init( @@ -690,16 +701,18 @@ def __init__( api_audience=self._client_options.api_audience, ) - def get_ad_partner( + def get_taxonomy_category( self, - request: Optional[Union[ad_partner_service.GetAdPartnerRequest, dict]] = None, + request: Optional[ + Union[taxonomy_category_service.GetTaxonomyCategoryRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_partner_service.AdPartner: - r"""API to retrieve a AdPartner object. + ) -> taxonomy_category_messages.TaxonomyCategory: + r"""API to retrieve a ``TaxonomyCategory`` object. .. 
code-block:: python @@ -712,28 +725,28 @@ def get_ad_partner( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.ads import admanager_v1 - def sample_get_ad_partner(): + def sample_get_taxonomy_category(): # Create a client - client = admanager_v1.AdPartnerServiceClient() + client = admanager_v1.TaxonomyCategoryServiceClient() # Initialize request argument(s) - request = admanager_v1.GetAdPartnerRequest( + request = admanager_v1.GetTaxonomyCategoryRequest( name="name_value", ) # Make the request - response = client.get_ad_partner(request=request) + response = client.get_taxonomy_category(request=request) # Handle the response print(response) Args: - request (Union[google.ads.admanager_v1.types.GetAdPartnerRequest, dict]): - The request object. Request object for GetAdPartner - method. + request (Union[google.ads.admanager_v1.types.GetTaxonomyCategoryRequest, dict]): + The request object. Request object for ``GetTaxonomyCategory`` method. name (str): - Required. The resource name of the AdPartner. Format: - ``networks/{network_code}/adPartners/{ad_partner_id}`` + Required. The resource name of the TaxonomyCategory. + Format: + ``networks/{network_code}/taxonomyCategories/{taxonomy_category_id}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -745,8 +758,8 @@ def sample_get_ad_partner(): sent along with the request as metadata. Returns: - google.ads.admanager_v1.types.AdPartner: - The AdPartner resource. + google.ads.admanager_v1.types.TaxonomyCategory: + The TaxonomyCategory resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -760,8 +773,10 @@ def sample_get_ad_partner(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, ad_partner_service.GetAdPartnerRequest): - request = ad_partner_service.GetAdPartnerRequest(request) + if not isinstance( + request, taxonomy_category_service.GetTaxonomyCategoryRequest + ): + request = taxonomy_category_service.GetTaxonomyCategoryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -769,7 +784,7 @@ def sample_get_ad_partner(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_ad_partner] + rpc = self._transport._wrapped_methods[self._transport.get_taxonomy_category] # Certain fields should be provided within the metadata header; # add these here. @@ -791,16 +806,18 @@ def sample_get_ad_partner(): # Done; return the response. return response - def list_ad_partners( + def list_taxonomy_categories( self, - request: Optional[Union[ad_partner_service.ListAdPartnersRequest, dict]] = None, + request: Optional[ + Union[taxonomy_category_service.ListTaxonomyCategoriesRequest, dict] + ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAdPartnersPager: - r"""API to retrieve a list of AdPartner objects. + ) -> pagers.ListTaxonomyCategoriesPager: + r"""API to retrieve a list of ``TaxonomyCategory`` objects. .. 
code-block:: python @@ -813,29 +830,28 @@ def list_ad_partners( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.ads import admanager_v1 - def sample_list_ad_partners(): + def sample_list_taxonomy_categories(): # Create a client - client = admanager_v1.AdPartnerServiceClient() + client = admanager_v1.TaxonomyCategoryServiceClient() # Initialize request argument(s) - request = admanager_v1.ListAdPartnersRequest( + request = admanager_v1.ListTaxonomyCategoriesRequest( parent="parent_value", ) # Make the request - page_result = client.list_ad_partners(request=request) + page_result = client.list_taxonomy_categories(request=request) # Handle the response for response in page_result: print(response) Args: - request (Union[google.ads.admanager_v1.types.ListAdPartnersRequest, dict]): - The request object. Request object for ListAdPartners - method. + request (Union[google.ads.admanager_v1.types.ListTaxonomyCategoriesRequest, dict]): + The request object. Request object for ``ListTaxonomyCategories`` method. parent (str): Required. The parent, which owns this collection of - AdPartners. Format: ``networks/{network_code}`` + TaxonomyCategories. Format: ``networks/{network_code}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -847,14 +863,12 @@ def sample_list_ad_partners(): sent along with the request as metadata. Returns: - google.ads.admanager_v1.services.ad_partner_service.pagers.ListAdPartnersPager: - Response object for - ListAdPartnersRequest containing - matching AdPartner resources. + google.ads.admanager_v1.services.taxonomy_category_service.pagers.ListTaxonomyCategoriesPager: + Response object for ListTaxonomyCategoriesRequest containing matching + TaxonomyCategory objects. - Iterating over this object will yield - results and resolve additional pages - automatically. + Iterating over this object will yield results and + resolve additional pages automatically. 
""" # Create or coerce a protobuf request object. @@ -869,8 +883,10 @@ def sample_list_ad_partners(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, ad_partner_service.ListAdPartnersRequest): - request = ad_partner_service.ListAdPartnersRequest(request) + if not isinstance( + request, taxonomy_category_service.ListTaxonomyCategoriesRequest + ): + request = taxonomy_category_service.ListTaxonomyCategoriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -878,7 +894,7 @@ def sample_list_ad_partners(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_ad_partners] + rpc = self._transport._wrapped_methods[self._transport.list_taxonomy_categories] # Certain fields should be provided within the metadata header; # add these here. @@ -899,7 +915,7 @@ def sample_list_ad_partners(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListAdPartnersPager( + response = pagers.ListTaxonomyCategoriesPager( method=rpc, request=request, response=response, @@ -911,7 +927,7 @@ def sample_list_ad_partners(): # Done; return the response. 
return response - def __enter__(self) -> "AdPartnerServiceClient": + def __enter__(self) -> "TaxonomyCategoryServiceClient": return self def __exit__(self, type, value, traceback): @@ -987,4 +1003,4 @@ def get_operation( ) -__all__ = ("AdPartnerServiceClient",) +__all__ = ("TaxonomyCategoryServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/pagers.py similarity index 71% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/pagers.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/pagers.py index 1c99245728a1..ba0e74b14baf 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/pagers.py @@ -38,32 +38,35 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import ad_partner_service +from google.ads.admanager_v1.types import ( + taxonomy_category_messages, + taxonomy_category_service, +) -class ListAdPartnersPager: - """A pager for iterating through ``list_ad_partners`` requests. +class ListTaxonomyCategoriesPager: + """A pager for iterating through ``list_taxonomy_categories`` requests. This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListAdPartnersResponse` object, and + :class:`google.ads.admanager_v1.types.ListTaxonomyCategoriesResponse` object, and provides an ``__iter__`` method to iterate through its - ``ad_partners`` field. + ``taxonomy_categories`` field. 
If there are more pages, the ``__iter__`` method will make additional - ``ListAdPartners`` requests and continue to iterate - through the ``ad_partners`` field on the + ``ListTaxonomyCategories`` requests and continue to iterate + through the ``taxonomy_categories`` field on the corresponding responses. - All the usual :class:`google.ads.admanager_v1.types.ListAdPartnersResponse` + All the usual :class:`google.ads.admanager_v1.types.ListTaxonomyCategoriesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., ad_partner_service.ListAdPartnersResponse], - request: ad_partner_service.ListAdPartnersRequest, - response: ad_partner_service.ListAdPartnersResponse, + method: Callable[..., taxonomy_category_service.ListTaxonomyCategoriesResponse], + request: taxonomy_category_service.ListTaxonomyCategoriesRequest, + response: taxonomy_category_service.ListTaxonomyCategoriesResponse, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -74,9 +77,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.ads.admanager_v1.types.ListAdPartnersRequest): + request (google.ads.admanager_v1.types.ListTaxonomyCategoriesRequest): The initial request object. - response (google.ads.admanager_v1.types.ListAdPartnersResponse): + response (google.ads.admanager_v1.types.ListTaxonomyCategoriesResponse): The initial response object. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -85,7 +88,7 @@ def __init__( sent along with the request as metadata. 
""" self._method = method - self._request = ad_partner_service.ListAdPartnersRequest(request) + self._request = taxonomy_category_service.ListTaxonomyCategoriesRequest(request) self._response = response self._retry = retry self._timeout = timeout @@ -95,7 +98,9 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[ad_partner_service.ListAdPartnersResponse]: + def pages( + self, + ) -> Iterator[taxonomy_category_service.ListTaxonomyCategoriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -107,9 +112,9 @@ def pages(self) -> Iterator[ad_partner_service.ListAdPartnersResponse]: ) yield self._response - def __iter__(self) -> Iterator[ad_partner_service.AdPartner]: + def __iter__(self) -> Iterator[taxonomy_category_messages.TaxonomyCategory]: for page in self.pages: - yield from page.ad_partners + yield from page.taxonomy_categories def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/__init__.py similarity index 61% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/__init__.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/__init__.py index 7a88b4ec84e4..7879c3836853 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/__init__.py @@ -16,15 +16,20 @@ from collections import OrderedDict from typing import Dict, Type -from .base import AdPartnerServiceTransport -from .rest import 
AdPartnerServiceRestInterceptor, AdPartnerServiceRestTransport +from .base import TaxonomyCategoryServiceTransport +from .rest import ( + TaxonomyCategoryServiceRestInterceptor, + TaxonomyCategoryServiceRestTransport, +) # Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[AdPartnerServiceTransport]] -_transport_registry["rest"] = AdPartnerServiceRestTransport +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[TaxonomyCategoryServiceTransport]] +_transport_registry["rest"] = TaxonomyCategoryServiceRestTransport __all__ = ( - "AdPartnerServiceTransport", - "AdPartnerServiceRestTransport", - "AdPartnerServiceRestInterceptor", + "TaxonomyCategoryServiceTransport", + "TaxonomyCategoryServiceRestTransport", + "TaxonomyCategoryServiceRestInterceptor", ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/base.py similarity index 86% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/base.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/base.py index 4085651854f9..5d03acb614f2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/base.py @@ -26,15 +26,18 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import creative_service +from google.ads.admanager_v1.types import ( + taxonomy_category_messages, + taxonomy_category_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) -class 
CreativeServiceTransport(abc.ABC): - """Abstract transport class for CreativeService.""" +class TaxonomyCategoryServiceTransport(abc.ABC): + """Abstract transport class for TaxonomyCategoryService.""" AUTH_SCOPES = () @@ -129,13 +132,13 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { - self.get_creative: gapic_v1.method.wrap_method( - self.get_creative, + self.get_taxonomy_category: gapic_v1.method.wrap_method( + self.get_taxonomy_category, default_timeout=None, client_info=client_info, ), - self.list_creatives: gapic_v1.method.wrap_method( - self.list_creatives, + self.list_taxonomy_categories: gapic_v1.method.wrap_method( + self.list_taxonomy_categories, default_timeout=None, client_info=client_info, ), @@ -151,22 +154,25 @@ def close(self): raise NotImplementedError() @property - def get_creative( + def get_taxonomy_category( self, ) -> Callable[ - [creative_service.GetCreativeRequest], - Union[creative_service.Creative, Awaitable[creative_service.Creative]], + [taxonomy_category_service.GetTaxonomyCategoryRequest], + Union[ + taxonomy_category_messages.TaxonomyCategory, + Awaitable[taxonomy_category_messages.TaxonomyCategory], + ], ]: raise NotImplementedError() @property - def list_creatives( + def list_taxonomy_categories( self, ) -> Callable[ - [creative_service.ListCreativesRequest], + [taxonomy_category_service.ListTaxonomyCategoriesRequest], Union[ - creative_service.ListCreativesResponse, - Awaitable[creative_service.ListCreativesResponse], + taxonomy_category_service.ListTaxonomyCategoriesResponse, + Awaitable[taxonomy_category_service.ListTaxonomyCategoriesResponse], ], ]: raise NotImplementedError() @@ -185,4 +191,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ("CreativeServiceTransport",) +__all__ = ("TaxonomyCategoryServiceTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/rest.py 
b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/rest.py similarity index 73% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/rest.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/rest.py index 2c5f921453a5..02cfba6ecb30 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/rest.py @@ -38,10 +38,13 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import ad_partner_service +from google.ads.admanager_v1.types import ( + taxonomy_category_messages, + taxonomy_category_service, +) -from .base import AdPartnerServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TaxonomyCategoryServiceTransport DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -50,8 +53,8 @@ ) -class AdPartnerServiceRestInterceptor: - """Interceptor for AdPartnerService. +class TaxonomyCategoryServiceRestInterceptor: + """Interceptor for TaxonomyCategoryService. Interceptors are used to manipulate requests, request metadata, and responses in arbitrary ways. @@ -61,74 +64,79 @@ class AdPartnerServiceRestInterceptor: * Stripping extraneous information from responses These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the AdPartnerServiceRestTransport. + instance of a custom subclass when constructing the TaxonomyCategoryServiceRestTransport. .. 
code-block:: python - class MyCustomAdPartnerServiceInterceptor(AdPartnerServiceRestInterceptor): - def pre_get_ad_partner(self, request, metadata): + class MyCustomTaxonomyCategoryServiceInterceptor(TaxonomyCategoryServiceRestInterceptor): + def pre_get_taxonomy_category(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_ad_partner(self, response): + def post_get_taxonomy_category(self, response): logging.log(f"Received response: {response}") return response - def pre_list_ad_partners(self, request, metadata): + def pre_list_taxonomy_categories(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_ad_partners(self, response): + def post_list_taxonomy_categories(self, response): logging.log(f"Received response: {response}") return response - transport = AdPartnerServiceRestTransport(interceptor=MyCustomAdPartnerServiceInterceptor()) - client = AdPartnerServiceClient(transport=transport) + transport = TaxonomyCategoryServiceRestTransport(interceptor=MyCustomTaxonomyCategoryServiceInterceptor()) + client = TaxonomyCategoryServiceClient(transport=transport) """ - def pre_get_ad_partner( + def pre_get_taxonomy_category( self, - request: ad_partner_service.GetAdPartnerRequest, + request: taxonomy_category_service.GetTaxonomyCategoryRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[ad_partner_service.GetAdPartnerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_ad_partner + ) -> Tuple[ + taxonomy_category_service.GetTaxonomyCategoryRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_taxonomy_category Override in a subclass to manipulate the request or metadata - before they are sent to the AdPartnerService server. + before they are sent to the TaxonomyCategoryService server. 
""" return request, metadata - def post_get_ad_partner( - self, response: ad_partner_service.AdPartner - ) -> ad_partner_service.AdPartner: - """Post-rpc interceptor for get_ad_partner + def post_get_taxonomy_category( + self, response: taxonomy_category_messages.TaxonomyCategory + ) -> taxonomy_category_messages.TaxonomyCategory: + """Post-rpc interceptor for get_taxonomy_category Override in a subclass to manipulate the response - after it is returned by the AdPartnerService server but before + after it is returned by the TaxonomyCategoryService server but before it is returned to user code. """ return response - def pre_list_ad_partners( + def pre_list_taxonomy_categories( self, - request: ad_partner_service.ListAdPartnersRequest, + request: taxonomy_category_service.ListTaxonomyCategoriesRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[ad_partner_service.ListAdPartnersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_ad_partners + ) -> Tuple[ + taxonomy_category_service.ListTaxonomyCategoriesRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_taxonomy_categories Override in a subclass to manipulate the request or metadata - before they are sent to the AdPartnerService server. + before they are sent to the TaxonomyCategoryService server. """ return request, metadata - def post_list_ad_partners( - self, response: ad_partner_service.ListAdPartnersResponse - ) -> ad_partner_service.ListAdPartnersResponse: - """Post-rpc interceptor for list_ad_partners + def post_list_taxonomy_categories( + self, response: taxonomy_category_service.ListTaxonomyCategoriesResponse + ) -> taxonomy_category_service.ListTaxonomyCategoriesResponse: + """Post-rpc interceptor for list_taxonomy_categories Override in a subclass to manipulate the response - after it is returned by the AdPartnerService server but before + after it is returned by the TaxonomyCategoryService server but before it is returned to user code. 
""" return response @@ -141,7 +149,7 @@ def pre_get_operation( """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata - before they are sent to the AdPartnerService server. + before they are sent to the TaxonomyCategoryService server. """ return request, metadata @@ -151,23 +159,23 @@ def post_get_operation( """Post-rpc interceptor for get_operation Override in a subclass to manipulate the response - after it is returned by the AdPartnerService server but before + after it is returned by the TaxonomyCategoryService server but before it is returned to user code. """ return response @dataclasses.dataclass -class AdPartnerServiceRestStub: +class TaxonomyCategoryServiceRestStub: _session: AuthorizedSession _host: str - _interceptor: AdPartnerServiceRestInterceptor + _interceptor: TaxonomyCategoryServiceRestInterceptor -class AdPartnerServiceRestTransport(AdPartnerServiceTransport): - """REST backend transport for AdPartnerService. +class TaxonomyCategoryServiceRestTransport(TaxonomyCategoryServiceTransport): + """REST backend transport for TaxonomyCategoryService. - Provides methods for handling AdPartner objects. + Provides methods for handling ``TaxonomyCategory`` objects. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -189,7 +197,7 @@ def __init__( client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, url_scheme: str = "https", - interceptor: Optional[AdPartnerServiceRestInterceptor] = None, + interceptor: Optional[TaxonomyCategoryServiceRestInterceptor] = None, api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -250,12 +258,12 @@ def __init__( ) if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or AdPartnerServiceRestInterceptor() + self._interceptor = interceptor or TaxonomyCategoryServiceRestInterceptor() self._prep_wrapped_messages(client_info) - class _GetAdPartner(AdPartnerServiceRestStub): + class _GetTaxonomyCategory(TaxonomyCategoryServiceRestStub): def __hash__(self): - return hash("GetAdPartner") + return hash("GetTaxonomyCategory") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -269,18 +277,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: ad_partner_service.GetAdPartnerRequest, + request: taxonomy_category_service.GetTaxonomyCategoryRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_partner_service.AdPartner: - r"""Call the get ad partner method over HTTP. + ) -> taxonomy_category_messages.TaxonomyCategory: + r"""Call the get taxonomy category method over HTTP. Args: - request (~.ad_partner_service.GetAdPartnerRequest): - The request object. Request object for GetAdPartner - method. + request (~.taxonomy_category_service.GetTaxonomyCategoryRequest): + The request object. Request object for ``GetTaxonomyCategory`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -288,18 +295,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.ad_partner_service.AdPartner: - The AdPartner resource. + ~.taxonomy_category_messages.TaxonomyCategory: + The ``TaxonomyCategory`` resource. 
""" http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/adPartners/*}", + "uri": "/v1/{name=networks/*/taxonomyCategories/*}", }, ] - request, metadata = self._interceptor.pre_get_ad_partner(request, metadata) - pb_request = ad_partner_service.GetAdPartnerRequest.pb(request) + request, metadata = self._interceptor.pre_get_taxonomy_category( + request, metadata + ) + pb_request = taxonomy_category_service.GetTaxonomyCategoryRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -332,16 +343,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = ad_partner_service.AdPartner() - pb_resp = ad_partner_service.AdPartner.pb(resp) + resp = taxonomy_category_messages.TaxonomyCategory() + pb_resp = taxonomy_category_messages.TaxonomyCategory.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_ad_partner(resp) + resp = self._interceptor.post_get_taxonomy_category(resp) return resp - class _ListAdPartners(AdPartnerServiceRestStub): + class _ListTaxonomyCategories(TaxonomyCategoryServiceRestStub): def __hash__(self): - return hash("ListAdPartners") + return hash("ListTaxonomyCategories") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -355,18 +366,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: ad_partner_service.ListAdPartnersRequest, + request: taxonomy_category_service.ListTaxonomyCategoriesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_partner_service.ListAdPartnersResponse: - r"""Call the list ad partners method over HTTP. + ) -> taxonomy_category_service.ListTaxonomyCategoriesResponse: + r"""Call the list taxonomy categories method over HTTP. 
Args: - request (~.ad_partner_service.ListAdPartnersRequest): - The request object. Request object for ListAdPartners - method. + request (~.taxonomy_category_service.ListTaxonomyCategoriesRequest): + The request object. Request object for ``ListTaxonomyCategories`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -374,23 +384,24 @@ def __call__( sent along with the request as metadata. Returns: - ~.ad_partner_service.ListAdPartnersResponse: - Response object for - ListAdPartnersRequest containing - matching AdPartner resources. + ~.taxonomy_category_service.ListTaxonomyCategoriesResponse: + Response object for ``ListTaxonomyCategoriesRequest`` + containing matching ``TaxonomyCategory`` objects. """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{parent=networks/*}/adPartners", + "uri": "/v1/{parent=networks/*}/taxonomyCategories", }, ] - request, metadata = self._interceptor.pre_list_ad_partners( + request, metadata = self._interceptor.pre_list_taxonomy_categories( request, metadata ) - pb_request = ad_partner_service.ListAdPartnersRequest.pb(request) + pb_request = taxonomy_category_service.ListTaxonomyCategoriesRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -423,39 +434,40 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = ad_partner_service.ListAdPartnersResponse() - pb_resp = ad_partner_service.ListAdPartnersResponse.pb(resp) + resp = taxonomy_category_service.ListTaxonomyCategoriesResponse() + pb_resp = taxonomy_category_service.ListTaxonomyCategoriesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_ad_partners(resp) + resp = self._interceptor.post_list_taxonomy_categories(resp) return resp @property - def get_ad_partner( + def 
get_taxonomy_category( self, ) -> Callable[ - [ad_partner_service.GetAdPartnerRequest], ad_partner_service.AdPartner + [taxonomy_category_service.GetTaxonomyCategoryRequest], + taxonomy_category_messages.TaxonomyCategory, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetAdPartner(self._session, self._host, self._interceptor) # type: ignore + return self._GetTaxonomyCategory(self._session, self._host, self._interceptor) # type: ignore @property - def list_ad_partners( + def list_taxonomy_categories( self, ) -> Callable[ - [ad_partner_service.ListAdPartnersRequest], - ad_partner_service.ListAdPartnersResponse, + [taxonomy_category_service.ListTaxonomyCategoriesRequest], + taxonomy_category_service.ListTaxonomyCategoriesResponse, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListAdPartners(self._session, self._host, self._interceptor) # type: ignore + return self._ListTaxonomyCategories(self._session, self._host, self._interceptor) # type: ignore @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(AdPartnerServiceRestStub): + class _GetOperation(TaxonomyCategoryServiceRestStub): def __call__( self, request: operations_pb2.GetOperationRequest, @@ -482,11 +494,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] @@ -529,4 +541,4 @@ def close(self): self._session.close() -__all__ = ("AdPartnerServiceRestTransport",) +__all__ = 
("TaxonomyCategoryServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/pagers.py deleted file mode 100644 index 6b5460c35a1a..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/pagers.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Iterator, - Optional, - Sequence, - Tuple, - Union, -) - -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[ - retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None - ] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.ads.admanager_v1.types import team_service - - -class ListTeamsPager: - """A pager for iterating through ``list_teams`` requests. 
- - This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListTeamsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``teams`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTeams`` requests and continue to iterate - through the ``teams`` field on the - corresponding responses. - - All the usual :class:`google.ads.admanager_v1.types.ListTeamsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., team_service.ListTeamsResponse], - request: team_service.ListTeamsRequest, - response: team_service.ListTeamsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.ads.admanager_v1.types.ListTeamsRequest): - The initial request object. - response (google.ads.admanager_v1.types.ListTeamsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = team_service.ListTeamsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[team_service.ListTeamsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[team_service.Team]: - for page in self.pages: - yield from page.teams - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py index 0b74b925ce06..f0307b68ac1b 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py @@ -50,8 +50,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.services.user_service import pagers -from google.ads.admanager_v1.types import user_service +from google.ads.admanager_v1.types import user_messages, user_service from .transports.base import DEFAULT_CLIENT_INFO, UserServiceTransport from .transports.rest import UserServiceRestTransport @@ -179,21 +178,6 @@ def transport(self) -> UserServiceTransport: """ return self._transport - @staticmethod - def network_path( - network_code: str, - ) -> str: - """Returns a fully-qualified network string.""" - return "networks/{network_code}".format( - network_code=network_code, - ) - - @staticmethod - def parse_network_path(path: str) -> Dict[str, str]: 
- """Parses a network path into its component segments.""" - m = re.match(r"^networks/(?P.+?)$", path) - return m.groupdict() if m else {} - @staticmethod def role_path( network_code: str, @@ -680,7 +664,7 @@ def __init__( transport_init: Union[ Type[UserServiceTransport], Callable[..., UserServiceTransport] ] = ( - type(self).get_transport_class(transport) + UserServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., UserServiceTransport], transport) ) @@ -705,9 +689,12 @@ def get_user( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> user_service.User: + ) -> user_messages.User: r"""API to retrieve a User object. + To get the current user, the resource name + ``networks/{networkCode}/users/me`` can be used. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -797,123 +784,6 @@ def sample_get_user(): # Done; return the response. return response - def list_users( - self, - request: Optional[Union[user_service.ListUsersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListUsersPager: - r"""API to retrieve a list of User objects. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.ads import admanager_v1 - - def sample_list_users(): - # Create a client - client = admanager_v1.UserServiceClient() - - # Initialize request argument(s) - request = admanager_v1.ListUsersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_users(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.ads.admanager_v1.types.ListUsersRequest, dict]): - The request object. Request object for ListUsers method. - parent (str): - Required. The parent, which owns this collection of - Users. Format: ``networks/{network_code}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.ads.admanager_v1.services.user_service.pagers.ListUsersPager: - Response object for ListUsersRequest - containing matching User resources. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, user_service.ListUsersRequest): - request = user_service.ListUsersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_users] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListUsersPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - def __enter__(self) -> "UserServiceClient": return self diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/pagers.py deleted file mode 100644 index 65a2fe539685..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/pagers.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Iterator, - Optional, - Sequence, - Tuple, - Union, -) - -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[ - retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None - ] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.ads.admanager_v1.types import user_service - - -class ListUsersPager: - """A pager for iterating through ``list_users`` requests. - - This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListUsersResponse` object, and - provides an ``__iter__`` method to iterate through its - ``users`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListUsers`` requests and continue to iterate - through the ``users`` field on the - corresponding responses. - - All the usual :class:`google.ads.admanager_v1.types.ListUsersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., user_service.ListUsersResponse], - request: user_service.ListUsersRequest, - response: user_service.ListUsersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.ads.admanager_v1.types.ListUsersRequest): - The initial request object. - response (google.ads.admanager_v1.types.ListUsersResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = user_service.ListUsersRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[user_service.ListUsersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[user_service.User]: - for page in self.pages: - yield from page.users - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/base.py index 1212bbbf1c14..bc4ef1797f43 100644 --- 
a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import user_service +from google.ads.admanager_v1.types import user_messages, user_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -134,11 +134,6 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - self.list_users: gapic_v1.method.wrap_method( - self.list_users, - default_timeout=None, - client_info=client_info, - ), } def close(self): @@ -155,18 +150,7 @@ def get_user( self, ) -> Callable[ [user_service.GetUserRequest], - Union[user_service.User, Awaitable[user_service.User]], - ]: - raise NotImplementedError() - - @property - def list_users( - self, - ) -> Callable[ - [user_service.ListUsersRequest], - Union[ - user_service.ListUsersResponse, Awaitable[user_service.ListUsersResponse] - ], + Union[user_messages.User, Awaitable[user_messages.User]], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/rest.py index 616531e7d102..7ec694c9237b 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import user_service +from google.ads.admanager_v1.types import user_messages, user_service from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO 
from .base import UserServiceTransport @@ -73,14 +73,6 @@ def post_get_user(self, response): logging.log(f"Received response: {response}") return response - def pre_list_users(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_users(self, response): - logging.log(f"Received response: {response}") - return response - transport = UserServiceRestTransport(interceptor=MyCustomUserServiceInterceptor()) client = UserServiceClient(transport=transport) @@ -97,7 +89,7 @@ def pre_get_user( """ return request, metadata - def post_get_user(self, response: user_service.User) -> user_service.User: + def post_get_user(self, response: user_messages.User) -> user_messages.User: """Post-rpc interceptor for get_user Override in a subclass to manipulate the response @@ -106,29 +98,6 @@ def post_get_user(self, response: user_service.User) -> user_service.User: """ return response - def pre_list_users( - self, - request: user_service.ListUsersRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[user_service.ListUsersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_users - - Override in a subclass to manipulate the request or metadata - before they are sent to the UserService server. - """ - return request, metadata - - def post_list_users( - self, response: user_service.ListUsersResponse - ) -> user_service.ListUsersResponse: - """Post-rpc interceptor for list_users - - Override in a subclass to manipulate the response - after it is returned by the UserService server but before - it is returned to user code. - """ - return response - def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -270,7 +239,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> user_service.User: + ) -> user_messages.User: r"""Call the get user method over HTTP. 
Args: @@ -283,7 +252,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.user_service.User: + ~.user_messages.User: The User resource. """ @@ -327,114 +296,19 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = user_service.User() - pb_resp = user_service.User.pb(resp) + resp = user_messages.User() + pb_resp = user_messages.User.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_user(resp) return resp - class _ListUsers(UserServiceRestStub): - def __hash__(self): - return hash("ListUsers") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: user_service.ListUsersRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> user_service.ListUsersResponse: - r"""Call the list users method over HTTP. - - Args: - request (~.user_service.ListUsersRequest): - The request object. Request object for ListUsers method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.user_service.ListUsersResponse: - Response object for ListUsersRequest - containing matching User resources. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=networks/*}/users", - }, - ] - request, metadata = self._interceptor.pre_list_users(request, metadata) - pb_request = user_service.ListUsersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = user_service.ListUsersResponse() - pb_resp = user_service.ListUsersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_users(resp) - return resp - @property - def get_user(self) -> Callable[[user_service.GetUserRequest], user_service.User]: + def get_user(self) -> Callable[[user_service.GetUserRequest], user_messages.User]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast return self._GetUser(self._session, self._host, self._interceptor) # type: ignore - @property - def list_users( - self, - ) -> Callable[[user_service.ListUsersRequest], user_service.ListUsersResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListUsers(self._session, self._host, self._interceptor) # type: ignore - @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -466,11 +340,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/__init__.py index 36867842d446..79767e5a1d19 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/__init__.py @@ -13,69 +13,46 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .ad_partner_declaration import AdPartnerDeclaration, DeclarationTypeEnum -from .ad_partner_service import ( - AdPartner, - GetAdPartnerRequest, - ListAdPartnersRequest, - ListAdPartnersResponse, -) -from .ad_unit_enums import AppliedAdsenseEnabledEnum +from .ad_unit_enums import AdUnitStatusEnum, SmartSizeModeEnum, TargetWindowEnum +from .ad_unit_messages import AdUnit, AdUnitParent, AdUnitSize, LabelFrequencyCap from .ad_unit_service import ( - AdUnit, - AdUnitParent, GetAdUnitRequest, - LabelFrequencyCap, + ListAdUnitSizesRequest, + ListAdUnitSizesResponse, ListAdUnitsRequest, ListAdUnitsResponse, - SmartSizeModeEnum, - TargetWindowEnum, ) -from .ad_unit_size import AdUnitSize from .admanager_error import AdManagerError from .applied_label import AppliedLabel from .company_credit_status_enum import CompanyCreditStatusEnum +from .company_messages import Company from .company_service import ( - Company, GetCompanyRequest, ListCompaniesRequest, ListCompaniesResponse, ) from .company_type_enum import CompanyTypeEnum -from .computed_status_enum import ComputedStatusEnum -from .contact_service import ( - Contact, - GetContactRequest, - ListContactsRequest, - ListContactsResponse, -) -from .creative_placeholder import CreativePlaceholder -from .creative_service import ( - Creative, - GetCreativeRequest, - ListCreativesRequest, - ListCreativesResponse, -) +from .contact_messages import Contact from .custom_field_enums import ( CustomFieldDataTypeEnum, CustomFieldEntityTypeEnum, CustomFieldStatusEnum, CustomFieldVisibilityEnum, ) +from .custom_field_messages import CustomField, CustomFieldOption from .custom_field_service import ( - CustomField, - CustomFieldOption, GetCustomFieldRequest, ListCustomFieldsRequest, ListCustomFieldsResponse, ) +from .custom_field_value import CustomFieldValue from .custom_targeting_key_enums import ( CustomTargetingKeyReportableTypeEnum, CustomTargetingKeyStatusEnum, CustomTargetingKeyTypeEnum, ) +from .custom_targeting_key_messages 
import CustomTargetingKey from .custom_targeting_key_service import ( - CustomTargetingKey, GetCustomTargetingKeyRequest, ListCustomTargetingKeysRequest, ListCustomTargetingKeysResponse, @@ -84,67 +61,89 @@ CustomTargetingValueMatchTypeEnum, CustomTargetingValueStatusEnum, ) +from .custom_targeting_value_messages import CustomTargetingValue from .custom_targeting_value_service import ( - CustomTargetingValue, GetCustomTargetingValueRequest, ListCustomTargetingValuesRequest, ListCustomTargetingValuesResponse, ) -from .environment_type_enum import EnvironmentTypeEnum -from .frequency_cap import FrequencyCap, TimeUnitEnum -from .goal import Goal, GoalTypeEnum, UnitTypeEnum -from .label_service import GetLabelRequest, Label, ListLabelsRequest, ListLabelsResponse -from .line_item_enums import ( - CreativeRotationTypeEnum, - DeliveryRateTypeEnum, - LineItemCostTypeEnum, - LineItemDiscountTypeEnum, - LineItemTypeEnum, - ReservationStatusEnum, +from .entity_signals_mapping_messages import EntitySignalsMapping +from .entity_signals_mapping_service import ( + BatchCreateEntitySignalsMappingsRequest, + BatchCreateEntitySignalsMappingsResponse, + BatchUpdateEntitySignalsMappingsRequest, + BatchUpdateEntitySignalsMappingsResponse, + CreateEntitySignalsMappingRequest, + GetEntitySignalsMappingRequest, + ListEntitySignalsMappingsRequest, + ListEntitySignalsMappingsResponse, + UpdateEntitySignalsMappingRequest, ) -from .line_item_service import ( - GetLineItemRequest, - LineItem, - ListLineItemsRequest, - ListLineItemsResponse, -) -from .network_service import GetNetworkRequest, Network -from .order_service import GetOrderRequest, ListOrdersRequest, ListOrdersResponse, Order +from .environment_type_enum import EnvironmentTypeEnum +from .frequency_cap import FrequencyCap +from .label_messages import Label +from .network_messages import Network +from .network_service import ( + GetNetworkRequest, + ListNetworksRequest, + ListNetworksResponse, +) +from .order_enums import 
OrderStatusEnum +from .order_messages import Order +from .order_service import GetOrderRequest, ListOrdersRequest, ListOrdersResponse from .placement_enums import PlacementStatusEnum +from .placement_messages import Placement from .placement_service import ( GetPlacementRequest, ListPlacementsRequest, ListPlacementsResponse, - Placement, ) from .report_service import ( - ExportSavedReportMetadata, - ExportSavedReportRequest, - ExportSavedReportResponse, + CreateReportRequest, + FetchReportResultRowsRequest, + FetchReportResultRowsResponse, + GetReportRequest, + ListReportsRequest, + ListReportsResponse, Report, -) -from .role_service import GetRoleRequest, ListRolesRequest, ListRolesResponse, Role -from .size import Size, SizeTypeEnum -from .team_service import GetTeamRequest, ListTeamsRequest, ListTeamsResponse, Team -from .user_service import GetUserRequest, ListUsersRequest, ListUsersResponse, User + ReportDefinition, + RunReportMetadata, + RunReportRequest, + RunReportResponse, + Schedule, + ScheduleOptions, + UpdateReportRequest, +) +from .role_enums import RoleStatusEnum +from .role_messages import Role +from .role_service import GetRoleRequest, ListRolesRequest, ListRolesResponse +from .size import Size +from .size_type_enum import SizeTypeEnum +from .taxonomy_category_messages import TaxonomyCategory +from .taxonomy_category_service import ( + GetTaxonomyCategoryRequest, + ListTaxonomyCategoriesRequest, + ListTaxonomyCategoriesResponse, +) +from .taxonomy_type_enum import TaxonomyTypeEnum +from .team_messages import Team +from .time_unit_enum import TimeUnitEnum +from .user_messages import User +from .user_service import GetUserRequest __all__ = ( - "AdPartnerDeclaration", - "DeclarationTypeEnum", - "AdPartner", - "GetAdPartnerRequest", - "ListAdPartnersRequest", - "ListAdPartnersResponse", - "AppliedAdsenseEnabledEnum", + "AdUnitStatusEnum", + "SmartSizeModeEnum", + "TargetWindowEnum", "AdUnit", "AdUnitParent", - "GetAdUnitRequest", + "AdUnitSize", 
"LabelFrequencyCap", + "GetAdUnitRequest", + "ListAdUnitSizesRequest", + "ListAdUnitSizesResponse", "ListAdUnitsRequest", "ListAdUnitsResponse", - "SmartSizeModeEnum", - "TargetWindowEnum", - "AdUnitSize", "AdManagerError", "AppliedLabel", "CompanyCreditStatusEnum", @@ -153,16 +152,7 @@ "ListCompaniesRequest", "ListCompaniesResponse", "CompanyTypeEnum", - "ComputedStatusEnum", "Contact", - "GetContactRequest", - "ListContactsRequest", - "ListContactsResponse", - "CreativePlaceholder", - "Creative", - "GetCreativeRequest", - "ListCreativesRequest", - "ListCreativesResponse", "CustomFieldDataTypeEnum", "CustomFieldEntityTypeEnum", "CustomFieldStatusEnum", @@ -172,6 +162,7 @@ "GetCustomFieldRequest", "ListCustomFieldsRequest", "ListCustomFieldsResponse", + "CustomFieldValue", "CustomTargetingKeyReportableTypeEnum", "CustomTargetingKeyStatusEnum", "CustomTargetingKeyTypeEnum", @@ -185,53 +176,61 @@ "GetCustomTargetingValueRequest", "ListCustomTargetingValuesRequest", "ListCustomTargetingValuesResponse", + "EntitySignalsMapping", + "BatchCreateEntitySignalsMappingsRequest", + "BatchCreateEntitySignalsMappingsResponse", + "BatchUpdateEntitySignalsMappingsRequest", + "BatchUpdateEntitySignalsMappingsResponse", + "CreateEntitySignalsMappingRequest", + "GetEntitySignalsMappingRequest", + "ListEntitySignalsMappingsRequest", + "ListEntitySignalsMappingsResponse", + "UpdateEntitySignalsMappingRequest", "EnvironmentTypeEnum", "FrequencyCap", - "TimeUnitEnum", - "Goal", - "GoalTypeEnum", - "UnitTypeEnum", - "GetLabelRequest", "Label", - "ListLabelsRequest", - "ListLabelsResponse", - "CreativeRotationTypeEnum", - "DeliveryRateTypeEnum", - "LineItemCostTypeEnum", - "LineItemDiscountTypeEnum", - "LineItemTypeEnum", - "ReservationStatusEnum", - "GetLineItemRequest", - "LineItem", - "ListLineItemsRequest", - "ListLineItemsResponse", - "GetNetworkRequest", "Network", + "GetNetworkRequest", + "ListNetworksRequest", + "ListNetworksResponse", + "OrderStatusEnum", + "Order", 
"GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", - "Order", "PlacementStatusEnum", + "Placement", "GetPlacementRequest", "ListPlacementsRequest", "ListPlacementsResponse", - "Placement", - "ExportSavedReportMetadata", - "ExportSavedReportRequest", - "ExportSavedReportResponse", + "CreateReportRequest", + "FetchReportResultRowsRequest", + "FetchReportResultRowsResponse", + "GetReportRequest", + "ListReportsRequest", + "ListReportsResponse", "Report", + "ReportDefinition", + "RunReportMetadata", + "RunReportRequest", + "RunReportResponse", + "Schedule", + "ScheduleOptions", + "UpdateReportRequest", + "RoleStatusEnum", + "Role", "GetRoleRequest", "ListRolesRequest", "ListRolesResponse", - "Role", "Size", "SizeTypeEnum", - "GetTeamRequest", - "ListTeamsRequest", - "ListTeamsResponse", + "TaxonomyCategory", + "GetTaxonomyCategoryRequest", + "ListTaxonomyCategoriesRequest", + "ListTaxonomyCategoriesResponse", + "TaxonomyTypeEnum", "Team", - "GetUserRequest", - "ListUsersRequest", - "ListUsersResponse", + "TimeUnitEnum", "User", + "GetUserRequest", ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_declaration.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_declaration.py deleted file mode 100644 index 23d969b2e272..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_declaration.py +++ /dev/null @@ -1,80 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "AdPartnerDeclaration", - "DeclarationTypeEnum", - }, -) - - -class AdPartnerDeclaration(proto.Message): - r"""Represents a set of declarations about what (if any) ad - partners are associated with a given creative. This can be set - at the network level, as a default for all creatives, or - overridden for a particular creative. - - Attributes: - type_ (google.ads.admanager_v1.types.DeclarationTypeEnum.DeclarationType): - They type of declaration. - ad_partners (MutableSequence[str]): - The resource names of AdPartners being declared. Format: - "networks/{network_code}/adPartners/{ad_partner_id}". - """ - - type_: "DeclarationTypeEnum.DeclarationType" = proto.Field( - proto.ENUM, - number=1, - enum="DeclarationTypeEnum.DeclarationType", - ) - ad_partners: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - -class DeclarationTypeEnum(proto.Message): - r"""Wrapper message for - [DeclarationTypeEnum][google.ads.admanager.v1.DeclarationTypeEnum]. - - """ - - class DeclarationType(proto.Enum): - r"""The declaration about third party data usage on the - associated entity. - - Values: - DECLARATION_TYPE_UNSPECIFIED (0): - Default value. This value is unused. - NONE (1): - No ad technology providers to declare. - DECLARED (2): - There are are ad technology providers to - declare on this entity. 
- """ - DECLARATION_TYPE_UNSPECIFIED = 0 - NONE = 1 - DECLARED = 2 - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_enums.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_enums.py index 31ddc4b2a8fe..6ab55de981f0 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_enums.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_enums.py @@ -22,36 +22,91 @@ __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "AppliedAdsenseEnabledEnum", + "AdUnitStatusEnum", + "SmartSizeModeEnum", + "TargetWindowEnum", }, ) -class AppliedAdsenseEnabledEnum(proto.Message): +class AdUnitStatusEnum(proto.Message): r"""Wrapper message for - [AppliedAdsenseEnabled][google.ads.admanager.v1.AppliedAdsenseEnabledEnum.AppliedAdsenseEnabled] + [AdUnitStatus][google.ads.admanager.v1.AdUnitStatusEnum.AdUnitStatus] """ - class AppliedAdsenseEnabled(proto.Enum): - r"""Specifies if serving ads from the AdSense content network is - enabled. + class AdUnitStatus(proto.Enum): + r"""The status of an AdUnit. Values: - APPLIED_ADSENSE_ENABLED_UNSPECIFIED (0): - No adsense enabled setting applied directly; - value will be inherited from parent or system - default. - TRUE (1): - Serving ads from AdSense content network is - enabled. - FALSE (2): - Serving ads from AdSense content network is - disabled. + AD_UNIT_STATUS_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + The ad unit is active, available for + targeting, and serving. + INACTIVE (2): + The ad unit will be visible in the UI, but + ignored by serving. + ARCHIVED (3): + The ad unit will be hidden in the UI and + ignored by serving. 
""" - APPLIED_ADSENSE_ENABLED_UNSPECIFIED = 0 - TRUE = 1 - FALSE = 2 + AD_UNIT_STATUS_UNSPECIFIED = 0 + ACTIVE = 1 + INACTIVE = 2 + ARCHIVED = 3 + + +class SmartSizeModeEnum(proto.Message): + r"""Wrapper message for + [SmartSizeMode][google.ads.admanager.v1.SmartSizeModeEnum.SmartSizeMode]. + + """ + + class SmartSizeMode(proto.Enum): + r"""The smart size mode for this ad unit. This attribute is + optional and defaults to SmartSizeMode.NONE for fixed sizes. + + Values: + SMART_SIZE_MODE_UNSPECIFIED (0): + Default value. This value is unused. + NONE (1): + Fixed size mode (default). + SMART_BANNER (2): + The height is fixed for the request, the + width is a range. + DYNAMIC_SIZE (3): + Height and width are ranges. + """ + SMART_SIZE_MODE_UNSPECIFIED = 0 + NONE = 1 + SMART_BANNER = 2 + DYNAMIC_SIZE = 3 + + +class TargetWindowEnum(proto.Message): + r"""Wrapper message for + [TargetWindow][google.ads.admanager.v1.TargetWindowEnum.TargetWindow]. + + """ + + class TargetWindow(proto.Enum): + r"""Corresponds to an HTML link's target attribute. + See http://www.w3.org/TR/html401/present/frames.html#adef-target + + Values: + TARGET_WINDOW_UNSPECIFIED (0): + Default value. This value is unused. + TOP (1): + Specifies that the link should open in the + full body of the page. + BLANK (2): + Specifies that the link should open in a new + window. 
+ """ + TARGET_WINDOW_UNSPECIFIED = 0 + TOP = 1 + BLANK = 2 __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_messages.py new file mode 100644 index 000000000000..85d82af952a2 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_messages.py @@ -0,0 +1,369 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.ads.admanager_v1.types import ( + ad_unit_enums, + applied_label, + environment_type_enum, +) +from google.ads.admanager_v1.types import frequency_cap as gaa_frequency_cap +from google.ads.admanager_v1.types import size as gaa_size + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "AdUnit", + "AdUnitSize", + "AdUnitParent", + "LabelFrequencyCap", + }, +) + + +class AdUnit(proto.Message): + r"""The AdUnit resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the AdUnit. 
Format: + ``networks/{network_code}/adUnits/{ad_unit_id}`` + ad_unit_id (int): + Output only. AdUnit ID. + parent_ad_unit (str): + Required. Immutable. The AdUnit's parent. Every ad unit has + a parent except for the root ad unit, which is created by + Google. Format: + "networks/{network_code}/adUnits/{ad_unit_id}". + parent_path (MutableSequence[google.ads.admanager_v1.types.AdUnitParent]): + Output only. The path to this AdUnit in the + ad unit hierarchy represented as a list from the + root to this ad unit's parent. For root ad + units, this list is empty. + display_name (str): + Required. The display name of the ad unit. + Its maximum length is 255 characters. + ad_unit_code (str): + Immutable. A string used to uniquely identify + the ad unit for the purposes of serving the ad. + This attribute is optional and can be set during + ad unit creation. If it is not provided, it will + be assigned by Google based on the ad unit ID. + status (google.ads.admanager_v1.types.AdUnitStatusEnum.AdUnitStatus): + Output only. The status of this ad unit. It + defaults to ACTIVE. + applied_target_window (google.ads.admanager_v1.types.TargetWindowEnum.TargetWindow): + Optional. The target window directly applied + to this AdUnit. If this field is not set, this + AdUnit uses the target window specified in + effectiveTargetWindow. + effective_target_window (google.ads.admanager_v1.types.TargetWindowEnum.TargetWindow): + Output only. Non-empty default. The target + window of this AdUnit. This value is inherited + from ancestor AdUnits and defaults to TOP if no + AdUnit in the hierarchy specifies it. + applied_teams (MutableSequence[str]): + Optional. The resource names of Teams directly applied to + this AdUnit. Format: + "networks/{network_code}/teams/{team_id}". + teams (MutableSequence[str]): + Output only. The resource names of all Teams that this + AdUnit is on as well as those inherited from parent AdUnits. + Format: "networks/{network_code}/teams/{team_id}". 
+ description (str): + Optional. A description of the ad unit. The + maximum length is 65,535 characters. + explicitly_targeted (bool): + Optional. If this field is set to true, then + the AdUnit will not be implicitly targeted when + its parent is. Traffickers must explicitly + target such an AdUnit or else no line items will + serve to it. This feature is only available for + Ad Manager 360 accounts. + has_children (bool): + Output only. This field is set to true if the + ad unit has any children. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time this AdUnit was last + modified. + ad_unit_sizes (MutableSequence[google.ads.admanager_v1.types.AdUnitSize]): + Optional. The sizes that can be served inside + this ad unit. + external_set_top_box_channel_id (str): + Optional. Determines what set top box video + on demand channel this ad unit corresponds to in + an external set top box ad campaign system. + refresh_delay (google.protobuf.duration_pb2.Duration): + Optional. The duration after which an Ad Unit + will automatically refresh. This is only valid + for ad units in mobile apps. If not set, the ad + unit will not refresh. + applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): + Optional. The set of labels applied directly + to this ad unit. + effective_applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): + Output only. Contains the set of labels + applied directly to the ad unit as well as those + inherited from the parent ad units. If a label + has been negated, only the negated label is + returned. This field is readonly and is assigned + by Google. + applied_label_frequency_caps (MutableSequence[google.ads.admanager_v1.types.LabelFrequencyCap]): + Optional. The set of label frequency caps + applied directly to this ad unit. There is a + limit of 10 label frequency caps per ad unit. 
+ effective_label_frequency_caps (MutableSequence[google.ads.admanager_v1.types.LabelFrequencyCap]): + Output only. The label frequency caps applied + directly to the ad unit as well as those + inherited from parent ad units. + smart_size_mode (google.ads.admanager_v1.types.SmartSizeModeEnum.SmartSizeMode): + Optional. The smart size mode for this ad + unit. This attribute is optional and defaults to + SmartSizeMode.NONE for fixed sizes. + applied_adsense_enabled (bool): + Optional. The value of AdSense enabled + directly applied to this ad unit. This attribute + is optional and if not specified this ad unit + will inherit the value of + effectiveAdsenseEnabled from its ancestors. + + This field is a member of `oneof`_ ``_applied_adsense_enabled``. + effective_adsense_enabled (bool): + Output only. Specifies whether or not the + AdUnit is enabled for serving ads from the + AdSense content network. This attribute defaults + to the ad unit's parent or ancestor's setting if + one has been set. If no ancestor of the ad unit + has set appliedAdsenseEnabled, the attribute is + defaulted to true. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + ad_unit_id: int = proto.Field( + proto.INT64, + number=15, + ) + parent_ad_unit: str = proto.Field( + proto.STRING, + number=10, + ) + parent_path: MutableSequence["AdUnitParent"] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message="AdUnitParent", + ) + display_name: str = proto.Field( + proto.STRING, + number=9, + ) + ad_unit_code: str = proto.Field( + proto.STRING, + number=2, + ) + status: ad_unit_enums.AdUnitStatusEnum.AdUnitStatus = proto.Field( + proto.ENUM, + number=13, + enum=ad_unit_enums.AdUnitStatusEnum.AdUnitStatus, + ) + applied_target_window: ad_unit_enums.TargetWindowEnum.TargetWindow = proto.Field( + proto.ENUM, + number=44, + enum=ad_unit_enums.TargetWindowEnum.TargetWindow, + ) + effective_target_window: ad_unit_enums.TargetWindowEnum.TargetWindow = proto.Field( + proto.ENUM, + number=45, + enum=ad_unit_enums.TargetWindowEnum.TargetWindow, + ) + applied_teams: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + teams: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + explicitly_targeted: bool = proto.Field( + proto.BOOL, + number=6, + ) + has_children: bool = proto.Field( + proto.BOOL, + number=7, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + ad_unit_sizes: MutableSequence["AdUnitSize"] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="AdUnitSize", + ) + external_set_top_box_channel_id: str = proto.Field( + proto.STRING, + number=17, + ) + refresh_delay: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=19, + message=duration_pb2.Duration, + ) + applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( + proto.MESSAGE, + number=21, + message=applied_label.AppliedLabel, + ) + effective_applied_labels: MutableSequence[ + 
applied_label.AppliedLabel + ] = proto.RepeatedField( + proto.MESSAGE, + number=22, + message=applied_label.AppliedLabel, + ) + applied_label_frequency_caps: MutableSequence[ + "LabelFrequencyCap" + ] = proto.RepeatedField( + proto.MESSAGE, + number=23, + message="LabelFrequencyCap", + ) + effective_label_frequency_caps: MutableSequence[ + "LabelFrequencyCap" + ] = proto.RepeatedField( + proto.MESSAGE, + number=24, + message="LabelFrequencyCap", + ) + smart_size_mode: ad_unit_enums.SmartSizeModeEnum.SmartSizeMode = proto.Field( + proto.ENUM, + number=25, + enum=ad_unit_enums.SmartSizeModeEnum.SmartSizeMode, + ) + applied_adsense_enabled: bool = proto.Field( + proto.BOOL, + number=26, + optional=True, + ) + effective_adsense_enabled: bool = proto.Field( + proto.BOOL, + number=27, + ) + + +class AdUnitSize(proto.Message): + r"""Represents the size, environment, and companions of an ad in + an ad unit. + + Attributes: + size (google.ads.admanager_v1.types.Size): + Required. The Size of the AdUnit. + environment_type (google.ads.admanager_v1.types.EnvironmentTypeEnum.EnvironmentType): + Required. The EnvironmentType of the AdUnit + companions (MutableSequence[google.ads.admanager_v1.types.Size]): + The companions for this ad unit size. Companions are only + valid if the environment is + [VIDEO_PLAYER][google.ads.admanager.v1.EnvironmentTypeEnum.EnvironmentType]. + """ + + size: gaa_size.Size = proto.Field( + proto.MESSAGE, + number=1, + message=gaa_size.Size, + ) + environment_type: environment_type_enum.EnvironmentTypeEnum.EnvironmentType = ( + proto.Field( + proto.ENUM, + number=2, + enum=environment_type_enum.EnvironmentTypeEnum.EnvironmentType, + ) + ) + companions: MutableSequence[gaa_size.Size] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=gaa_size.Size, + ) + + +class AdUnitParent(proto.Message): + r"""The summary of a parent AdUnit. + + Attributes: + parent_ad_unit (str): + Output only. 
The parent of the current AdUnit Format: + ``networks/{network_code}/adUnits/{ad_unit_id}`` + display_name (str): + Output only. The display name of the parent + AdUnit. + ad_unit_code (str): + Output only. A string used to uniquely + identify the ad unit for the purposes of serving + the ad. + """ + + parent_ad_unit: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + ad_unit_code: str = proto.Field( + proto.STRING, + number=3, + ) + + +class LabelFrequencyCap(proto.Message): + r"""Frequency cap using a label. + + Attributes: + label (str): + Required. The label to used for frequency capping. Format: + "networks/{network_code}/labels/{label_id}". + frequency_cap (google.ads.admanager_v1.types.FrequencyCap): + The frequency cap. + """ + + label: str = proto.Field( + proto.STRING, + number=1, + ) + frequency_cap: gaa_frequency_cap.FrequencyCap = proto.Field( + proto.MESSAGE, + number=2, + message=gaa_frequency_cap.FrequencyCap, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_service.py index 1150d9f25c0c..6e87bb669a0e 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_service.py @@ -17,422 +17,163 @@ from typing import MutableMapping, MutableSequence -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore -from google.ads.admanager_v1.types import ad_unit_enums, ad_unit_size, applied_label -from google.ads.admanager_v1.types import frequency_cap as gaa_frequency_cap +from google.ads.admanager_v1.types import ad_unit_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "AdUnit", - "AdUnitParent", - 
"TargetWindowEnum", - "LabelFrequencyCap", - "SmartSizeModeEnum", "GetAdUnitRequest", "ListAdUnitsRequest", "ListAdUnitsResponse", + "ListAdUnitSizesRequest", + "ListAdUnitSizesResponse", }, ) -class AdUnit(proto.Message): - r"""The AdUnit resource. +class GetAdUnitRequest(proto.Message): + r"""Request object for GetAdUnit method. Attributes: name (str): - Identifier. The resource name of the AdUnit. Format: + Required. The resource name of the AdUnit. Format: ``networks/{network_code}/adUnits/{ad_unit_id}`` - ad_unit_id (int): - Output only. AdUnit ID. - parent_ad_unit (str): - Required. Immutable. The AdUnit's parent. Every ad unit has - a parent except for the root ad unit, which is created by - Google. Format: - "networks/{network_code}/adUnits/{ad_unit_id}". - parent_path (MutableSequence[google.ads.admanager_v1.types.AdUnitParent]): - Output only. The path to this AdUnit in the - ad unit hierarchy represented as a list from the - root to this ad unit's parent. For root ad - units, this list is empty. - display_name (str): - Required. The display name of the ad unit. - Its maximum length is 255 characters. - ad_unit_code (str): - Immutable. A string used to uniquely identify - the ad unit for the purposes of serving the ad. - This attribute is optional and can be set during - ad unit creation. If it is not provided, it will - be assigned by Google based off of the ad unit - ID. - status (google.ads.admanager_v1.types.AdUnit.Status): - Output only. The status of this ad unit. It - defaults to ACTIVE. - target_window (google.ads.admanager_v1.types.TargetWindowEnum.TargetWindow): - Non-empty default. The value to use for the - HTML link's target attribute. This value will be - interpreted as TOP if left blank. - applied_teams (MutableSequence[str]): - Optional. The resource names of Teams directly applied to - this AdUnit. Format: - "networks/{network_code}/teams/{team_id}". - teams (MutableSequence[str]): - Output only. 
The resource names of all Teams that this - AdUnit is on as well as those inherited from parent AdUnits. - Format: "networks/{network_code}/teams/{team_id}". - description (str): - Optional. A description of the ad unit. The - maximum length is 65,535 characters. - explicitly_targeted (bool): - Optional. If this field is set to true, then - the AdUnit will not be implicitly targeted when - its parent is. Traffickers must explicitly - target such an AdUnit or else no line items will - serve to it. This feature is only available for - Ad Manager 360 accounts. - has_children (bool): - Output only. This field is set to true if the - ad unit has any children. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant this AdUnit was last - modified. - ad_unit_sizes (MutableSequence[google.ads.admanager_v1.types.AdUnitSize]): - Optional. The sizes that can be served inside - this ad unit. - external_set_top_box_channel_id (str): - Optional. Determines what set top box video - on demand channel this ad unit corresponds to in - an external set top box ad campaign system. - refresh_delay (google.protobuf.duration_pb2.Duration): - Optional. The duration after which an Ad Unit - will automatically refresh. This is only valid - for ad units in mobile apps. If not set, the ad - unit will not refresh. - ctv_application_id (int): - Optional. The ID of the CTV application that - this ad unit is within. - applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): - Optional. The set of labels applied directly - to this ad unit. - effective_applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): - Output only. Contains the set of labels - applied directly to the ad unit as well as those - inherited from the parent ad units. If a label - has been negated, only the negated label is - returned. This field is readonly and is assigned - by Google. 
- applied_label_frequency_caps (MutableSequence[google.ads.admanager_v1.types.LabelFrequencyCap]): - Optional. The set of label frequency caps - applied directly to this ad unit. There is a - limit of 10 label frequency caps per ad unit. - effective_label_frequency_caps (MutableSequence[google.ads.admanager_v1.types.LabelFrequencyCap]): - Output only. The label frequency caps applied - directly to the ad unit as well as those - inherited from parent ad units. - smart_size_mode (google.ads.admanager_v1.types.SmartSizeModeEnum.SmartSizeMode): - Optional. The smart size mode for this ad - unit. This attribute is optional and defaults to - SmartSizeMode.NONE for fixed sizes. - applied_adsense_enabled (google.ads.admanager_v1.types.AppliedAdsenseEnabledEnum.AppliedAdsenseEnabled): - Optional. The value of AdSense enabled - directly applied to this ad unit. This attribute - is optional and if not specified this ad unit - will inherit the value of - effectiveAdsenseEnabled from its ancestors. - effective_adsense_enabled (bool): - Output only. Specifies whether or not the - AdUnit is enabled for serving ads from the - AdSense content network. This attribute defaults - to the ad unit's parent or ancestor's setting if - one has been set. If no ancestor of the ad unit - has set appliedAdsenseEnabled, the attribute is - defaulted to true. """ - class Status(proto.Enum): - r"""The status of an AdUnit. - - Values: - STATUS_UNSPECIFIED (0): - Default value. This value is unused. - ACTIVE (1): - The ad unit is active, available for - targeting, and serving. - INACTIVE (2): - The ad unit will be visible in the UI, but - ignored by serving. - ARCHIVED (3): - The ad unit will be hidden in the UI and - ignored by serving. 
- """ - STATUS_UNSPECIFIED = 0 - ACTIVE = 1 - INACTIVE = 2 - ARCHIVED = 3 - name: str = proto.Field( proto.STRING, number=1, ) - ad_unit_id: int = proto.Field( - proto.INT64, - number=15, - ) - parent_ad_unit: str = proto.Field( - proto.STRING, - number=10, - ) - parent_path: MutableSequence["AdUnitParent"] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message="AdUnitParent", - ) - display_name: str = proto.Field( + + +class ListAdUnitsRequest(proto.Message): + r"""Request object for ListAdUnits method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of AdUnits. + Format: ``networks/{network_code}`` + page_size (int): + Optional. The maximum number of AdUnits to + return. The service may return fewer than this + value. If unspecified, at most 50 ad units will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListAdUnits`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAdUnits`` must match the call that provided the page + token. + filter (str): + Optional. Expression to filter the response. + See syntax details at + https://developers.google.com/ad-manager/api/beta/filters + order_by (str): + Optional. Expression to specify sorting + order. See syntax details at + https://developers.google.com/ad-manager/api/beta/filters#order + skip (int): + Optional. Number of individual resources to + skip while paginating. 
+ """ + + parent: str = proto.Field( proto.STRING, - number=9, + number=1, ) - ad_unit_code: str = proto.Field( - proto.STRING, + page_size: int = proto.Field( + proto.INT32, number=2, ) - status: Status = proto.Field( - proto.ENUM, - number=13, - enum=Status, - ) - target_window: "TargetWindowEnum.TargetWindow" = proto.Field( - proto.ENUM, - number=12, - enum="TargetWindowEnum.TargetWindow", - ) - applied_teams: MutableSequence[str] = proto.RepeatedField( + page_token: str = proto.Field( proto.STRING, number=3, ) - teams: MutableSequence[str] = proto.RepeatedField( + filter: str = proto.Field( proto.STRING, number=4, ) - description: str = proto.Field( + order_by: str = proto.Field( proto.STRING, number=5, ) - explicitly_targeted: bool = proto.Field( - proto.BOOL, + skip: int = proto.Field( + proto.INT32, number=6, ) - has_children: bool = proto.Field( - proto.BOOL, - number=7, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - ad_unit_sizes: MutableSequence[ad_unit_size.AdUnitSize] = proto.RepeatedField( - proto.MESSAGE, - number=14, - message=ad_unit_size.AdUnitSize, - ) - external_set_top_box_channel_id: str = proto.Field( - proto.STRING, - number=17, - ) - refresh_delay: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=19, - message=duration_pb2.Duration, - ) - ctv_application_id: int = proto.Field( - proto.INT64, - number=20, - ) - applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( - proto.MESSAGE, - number=21, - message=applied_label.AppliedLabel, - ) - effective_applied_labels: MutableSequence[ - applied_label.AppliedLabel - ] = proto.RepeatedField( - proto.MESSAGE, - number=22, - message=applied_label.AppliedLabel, - ) - applied_label_frequency_caps: MutableSequence[ - "LabelFrequencyCap" - ] = proto.RepeatedField( - proto.MESSAGE, - number=23, - message="LabelFrequencyCap", - ) - effective_label_frequency_caps: MutableSequence[ - 
"LabelFrequencyCap" - ] = proto.RepeatedField( - proto.MESSAGE, - number=24, - message="LabelFrequencyCap", - ) - smart_size_mode: "SmartSizeModeEnum.SmartSizeMode" = proto.Field( - proto.ENUM, - number=25, - enum="SmartSizeModeEnum.SmartSizeMode", - ) - applied_adsense_enabled: ad_unit_enums.AppliedAdsenseEnabledEnum.AppliedAdsenseEnabled = proto.Field( - proto.ENUM, - number=26, - enum=ad_unit_enums.AppliedAdsenseEnabledEnum.AppliedAdsenseEnabled, - ) - effective_adsense_enabled: bool = proto.Field( - proto.BOOL, - number=27, - ) -class AdUnitParent(proto.Message): - r"""The summary of a parent AdUnit. +class ListAdUnitsResponse(proto.Message): + r"""Response object for ListAdUnitsRequest containing matching + AdUnit resources. Attributes: - parent_ad_unit (str): - Output only. The parent of the current AdUnit Format: - ``networks/{network_code}/adUnits/{ad_unit_id}`` - display_name (str): - Output only. The display name of the parent - AdUnit. - ad_unit_code (str): - Output only. A string used to uniquely - identify the ad unit for the purposes of serving - the ad. - """ - - parent_ad_unit: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - ad_unit_code: str = proto.Field( - proto.STRING, - number=3, - ) - + ad_units (MutableSequence[google.ads.admanager_v1.types.AdUnit]): + The AdUnit from the specified network. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + total_size (int): + Total number of AdUnits. If a filter was included in the + request, this reflects the total number after the filtering + is applied. -class TargetWindowEnum(proto.Message): - r"""Wrapper message for - [TargetWindow][google.ads.admanager.v1.TargetWindowEnum.TargetWindow]. + ``total_size`` will not be calculated in the response unless + it has been included in a response field mask. 
The response + field mask can be provided to the method by using the URL + parameter ``$fields`` or ``fields``, or by using the + HTTP/gRPC header ``X-Goog-FieldMask``. + For more information, see + https://developers.google.com/ad-manager/api/beta/field-masks """ - class TargetWindow(proto.Enum): - r"""Corresponds to an HTML link's target attribute. - See http://www.w3.org/TR/html401/present/frames.html#adef-target - - Values: - TARGET_WINDOW_UNSPECIFIED (0): - Default value. This value is unused. - TOP (1): - Specifies that the link should open in the - full body of the page. - BLANK (2): - Specifies that the link should open in a new - window. - """ - TARGET_WINDOW_UNSPECIFIED = 0 - TOP = 1 - BLANK = 2 - - -class LabelFrequencyCap(proto.Message): - r"""Frequency cap using a label. - - Attributes: - label (str): - The label to used for frequency capping. Format: - "networks/{network_code}/labels/{label_id}". - frequency_cap (google.ads.admanager_v1.types.FrequencyCap): - The frequency cap. - """ + @property + def raw_page(self): + return self - label: str = proto.Field( - proto.STRING, + ad_units: MutableSequence[ad_unit_messages.AdUnit] = proto.RepeatedField( + proto.MESSAGE, number=1, + message=ad_unit_messages.AdUnit, ) - frequency_cap: gaa_frequency_cap.FrequencyCap = proto.Field( - proto.MESSAGE, + next_page_token: str = proto.Field( + proto.STRING, number=2, - message=gaa_frequency_cap.FrequencyCap, ) - - -class SmartSizeModeEnum(proto.Message): - r"""Wrapper message for - [SmartSizeMode][google.ads.admanager.v1.SmartSizeModeEnum.SmartSizeMode]. - - """ - - class SmartSizeMode(proto.Enum): - r"""The smart size mode for this ad unit. This attribute is - optional and defaults to SmartSizeMode.NONE for fixed sizes. - - Values: - SMART_SIZE_MODE_UNSPECIFIED (0): - Default value. This value is unused. - NONE (1): - Fixed size mode (default). - SMART_BANNER (2): - The height is fixed for the request, the - width is a range. 
- DYNAMIC_SIZE (3): - Height and width are ranges. - """ - SMART_SIZE_MODE_UNSPECIFIED = 0 - NONE = 1 - SMART_BANNER = 2 - DYNAMIC_SIZE = 3 - - -class GetAdUnitRequest(proto.Message): - r"""Request object for GetAdUnit method. - - Attributes: - name (str): - Required. The resource name of the AdUnit. Format: - ``networks/{network_code}/adUnits/{ad_unit_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, + total_size: int = proto.Field( + proto.INT32, + number=3, ) -class ListAdUnitsRequest(proto.Message): - r"""Request object for ListAdUnits method. +class ListAdUnitSizesRequest(proto.Message): + r"""Request object for ListAdUnitSizes method. Attributes: parent (str): - Required. The parent, which owns this collection of AdUnits. - Format: ``networks/{network_code}`` + Required. The parent, which owns this collection of + AdUnitSizes. Format: ``networks/{network_code}`` page_size (int): - Optional. The maximum number of AdUnits to - return. The service may return fewer than this - value. If unspecified, at most 50 ad units will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. + Optional. The maximum number of AdUnitSizes + to return. The service may return fewer than + this value. If unspecified, at most 50 ad unit + sizes will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. page_token (str): Optional. A page token, received from a previous - ``ListAdUnits`` call. Provide this to retrieve the + ``ListAdUnitSizes`` call. Provide this to retrieve the subsequent page. When paginating, all other parameters provided to - ``ListAdUnits`` must match the call that provided the page - token. + ``ListAdUnitSizes`` must match the call that provided the + page token. filter (str): Optional. Expression to filter the response. 
See syntax details at @@ -472,19 +213,19 @@ class ListAdUnitsRequest(proto.Message): ) -class ListAdUnitsResponse(proto.Message): - r"""Response object for ListAdUnitsRequest containing matching - AdUnit resources. +class ListAdUnitSizesResponse(proto.Message): + r"""Response object for ListAdUnitSizesRequest containing + matching AdUnitSizes. Attributes: - ad_units (MutableSequence[google.ads.admanager_v1.types.AdUnit]): - The AdUnit from the specified network. + ad_unit_sizes (MutableSequence[google.ads.admanager_v1.types.AdUnitSize]): + The AdUnitSizes from the specified network. next_page_token (str): A token, which can be sent as ``page_token`` to retrieve the next page. If this field is omitted, there are no subsequent pages. total_size (int): - Total number of AdUnits. If a filter was included in the + Total number of AdUnitSizes. If a filter was included in the request, this reflects the total number after the filtering is applied. @@ -502,10 +243,10 @@ class ListAdUnitsResponse(proto.Message): def raw_page(self): return self - ad_units: MutableSequence["AdUnit"] = proto.RepeatedField( + ad_unit_sizes: MutableSequence[ad_unit_messages.AdUnitSize] = proto.RepeatedField( proto.MESSAGE, number=1, - message="AdUnit", + message=ad_unit_messages.AdUnitSize, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_size.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_size.py deleted file mode 100644 index 8881f72afd02..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_size.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.ads.admanager_v1.types import environment_type_enum -from google.ads.admanager_v1.types import size as gaa_size - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "AdUnitSize", - }, -) - - -class AdUnitSize(proto.Message): - r"""Represents the size, environment, and companions of an ad in - an ad unit. - - Attributes: - size (google.ads.admanager_v1.types.Size): - Required. The Size of the AdUnit. - environment_type (google.ads.admanager_v1.types.EnvironmentTypeEnum.EnvironmentType): - Required. The EnvironmentType of the AdUnit - companions (MutableSequence[google.ads.admanager_v1.types.Size]): - The companions for this ad unit size. Companions are only - valid if the environment is - [VIDEO_PLAYER][google.ads.admanager.v1.EnvironmentTypeEnum.EnvironmentType]. 
- """ - - size: gaa_size.Size = proto.Field( - proto.MESSAGE, - number=1, - message=gaa_size.Size, - ) - environment_type: environment_type_enum.EnvironmentTypeEnum.EnvironmentType = ( - proto.Field( - proto.ENUM, - number=2, - enum=environment_type_enum.EnvironmentTypeEnum.EnvironmentType, - ) - ) - companions: MutableSequence[gaa_size.Size] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=gaa_size.Size, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/company_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/company_messages.py new file mode 100644 index 000000000000..84cb8ffea435 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/company_messages.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.ads.admanager_v1.types import ( + applied_label, + company_credit_status_enum, + company_type_enum, +) + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Company", + }, +) + + +class Company(proto.Message): + r"""The ``Company`` resource. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the ``Company``. Format: + ``networks/{network_code}/companies/{company_id}`` + company_id (int): + Output only. ``Company`` ID. + display_name (str): + Required. The display name of the ``Company``. + + This value has a maximum length of 127 characters. + type_ (google.ads.admanager_v1.types.CompanyTypeEnum.CompanyType): + Required. The type of the ``Company``. + address (str): + Optional. The address for the ``Company``. + + This value has a maximum length of 1024 characters. + email (str): + Optional. The email for the ``Company``. + + This value has a maximum length of 128 characters. + fax (str): + Optional. The fax number for the ``Company``. + + This value has a maximum length of 63 characters. + phone (str): + Optional. The phone number for the ``Company``. + + This value has a maximum length of 63 characters. + external_id (str): + Optional. The external ID for the ``Company``. + + This value has a maximum length of 255 characters. + comment (str): + Optional. Comments about the ``Company``. + + This value has a maximum length of 1024 characters. + credit_status (google.ads.admanager_v1.types.CompanyCreditStatusEnum.CompanyCreditStatus): + Optional. The credit status of the ``Company``. + + This attribute defaults to ``ACTIVE`` if basic settings are + enabled and ``ON_HOLD`` if advance settings are enabled. + applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): + Optional. The labels that are directly applied to the + ``Company``. + primary_contact (str): + Optional. The resource names of primary Contact of the + ``Company``. Format: + "networks/{network_code}/contacts/{contact_id}". + + This field is a member of `oneof`_ ``_primary_contact``. + applied_teams (MutableSequence[str]): + Optional. 
The resource names of Teams that are directly + associated with the ``Company``. Format: + "networks/{network_code}/teams/{team_id}". + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the ``Company`` was last modified. + third_party_company_id (int): + Optional. The ID of the Google-recognized canonicalized form + of the ``Company``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + company_id: int = proto.Field( + proto.INT64, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + type_: company_type_enum.CompanyTypeEnum.CompanyType = proto.Field( + proto.ENUM, + number=4, + enum=company_type_enum.CompanyTypeEnum.CompanyType, + ) + address: str = proto.Field( + proto.STRING, + number=5, + ) + email: str = proto.Field( + proto.STRING, + number=6, + ) + fax: str = proto.Field( + proto.STRING, + number=7, + ) + phone: str = proto.Field( + proto.STRING, + number=8, + ) + external_id: str = proto.Field( + proto.STRING, + number=9, + ) + comment: str = proto.Field( + proto.STRING, + number=10, + ) + credit_status: company_credit_status_enum.CompanyCreditStatusEnum.CompanyCreditStatus = proto.Field( + proto.ENUM, + number=11, + enum=company_credit_status_enum.CompanyCreditStatusEnum.CompanyCreditStatus, + ) + applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message=applied_label.AppliedLabel, + ) + primary_contact: str = proto.Field( + proto.STRING, + number=13, + optional=True, + ) + applied_teams: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=14, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=15, + message=timestamp_pb2.Timestamp, + ) + third_party_company_id: int = proto.Field( + proto.INT64, + number=16, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/company_service.py 
b/packages/google-ads-admanager/google/ads/admanager_v1/types/company_service.py index 204204d7ff34..c75654a1ee37 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/company_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/company_service.py @@ -19,16 +19,11 @@ import proto # type: ignore -from google.ads.admanager_v1.types import ( - applied_label, - company_credit_status_enum, - company_type_enum, -) +from google.ads.admanager_v1.types import company_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "Company", "GetCompanyRequest", "ListCompaniesRequest", "ListCompaniesResponse", @@ -36,129 +31,6 @@ ) -class Company(proto.Message): - r"""The ``Company`` resource. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Identifier. The resource name of the ``Company``. Format: - ``networks/{network_code}/companies/{company_id}`` - company_id (int): - Output only. ``Company`` ID. - display_name (str): - Required. The display name of the ``Company``. - - This value has a maximum length of 127 characters. - type_ (google.ads.admanager_v1.types.CompanyTypeEnum.CompanyType): - Required. The type of the ``Company``. - address (str): - Optional. The address for the ``Company``. - - This value has a maximum length of 1024 characters. - email (str): - Optional. The email for the ``Company``. - - This value has a maximum length of 128 characters. - fax (str): - Optional. The fax number for the ``Company``. - - This value has a maximum length of 63 characters. - phone (str): - Optional. The phone number for the ``Company``. - - This value has a maximum length of 63 characters. - external_id (str): - Optional. The external ID for the ``Company``. - - This value has a maximum length of 255 characters. - comment (str): - Optional. Comments about the ``Company``. 
- - This value has a maximum length of 1024 characters. - credit_status (google.ads.admanager_v1.types.CompanyCreditStatusEnum.CompanyCreditStatus): - Optional. The credit status of this company. - - This attribute defaults to ``ACTIVE`` if basic settings are - enabled and ``ON_HOLD`` if advance settings are enabled. - applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): - Optional. The labels that are directly - applied to this company. - primary_contact (str): - Optional. The resource names of primary Contact of this - company. Format: - "networks/{network_code}/contacts/{contact_id}". - - This field is a member of `oneof`_ ``_primary_contact``. - applied_teams (MutableSequence[str]): - Optional. The resource names of Teams that are directly - associated with this company. Format: - "networks/{network_code}/teams/{team_id}". - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - company_id: int = proto.Field( - proto.INT64, - number=2, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - type_: company_type_enum.CompanyTypeEnum.CompanyType = proto.Field( - proto.ENUM, - number=4, - enum=company_type_enum.CompanyTypeEnum.CompanyType, - ) - address: str = proto.Field( - proto.STRING, - number=5, - ) - email: str = proto.Field( - proto.STRING, - number=6, - ) - fax: str = proto.Field( - proto.STRING, - number=7, - ) - phone: str = proto.Field( - proto.STRING, - number=8, - ) - external_id: str = proto.Field( - proto.STRING, - number=9, - ) - comment: str = proto.Field( - proto.STRING, - number=10, - ) - credit_status: company_credit_status_enum.CompanyCreditStatusEnum.CompanyCreditStatus = proto.Field( - proto.ENUM, - number=11, - enum=company_credit_status_enum.CompanyCreditStatusEnum.CompanyCreditStatus, - ) - applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message=applied_label.AppliedLabel, - ) - primary_contact: str = proto.Field( - 
proto.STRING, - number=13, - optional=True, - ) - applied_teams: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=14, - ) - - class GetCompanyRequest(proto.Message): r"""Request object for ``GetCompany`` method. @@ -235,19 +107,19 @@ class ListCompaniesRequest(proto.Message): class ListCompaniesResponse(proto.Message): r"""Response object for ``ListCompaniesRequest`` containing matching - ``Company`` resources. + ``Company`` objects. Attributes: companies (MutableSequence[google.ads.admanager_v1.types.Company]): - The ``Company`` from the specified network. + The ``Company`` objects from the specified network. next_page_token (str): A token, which can be sent as ``page_token`` to retrieve the next page. If this field is omitted, there are no subsequent pages. total_size (int): - Total number of ``Companies``. If a filter was included in - the request, this reflects the total number after the - filtering is applied. + Total number of ``Company`` objects. If a filter was + included in the request, this reflects the total number + after the filtering is applied. ``total_size`` will not be calculated in the response unless it has been included in a response field mask. 
The response @@ -263,10 +135,10 @@ class ListCompaniesResponse(proto.Message): def raw_page(self): return self - companies: MutableSequence["Company"] = proto.RepeatedField( + companies: MutableSequence[company_messages.Company] = proto.RepeatedField( proto.MESSAGE, number=1, - message="Company", + message=company_messages.Company, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/company_type_enum.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/company_type_enum.py index f1fe0af784b0..49f5326ce645 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/company_type_enum.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/company_type_enum.py @@ -55,9 +55,6 @@ class CompanyType(proto.Enum): AD_NETWORK (5): A company representing multiple advertisers and agencies. - VIEWABILITY_PROVIDER (6): - A third-party that measures creative - viewability. """ COMPANY_TYPE_UNSPECIFIED = 0 ADVERTISER = 1 @@ -65,7 +62,6 @@ class CompanyType(proto.Enum): AGENCY = 3 HOUSE_AGENCY = 4 AD_NETWORK = 5 - VIEWABILITY_PROVIDER = 6 __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/computed_status_enum.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/computed_status_enum.py deleted file mode 100644 index 74a09cadb595..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/computed_status_enum.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "ComputedStatusEnum", - }, -) - - -class ComputedStatusEnum(proto.Message): - r"""Wrapper message for - [ComputedStatus][google.ads.admanager.v1.ComputedStatusEnum.ComputedStatus]. - - """ - - class ComputedStatus(proto.Enum): - r"""Describes the computed LineItem status that is derived from - the current state of the LineItem. - - Values: - COMPUTED_STATUS_UNSPECIFIED (0): - No value specified. - DELIVERY_EXTENDED (1): - The LineItem has past its link - LineItem#endDateTime with an auto extension, but - hasn't met its goal. - DELIVERING (2): - The LineItem has begun serving. - READY (3): - The LineItem has been activated and is ready - to serve. - PAUSED (4): - The LineItem has been paused from serving. - INACTIVE (5): - The LineItem is inactive. It is either caused - by missing creatives or the network disabling - auto-activation. - PAUSED_INVENTORY_RELEASED (6): - The LineItem has been paused and its reserved - inventory has been released. The LineItem will - not serve. - PENDING_APPROVAL (7): - The LineItem has been submitted for approval. - COMPLETED (8): - The LineItem has completed its run. - DISAPPROVED (9): - The LineItem has been disapproved and is not - eligible to serve. - DRAFT (10): - The LineItem is still being drafted. - CANCELED (11): - The LineItem has been canceled and is no - longer eligible to serve. 
This is a legacy - status imported from Google Ad Manager orders. - """ - COMPUTED_STATUS_UNSPECIFIED = 0 - DELIVERY_EXTENDED = 1 - DELIVERING = 2 - READY = 3 - PAUSED = 4 - INACTIVE = 5 - PAUSED_INVENTORY_RELEASED = 6 - PENDING_APPROVAL = 7 - COMPLETED = 8 - DISAPPROVED = 9 - DRAFT = 10 - CANCELED = 11 - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/contact_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/contact_messages.py new file mode 100644 index 000000000000..dfc3e05dab92 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/contact_messages.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Contact", + }, +) + + +class Contact(proto.Message): + r"""A contact represents a person who is affiliated with a single + company. A contact can have a variety of contact information + associated to it, and can be invited to view their company's + orders, line items, creatives, and reports. + + Attributes: + name (str): + Identifier. The resource name of the ``Contact``. Format: + ``networks/{network_code}/contacts/{contact_id}`` + contact_id (int): + Output only. 
The unique ID of the contact. + This value is readonly and is assigned by + Google. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + contact_id: int = proto.Field( + proto.INT64, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/contact_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/contact_service.py deleted file mode 100644 index a7ee0ea8a74d..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/contact_service.py +++ /dev/null @@ -1,174 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "Contact", - "GetContactRequest", - "ListContactsRequest", - "ListContactsResponse", - }, -) - - -class Contact(proto.Message): - r"""The Contact resource. - - Attributes: - name (str): - Identifier. The resource name of the Contact. Format: - ``networks/{network_code}/contacts/{contact_id}`` - contact_id (int): - Output only. ``Contact`` ID. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - contact_id: int = proto.Field( - proto.INT64, - number=2, - ) - - -class GetContactRequest(proto.Message): - r"""Request object for GetContact method. 
- - Attributes: - name (str): - Required. The resource name of the Contact. Format: - ``networks/{network_code}/contacts/{contact_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListContactsRequest(proto.Message): - r"""Request object for ListContacts method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - Contacts. Format: ``networks/{network_code}`` - page_size (int): - Optional. The maximum number of Contacts to - return. The service may return fewer than this - value. If unspecified, at most 50 contacts will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. A page token, received from a previous - ``ListContacts`` call. Provide this to retrieve the - subsequent page. - - When paginating, all other parameters provided to - ``ListContacts`` must match the call that provided the page - token. - filter (str): - Optional. Expression to filter the response. - See syntax details at - https://developers.google.com/ad-manager/api/beta/filters - order_by (str): - Optional. Expression to specify sorting - order. See syntax details at - https://developers.google.com/ad-manager/api/beta/filters#order - skip (int): - Optional. Number of individual resources to - skip while paginating. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - skip: int = proto.Field( - proto.INT32, - number=6, - ) - - -class ListContactsResponse(proto.Message): - r"""Response object for ListContactsRequest containing matching - Contact resources. - - Attributes: - contacts (MutableSequence[google.ads.admanager_v1.types.Contact]): - The Contact from the specified network. 
- next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - total_size (int): - Total number of Contacts. If a filter was included in the - request, this reflects the total number after the filtering - is applied. - - ``total_size`` will not be calculated in the response unless - it has been included in a response field mask. The response - field mask can be provided to the method by using the URL - parameter ``$fields`` or ``fields``, or by using the - HTTP/gRPC header ``X-Goog-FieldMask``. - - For more information, see - https://developers.google.com/ad-manager/api/beta/field-masks - """ - - @property - def raw_page(self): - return self - - contacts: MutableSequence["Contact"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Contact", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - total_size: int = proto.Field( - proto.INT32, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/creative_placeholder.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/creative_placeholder.py deleted file mode 100644 index 1b18adecffba..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/creative_placeholder.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.ads.admanager_v1.types import applied_label -from google.ads.admanager_v1.types import size as gaa_size - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "CreativePlaceholder", - }, -) - - -class CreativePlaceholder(proto.Message): - r"""Describes a slot that a creative is expected to fill. This is - used in forecasting and to validate that the correct creatives - are associated with the line item. - - Attributes: - size (google.ads.admanager_v1.types.Size): - Required. The size that the creative is - expected to have. - companion_sizes (MutableSequence[google.ads.admanager_v1.types.Size]): - The companions that the creative is expected to have. This - attribute can only be set if the line item it belongs to has - an - [EnvironmentType][google.ads.admanager.v1.EnvironmentTypeEnum.EnvironmentType] - of VIDEO_PLAYER or - [roadblocking_type][LineItem.roadblocking_type] of - CREATIVE_SET. - expected_creative_count (int): - Expected number of creatives that will be - uploaded corresponding to this creative - placeholder. This estimate is used to improve - the accuracy of forecasting; for example, if - label frequency capping limits the number of - times a creative may be served. - applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): - Set of labels applied directly to this - CreativePlaceholder. - amp_only (bool): - Indicates if the expected creative of this - placeholder has an AMP only variant. This is - used to improve the accuracy of forecasting and - has no effect on serving. - creative_targeting_display_name (str): - The display name of the creative targeting - that this CreativePlaceholder represents. 
- """ - - size: gaa_size.Size = proto.Field( - proto.MESSAGE, - number=1, - message=gaa_size.Size, - ) - companion_sizes: MutableSequence[gaa_size.Size] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=gaa_size.Size, - ) - expected_creative_count: int = proto.Field( - proto.INT32, - number=3, - ) - applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=applied_label.AppliedLabel, - ) - amp_only: bool = proto.Field( - proto.BOOL, - number=5, - ) - creative_targeting_display_name: str = proto.Field( - proto.STRING, - number=6, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/creative_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/creative_service.py deleted file mode 100644 index ae4a836d0a24..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/creative_service.py +++ /dev/null @@ -1,229 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -from google.protobuf import timestamp_pb2 # type: ignore -import proto # type: ignore - -from google.ads.admanager_v1.types import ( - ad_partner_declaration as gaa_ad_partner_declaration, -) - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "Creative", - "GetCreativeRequest", - "ListCreativesRequest", - "ListCreativesResponse", - }, -) - - -class Creative(proto.Message): - r"""The Creative resource. - - Attributes: - name (str): - Identifier. The resource name of the Creative. Format: - ``networks/{network_code}/creatives/{creative_id}`` - creative_id (int): - Output only. ``Creative`` ID. - display_name (str): - Optional. Display name of the ``Creative``. This attribute - has a maximum length of 255 characters. - advertiser (str): - Required. The resource name of the Company, which is of type - Company.Type.ADVERTISER, to which this Creative belongs. - Format: "networks/{network_code}/companies/{company_id}". - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant this Creative was - last modified. - preview_url (str): - Output only. The URL of the creative for - previewing the media. - size_label (str): - Output only. String representations of creative size. This - field is temporarily available and will be deprecated when - ``Creative.size`` becomes available. - ad_partner_declaration (google.ads.admanager_v1.types.AdPartnerDeclaration): - Optional. The Ad Partners associated with - this creative. This is distinct from any - associated companies that Google may detect - programmatically. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - creative_id: int = proto.Field( - proto.INT64, - number=7, - ) - display_name: str = proto.Field( - proto.STRING, - number=8, - ) - advertiser: str = proto.Field( - proto.STRING, - number=2, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - preview_url: str = proto.Field( - proto.STRING, - number=4, - ) - size_label: str = proto.Field( - proto.STRING, - number=9, - ) - ad_partner_declaration: gaa_ad_partner_declaration.AdPartnerDeclaration = ( - proto.Field( - proto.MESSAGE, - number=6, - message=gaa_ad_partner_declaration.AdPartnerDeclaration, - ) - ) - - -class GetCreativeRequest(proto.Message): - r"""Request object for GetCreative method. - - Attributes: - name (str): - Required. The resource name of the Creative. Format: - ``networks/{network_code}/creatives/{creative_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListCreativesRequest(proto.Message): - r"""Request object for ListCreatives method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - Creatives. Format: networks/{network_code} - page_size (int): - Optional. The maximum number of Creatives to - return. The service may return fewer than this - value. If unspecified, at most 50 creatives will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. A page token, received from a previous - ``ListCreatives`` call. Provide this to retrieve the - subsequent page. - - When paginating, all other parameters provided to - ``ListCreatives`` must match the call that provided the page - token. - filter (str): - Optional. Expression to filter the response. - See syntax details at - https://developers.google.com/ad-manager/api/beta/filters - order_by (str): - Optional. Expression to specify sorting - order. 
See syntax details at - https://developers.google.com/ad-manager/api/beta/filters#order - skip (int): - Optional. Number of individual resources to - skip while paginating. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - skip: int = proto.Field( - proto.INT32, - number=6, - ) - - -class ListCreativesResponse(proto.Message): - r"""Response object for ListCreativesRequest containing matching - Creative resources. - - Attributes: - creatives (MutableSequence[google.ads.admanager_v1.types.Creative]): - The Creative from the specified network. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - total_size (int): - Total number of Creatives. If a filter was included in the - request, this reflects the total number after the filtering - is applied. - - ``total_size`` will not be calculated in the response unless - it has been included in a response field mask. The response - field mask can be provided to the method by using the URL - parameter ``$fields`` or ``fields``, or by using the - HTTP/gRPC header ``X-Goog-FieldMask``. 
- - For more information, see - https://developers.google.com/ad-manager/api/beta/field-masks - """ - - @property - def raw_page(self): - return self - - creatives: MutableSequence["Creative"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Creative", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - total_size: int = proto.Field( - proto.INT32, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_messages.py new file mode 100644 index 000000000000..c1e94e44f8a1 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_messages.py @@ -0,0 +1,138 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.admanager_v1.types import custom_field_enums + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "CustomField", + "CustomFieldOption", + }, +) + + +class CustomField(proto.Message): + r"""An additional, user-created field on an entity. + + Attributes: + name (str): + Identifier. The resource name of the ``CustomField``. 
+ Format: + ``networks/{network_code}/customFields/{custom_field_id}`` + custom_field_id (int): + Output only. Unique ID of the CustomField. + This value is readonly and is assigned by + Google. + display_name (str): + Required. Name of the CustomField. The max + length is 127 characters. + description (str): + Optional. A description of the custom field. + The maximum length is 511 characters. + status (google.ads.admanager_v1.types.CustomFieldStatusEnum.CustomFieldStatus): + Output only. The status of the ``CustomField``. + entity_type (google.ads.admanager_v1.types.CustomFieldEntityTypeEnum.CustomFieldEntityType): + Required. The type of entity the ``CustomField`` can be + applied to. + data_type (google.ads.admanager_v1.types.CustomFieldDataTypeEnum.CustomFieldDataType): + Required. The data type of the ``CustomField``. + visibility (google.ads.admanager_v1.types.CustomFieldVisibilityEnum.CustomFieldVisibility): + Required. The visibility of the ``CustomField``. + options (MutableSequence[google.ads.admanager_v1.types.CustomFieldOption]): + Optional. The drop-down options for the ``CustomField``. + + Only applicable for ``CustomField`` with the drop-down data + type. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + custom_field_id: int = proto.Field( + proto.INT64, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + status: custom_field_enums.CustomFieldStatusEnum.CustomFieldStatus = proto.Field( + proto.ENUM, + number=5, + enum=custom_field_enums.CustomFieldStatusEnum.CustomFieldStatus, + ) + entity_type: custom_field_enums.CustomFieldEntityTypeEnum.CustomFieldEntityType = ( + proto.Field( + proto.ENUM, + number=7, + enum=custom_field_enums.CustomFieldEntityTypeEnum.CustomFieldEntityType, + ) + ) + data_type: custom_field_enums.CustomFieldDataTypeEnum.CustomFieldDataType = ( + proto.Field( + proto.ENUM, + number=8, + enum=custom_field_enums.CustomFieldDataTypeEnum.CustomFieldDataType, + ) + ) + visibility: custom_field_enums.CustomFieldVisibilityEnum.CustomFieldVisibility = ( + proto.Field( + proto.ENUM, + number=9, + enum=custom_field_enums.CustomFieldVisibilityEnum.CustomFieldVisibility, + ) + ) + options: MutableSequence["CustomFieldOption"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="CustomFieldOption", + ) + + +class CustomFieldOption(proto.Message): + r"""An option for a drop-down ``CustomField``. + + Attributes: + custom_field_option_id (int): + Output only. ``CustomFieldOption`` ID. + display_name (str): + Required. The display name of the ``CustomFieldOption``. + + This value has a maximum length of 127 characters. 
+ """ + + custom_field_option_id: int = proto.Field( + proto.INT64, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_service.py index 8a31ad64ae6f..7a014e10e456 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_service.py @@ -19,13 +19,11 @@ import proto # type: ignore -from google.ads.admanager_v1.types import custom_field_enums +from google.ads.admanager_v1.types import custom_field_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "CustomField", - "CustomFieldOption", "GetCustomFieldRequest", "ListCustomFieldsRequest", "ListCustomFieldsResponse", @@ -33,111 +31,6 @@ ) -class CustomField(proto.Message): - r"""The ``CustomField`` resource. - - Attributes: - name (str): - Identifier. The resource name of the ``CustomField``. - Format: - ``networks/{network_code}/customFields/{custom_field_id}`` - custom_field_id (int): - Output only. ``CustomField`` ID. - display_name (str): - Required. The display name of the ``CustomField``. - - This value has a maximum length of 127 characters. - description (str): - Optional. The description of the ``CustomField``. - - This value has a maximum length of 511 characters. - status (google.ads.admanager_v1.types.CustomFieldStatusEnum.CustomFieldStatus): - Output only. The status of the ``CustomField``. - entity_type (google.ads.admanager_v1.types.CustomFieldEntityTypeEnum.CustomFieldEntityType): - Required. The type of entity the ``CustomField`` can be - applied to. - data_type (google.ads.admanager_v1.types.CustomFieldDataTypeEnum.CustomFieldDataType): - Required. The data type of the ``CustomField``. 
- visibility (google.ads.admanager_v1.types.CustomFieldVisibilityEnum.CustomFieldVisibility): - Required. The visibility of the ``CustomField``. - options (MutableSequence[google.ads.admanager_v1.types.CustomFieldOption]): - Optional. The drop-down options for the ``CustomField``. - - Only applicable for ``CustomField`` with the drop-down data - type. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - custom_field_id: int = proto.Field( - proto.INT64, - number=2, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - description: str = proto.Field( - proto.STRING, - number=4, - ) - status: custom_field_enums.CustomFieldStatusEnum.CustomFieldStatus = proto.Field( - proto.ENUM, - number=5, - enum=custom_field_enums.CustomFieldStatusEnum.CustomFieldStatus, - ) - entity_type: custom_field_enums.CustomFieldEntityTypeEnum.CustomFieldEntityType = ( - proto.Field( - proto.ENUM, - number=7, - enum=custom_field_enums.CustomFieldEntityTypeEnum.CustomFieldEntityType, - ) - ) - data_type: custom_field_enums.CustomFieldDataTypeEnum.CustomFieldDataType = ( - proto.Field( - proto.ENUM, - number=8, - enum=custom_field_enums.CustomFieldDataTypeEnum.CustomFieldDataType, - ) - ) - visibility: custom_field_enums.CustomFieldVisibilityEnum.CustomFieldVisibility = ( - proto.Field( - proto.ENUM, - number=9, - enum=custom_field_enums.CustomFieldVisibilityEnum.CustomFieldVisibility, - ) - ) - options: MutableSequence["CustomFieldOption"] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message="CustomFieldOption", - ) - - -class CustomFieldOption(proto.Message): - r"""An option for a drop-down ``CustomField``. - - Attributes: - custom_field_option_id (int): - Output only. ``CustomFieldOption`` ID. - display_name (str): - Required. The display name of the ``CustomFieldOption``. - - This value has a maximum length of 127 characters. 
- """ - - custom_field_option_id: int = proto.Field( - proto.INT64, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - - class GetCustomFieldRequest(proto.Message): r"""Request object for ``GetCustomField`` method. @@ -176,11 +69,12 @@ class ListCustomFieldsRequest(proto.Message): page token. filter (str): Optional. Expression to filter the response. - See syntax details at https://google.aip.dev/160 + See syntax details at + https://developers.google.com/ad-manager/api/beta/filters order_by (str): Optional. Expression to specify sorting order. See syntax details at - https://google.aip.dev/132#ordering + https://developers.google.com/ad-manager/api/beta/filters#order skip (int): Optional. Number of individual resources to skip while paginating. @@ -234,18 +128,20 @@ class ListCustomFieldsResponse(proto.Message): parameter ``$fields`` or ``fields``, or by using the HTTP/gRPC header ``X-Goog-FieldMask``. - For more information, see `System - Parameters `__. 
+ For more information, see + https://developers.google.com/ad-manager/api/beta/field-masks """ @property def raw_page(self): return self - custom_fields: MutableSequence["CustomField"] = proto.RepeatedField( + custom_fields: MutableSequence[ + custom_field_messages.CustomField + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message="CustomField", + message=custom_field_messages.CustomField, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_value.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_value.py new file mode 100644 index 000000000000..80465b4b9fb2 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_value.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "CustomFieldValue", + }, +) + + +class CustomFieldValue(proto.Message): + r"""A value for a CustomField on a resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + custom_field (str): + Required. The custom field for which this is a value. 
+ Format: + "networks/{network_code}/customFields/{custom_field_id}". + value (google.ads.admanager_v1.types.CustomFieldValue.Value): + Required. A typed value representation of the + value. + + This field is a member of `oneof`_ ``_value``. + """ + + class Value(proto.Message): + r"""Represent custom field value type. + Next Id: 5 + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dropdown_value (int): + The custom_field_option_id, if the CustomFieldDataType is + DROPDOWN. + + This field is a member of `oneof`_ ``value``. + string_value (str): + The value, if the CustomFieldDataType is + STRING. + + This field is a member of `oneof`_ ``value``. + number_value (float): + The value, if the CustomFieldDataType is + NUMBER. + + This field is a member of `oneof`_ ``value``. + toggle_value (bool): + The value, if the CustomFieldDataType is + TOGGLE. + + This field is a member of `oneof`_ ``value``. 
+ """ + + dropdown_value: int = proto.Field( + proto.INT64, + number=1, + oneof="value", + ) + string_value: str = proto.Field( + proto.STRING, + number=2, + oneof="value", + ) + number_value: float = proto.Field( + proto.DOUBLE, + number=3, + oneof="value", + ) + toggle_value: bool = proto.Field( + proto.BOOL, + number=4, + oneof="value", + ) + + custom_field: str = proto.Field( + proto.STRING, + number=1, + ) + value: Value = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=Value, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_messages.py new file mode 100644 index 000000000000..db7f1de5e6cc --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_messages.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.admanager_v1.types import custom_targeting_key_enums + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "CustomTargetingKey", + }, +) + + +class CustomTargetingKey(proto.Message): + r"""The ``CustomTargetingKey`` resource. + + Attributes: + name (str): + Identifier. 
The resource name of the ``CustomTargetingKey``. + Format: + ``networks/{network_code}/customTargetingKeys/{custom_targeting_key_id}`` + custom_targeting_key_id (int): + Output only. ``CustomTargetingKey`` ID. + ad_tag_name (str): + Immutable. Name of the key. Keys can contain up to 10 + characters each. You can use alphanumeric characters and + symbols other than the following: ", ', =, !, +, #, \*, ~, + ;, ^, (, ), <, >, [, ], the white space character. + display_name (str): + Optional. Descriptive name for the ``CustomTargetingKey``. + type_ (google.ads.admanager_v1.types.CustomTargetingKeyTypeEnum.CustomTargetingKeyType): + Required. Indicates whether users will select + from predefined values or create new targeting + values, while specifying targeting criteria for + a line item. + status (google.ads.admanager_v1.types.CustomTargetingKeyStatusEnum.CustomTargetingKeyStatus): + Output only. Status of the ``CustomTargetingKey``. + reportable_type (google.ads.admanager_v1.types.CustomTargetingKeyReportableTypeEnum.CustomTargetingKeyReportableType): + Required. Reportable state of the ``CustomTargetingKey``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + custom_targeting_key_id: int = proto.Field( + proto.INT64, + number=2, + ) + ad_tag_name: str = proto.Field( + proto.STRING, + number=3, + ) + display_name: str = proto.Field( + proto.STRING, + number=4, + ) + type_: custom_targeting_key_enums.CustomTargetingKeyTypeEnum.CustomTargetingKeyType = proto.Field( + proto.ENUM, + number=5, + enum=custom_targeting_key_enums.CustomTargetingKeyTypeEnum.CustomTargetingKeyType, + ) + status: custom_targeting_key_enums.CustomTargetingKeyStatusEnum.CustomTargetingKeyStatus = proto.Field( + proto.ENUM, + number=6, + enum=custom_targeting_key_enums.CustomTargetingKeyStatusEnum.CustomTargetingKeyStatus, + ) + reportable_type: custom_targeting_key_enums.CustomTargetingKeyReportableTypeEnum.CustomTargetingKeyReportableType = proto.Field( + proto.ENUM, + number=7, + enum=custom_targeting_key_enums.CustomTargetingKeyReportableTypeEnum.CustomTargetingKeyReportableType, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_service.py index f9a8089d3e0e..f7246e702c32 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_service.py @@ -19,12 +19,11 @@ import proto # type: ignore -from google.ads.admanager_v1.types import custom_targeting_key_enums +from google.ads.admanager_v1.types import custom_targeting_key_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "CustomTargetingKey", "GetCustomTargetingKeyRequest", "ListCustomTargetingKeysRequest", "ListCustomTargetingKeysResponse", @@ -32,67 +31,6 @@ ) -class CustomTargetingKey(proto.Message): - r"""The ``CustomTargetingKey`` resource. 
- - Attributes: - name (str): - Identifier. The resource name of the ``CustomTargetingKey``. - Format: - ``networks/{network_code}/customTargetingKeys/{custom_targeting_key_id}`` - custom_targeting_key_id (int): - Output only. ``CustomTargetingKey`` ID. - ad_tag_name (str): - Immutable. Name of the key. Keys can contain up to 10 - characters each. You can use alphanumeric characters and - symbols other than the following: ", ', =, !, +, #, \*, ~, - ;, ^, (, ), <, >, [, ], the white space character. - display_name (str): - Optional. Descriptive name for the ``CustomTargetingKey``. - type_ (google.ads.admanager_v1.types.CustomTargetingKeyTypeEnum.CustomTargetingKeyType): - Required. Indicates whether users will select - from predefined values or create new targeting - values, while specifying targeting criteria for - a line item. - status (google.ads.admanager_v1.types.CustomTargetingKeyStatusEnum.CustomTargetingKeyStatus): - Output only. Status of the ``CustomTargetingKey``. - reportable_type (google.ads.admanager_v1.types.CustomTargetingKeyReportableTypeEnum.CustomTargetingKeyReportableType): - Required. Reportable state of the ``CustomTargetingKey``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - custom_targeting_key_id: int = proto.Field( - proto.INT64, - number=2, - ) - ad_tag_name: str = proto.Field( - proto.STRING, - number=3, - ) - display_name: str = proto.Field( - proto.STRING, - number=4, - ) - type_: custom_targeting_key_enums.CustomTargetingKeyTypeEnum.CustomTargetingKeyType = proto.Field( - proto.ENUM, - number=5, - enum=custom_targeting_key_enums.CustomTargetingKeyTypeEnum.CustomTargetingKeyType, - ) - status: custom_targeting_key_enums.CustomTargetingKeyStatusEnum.CustomTargetingKeyStatus = proto.Field( - proto.ENUM, - number=6, - enum=custom_targeting_key_enums.CustomTargetingKeyStatusEnum.CustomTargetingKeyStatus, - ) - reportable_type: custom_targeting_key_enums.CustomTargetingKeyReportableTypeEnum.CustomTargetingKeyReportableType = proto.Field( - proto.ENUM, - number=7, - enum=custom_targeting_key_enums.CustomTargetingKeyReportableTypeEnum.CustomTargetingKeyReportableType, - ) - - class GetCustomTargetingKeyRequest(proto.Message): r"""Request object for ``GetCustomTargetingKey`` method. 
@@ -200,10 +138,12 @@ class ListCustomTargetingKeysResponse(proto.Message): def raw_page(self): return self - custom_targeting_keys: MutableSequence["CustomTargetingKey"] = proto.RepeatedField( + custom_targeting_keys: MutableSequence[ + custom_targeting_key_messages.CustomTargetingKey + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message="CustomTargetingKey", + message=custom_targeting_key_messages.CustomTargetingKey, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_messages.py new file mode 100644 index 000000000000..de852fc338b4 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_messages.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.admanager_v1.types import custom_targeting_value_enums + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "CustomTargetingValue", + }, +) + + +class CustomTargetingValue(proto.Message): + r"""The ``CustomTargetingValue`` resource. + + Attributes: + name (str): + Identifier. The resource name of the + ``CustomTargetingValue``. 
Format: + ``networks/{network_code}/customTargetingKeys/{custom_targeting_key_id}/customTargetingValues/{custom_targeting_value_id}`` + ad_tag_name (str): + Immutable. Name of the ``CustomTargetingValue``. Values can + contain up to 40 characters each. You can use alphanumeric + characters and symbols other than the following: ", ', =, !, + +, #, \*, ~, ;, ^, (, ), <, >, [, ]. Values are not + data-specific; all values are treated as strings. For + example, instead of using "age>=18 AND <=34", try "18-34". + display_name (str): + Optional. Descriptive name for the ``CustomTargetingValue``. + match_type (google.ads.admanager_v1.types.CustomTargetingValueMatchTypeEnum.CustomTargetingValueMatchType): + Required. Immutable. The way in which the + CustomTargetingValue.name strings will be + matched. + status (google.ads.admanager_v1.types.CustomTargetingValueStatusEnum.CustomTargetingValueStatus): + Output only. Status of the ``CustomTargetingValue``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + ad_tag_name: str = proto.Field( + proto.STRING, + number=4, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + match_type: custom_targeting_value_enums.CustomTargetingValueMatchTypeEnum.CustomTargetingValueMatchType = proto.Field( + proto.ENUM, + number=6, + enum=custom_targeting_value_enums.CustomTargetingValueMatchTypeEnum.CustomTargetingValueMatchType, + ) + status: custom_targeting_value_enums.CustomTargetingValueStatusEnum.CustomTargetingValueStatus = proto.Field( + proto.ENUM, + number=7, + enum=custom_targeting_value_enums.CustomTargetingValueStatusEnum.CustomTargetingValueStatus, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_service.py index e7d7581572e8..70624af890fc 100644 --- 
a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_service.py @@ -19,12 +19,11 @@ import proto # type: ignore -from google.ads.admanager_v1.types import custom_targeting_value_enums +from google.ads.admanager_v1.types import custom_targeting_value_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "CustomTargetingValue", "GetCustomTargetingValueRequest", "ListCustomTargetingValuesRequest", "ListCustomTargetingValuesResponse", @@ -32,55 +31,6 @@ ) -class CustomTargetingValue(proto.Message): - r"""The ``CustomTargetingValue`` resource. - - Attributes: - name (str): - Identifier. The resource name of the - ``CustomTargetingValue``. Format: - ``networks/{network_code}/customTargetingKeys/{custom_targeting_key_id}/customTargetingValues/{custom_targeting_value_id}`` - ad_tag_name (str): - Immutable. Name of the ``CustomTargetingValue``. Values can - contain up to 40 characters each. You can use alphanumeric - characters and symbols other than the following: ", ', =, !, - +, #, \*, ~, ;, ^, (, ), <, >, [, ]. Values are not - data-specific; all values are treated as string. For - example, instead of using "age>=18 AND <=34", try "18-34". - display_name (str): - Optional. Descriptive name for the ``CustomTargetingValue``. - match_type (google.ads.admanager_v1.types.CustomTargetingValueMatchTypeEnum.CustomTargetingValueMatchType): - Required. The way in which the - CustomTargetingValue.name strings will be - matched. - status (google.ads.admanager_v1.types.CustomTargetingValueStatusEnum.CustomTargetingValueStatus): - Output only. Status of the ``CustomTargetingValue``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - ad_tag_name: str = proto.Field( - proto.STRING, - number=4, - ) - display_name: str = proto.Field( - proto.STRING, - number=5, - ) - match_type: custom_targeting_value_enums.CustomTargetingValueMatchTypeEnum.CustomTargetingValueMatchType = proto.Field( - proto.ENUM, - number=6, - enum=custom_targeting_value_enums.CustomTargetingValueMatchTypeEnum.CustomTargetingValueMatchType, - ) - status: custom_targeting_value_enums.CustomTargetingValueStatusEnum.CustomTargetingValueStatus = proto.Field( - proto.ENUM, - number=7, - enum=custom_targeting_value_enums.CustomTargetingValueStatusEnum.CustomTargetingValueStatus, - ) - - class GetCustomTargetingValueRequest(proto.Message): r"""Request object for ``GetCustomTargetingValue`` method. @@ -190,11 +140,11 @@ def raw_page(self): return self custom_targeting_values: MutableSequence[ - "CustomTargetingValue" + custom_targeting_value_messages.CustomTargetingValue ] = proto.RepeatedField( proto.MESSAGE, number=1, - message="CustomTargetingValue", + message=custom_targeting_value_messages.CustomTargetingValue, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_messages.py new file mode 100644 index 000000000000..79fdf386e430 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_messages.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "EntitySignalsMapping", + }, +) + + +class EntitySignalsMapping(proto.Message): + r"""The ``EntitySignalsMapping`` resource. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + audience_segment_id (int): + ID of an AudienceSegment that this mapping + belongs to. + + This field is a member of `oneof`_ ``entity``. + content_bundle_id (int): + ID of a ContentBundle that this mapping + belongs to. + + This field is a member of `oneof`_ ``entity``. + custom_targeting_value_id (int): + ID of a CustomValue that this mapping belongs + to. + + This field is a member of `oneof`_ ``entity``. + name (str): + Identifier. The resource name of the + ``EntitySignalsMapping``. Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping_id}`` + entity_signals_mapping_id (int): + Output only. ``EntitySignalsMapping`` ID. + taxonomy_category_ids (MutableSequence[int]): + Required. The IDs of the categories that are + associated with the referencing entity. 
+ """ + + audience_segment_id: int = proto.Field( + proto.INT64, + number=3, + oneof="entity", + ) + content_bundle_id: int = proto.Field( + proto.INT64, + number=4, + oneof="entity", + ) + custom_targeting_value_id: int = proto.Field( + proto.INT64, + number=5, + oneof="entity", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + entity_signals_mapping_id: int = proto.Field( + proto.INT64, + number=2, + ) + taxonomy_category_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=6, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_service.py new file mode 100644 index 000000000000..f63377f55ea1 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_service.py @@ -0,0 +1,306 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.ads.admanager_v1.types import entity_signals_mapping_messages + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "GetEntitySignalsMappingRequest", + "ListEntitySignalsMappingsRequest", + "CreateEntitySignalsMappingRequest", + "UpdateEntitySignalsMappingRequest", + "ListEntitySignalsMappingsResponse", + "BatchCreateEntitySignalsMappingsRequest", + "BatchCreateEntitySignalsMappingsResponse", + "BatchUpdateEntitySignalsMappingsRequest", + "BatchUpdateEntitySignalsMappingsResponse", + }, +) + + +class GetEntitySignalsMappingRequest(proto.Message): + r"""Request object for ``GetEntitySignalsMapping`` method. + + Attributes: + name (str): + Required. The resource name of the EntitySignalsMapping. + Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListEntitySignalsMappingsRequest(proto.Message): + r"""Request object for ``ListEntitySignalsMappings`` method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + EntitySignalsMappings. Format: ``networks/{network_code}`` + page_size (int): + Optional. The maximum number of ``EntitySignalsMappings`` to + return. The service may return fewer than this value. If + unspecified, at most 50 ``EntitySignalsMappings`` will be + returned. The maximum value is 1000; values above 1000 will + be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListEntitySignalsMappings`` call. Provide this to retrieve + the subsequent page. + + When paginating, all other parameters provided to + ``ListEntitySignalsMappings`` must match the call that + provided the page token. + filter (str): + Optional. 
Expression to filter the response. + See syntax details at + https://developers.google.com/ad-manager/api/beta/filters + order_by (str): + Optional. Expression to specify sorting + order. See syntax details at + https://developers.google.com/ad-manager/api/beta/filters#order + skip (int): + Optional. Number of individual resources to + skip while paginating. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + skip: int = proto.Field( + proto.INT32, + number=6, + ) + + +class CreateEntitySignalsMappingRequest(proto.Message): + r"""Request object for 'CreateEntitySignalsMapping' method. + + Attributes: + parent (str): + Required. The parent resource where this + EntitySignalsMapping will be created. Format: + ``networks/{network_code}`` + entity_signals_mapping (google.ads.admanager_v1.types.EntitySignalsMapping): + Required. The EntitySignalsMapping object to + create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entity_signals_mapping: entity_signals_mapping_messages.EntitySignalsMapping = ( + proto.Field( + proto.MESSAGE, + number=2, + message=entity_signals_mapping_messages.EntitySignalsMapping, + ) + ) + + +class UpdateEntitySignalsMappingRequest(proto.Message): + r"""Request object for 'UpdateEntitySignalsMapping' method. + + Attributes: + entity_signals_mapping (google.ads.admanager_v1.types.EntitySignalsMapping): + Required. The ``EntitySignalsMapping`` to update. + + The EntitySignalsMapping's name is used to identify the + EntitySignalsMapping to update. Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping}`` + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. 
+ """ + + entity_signals_mapping: entity_signals_mapping_messages.EntitySignalsMapping = ( + proto.Field( + proto.MESSAGE, + number=1, + message=entity_signals_mapping_messages.EntitySignalsMapping, + ) + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListEntitySignalsMappingsResponse(proto.Message): + r"""Response object for ``ListEntitySignalsMappingsRequest`` containing + matching ``EntitySignalsMapping`` resources. + + Attributes: + entity_signals_mappings (MutableSequence[google.ads.admanager_v1.types.EntitySignalsMapping]): + The ``EntitySignalsMapping`` from the specified network. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + total_size (int): + Total number of ``EntitySignalsMappings``. If a filter was + included in the request, this reflects the total number + after the filtering is applied. + + ``total_size`` will not be calculated in the response unless + it has been included in a response field mask. The response + field mask can be provided to the method by using the URL + parameter ``$fields`` or ``fields``, or by using the + HTTP/gRPC header ``X-Goog-FieldMask``. + + For more information, see + https://developers.google.com/ad-manager/api/beta/field-masks + """ + + @property + def raw_page(self): + return self + + entity_signals_mappings: MutableSequence[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity_signals_mapping_messages.EntitySignalsMapping, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class BatchCreateEntitySignalsMappingsRequest(proto.Message): + r"""Request object for ``BatchCreateEntitySignalsMappings`` method. + + Attributes: + parent (str): + Required. 
The parent resource where + ``EntitySignalsMappings`` will be created. Format: + ``networks/{network_code}`` The parent field in the + CreateEntitySignalsMappingRequest must match this field. + requests (MutableSequence[google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest]): + Required. The ``EntitySignalsMapping`` objects to create. A + maximum of 100 objects can be created in a batch. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence[ + "CreateEntitySignalsMappingRequest" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CreateEntitySignalsMappingRequest", + ) + + +class BatchCreateEntitySignalsMappingsResponse(proto.Message): + r"""Response object for ``BatchCreateEntitySignalsMappings`` method. + + Attributes: + entity_signals_mappings (MutableSequence[google.ads.admanager_v1.types.EntitySignalsMapping]): + The ``EntitySignalsMapping`` objects created. + """ + + entity_signals_mappings: MutableSequence[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity_signals_mapping_messages.EntitySignalsMapping, + ) + + +class BatchUpdateEntitySignalsMappingsRequest(proto.Message): + r"""Request object for ``BatchUpdateEntitySignalsMappings`` method. + + Attributes: + parent (str): + Required. The parent resource where + ``EntitySignalsMappings`` will be updated. Format: + ``networks/{network_code}`` The parent field in the + UpdateEntitySignalsMappingRequest must match this field. + requests (MutableSequence[google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest]): + Required. The ``EntitySignalsMapping`` objects to update. A + maximum of 100 objects can be updated in a batch. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence[ + "UpdateEntitySignalsMappingRequest" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="UpdateEntitySignalsMappingRequest", + ) + + +class BatchUpdateEntitySignalsMappingsResponse(proto.Message): + r"""Response object for ``BatchUpdateEntitySignalsMappings`` method. + + Attributes: + entity_signals_mappings (MutableSequence[google.ads.admanager_v1.types.EntitySignalsMapping]): + The ``EntitySignalsMapping`` objects updated. + """ + + entity_signals_mappings: MutableSequence[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity_signals_mapping_messages.EntitySignalsMapping, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/frequency_cap.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/frequency_cap.py index ecaf3a433ae7..62ff46a14006 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/frequency_cap.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/frequency_cap.py @@ -19,11 +19,12 @@ import proto # type: ignore +from google.ads.admanager_v1.types import time_unit_enum + __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ "FrequencyCap", - "TimeUnitEnum", }, ) @@ -60,51 +61,12 @@ class FrequencyCap(proto.Message): number=2, optional=True, ) - time_unit: "TimeUnitEnum.TimeUnit" = proto.Field( + time_unit: time_unit_enum.TimeUnitEnum.TimeUnit = proto.Field( proto.ENUM, number=3, optional=True, - enum="TimeUnitEnum.TimeUnit", + enum=time_unit_enum.TimeUnitEnum.TimeUnit, ) -class TimeUnitEnum(proto.Message): - r"""Wrapper message for TimeUnit.""" - - class TimeUnit(proto.Enum): - r"""Unit of time for the frequency cap. - - Values: - TIME_UNIT_UNSPECIFIED (0): - Default value. This value is unused. 
- MINUTE (1): - Minute - HOUR (2): - Hour - DAY (3): - Day - WEEK (4): - Week - MONTH (5): - Month - LIFETIME (6): - Lifetime - POD (7): - Per pod of ads in a video stream. Only valid for entities in - a VIDEO_PLAYER environment. - STREAM (8): - Per video stream. Only valid for entities in a VIDEO_PLAYER - environment. - """ - TIME_UNIT_UNSPECIFIED = 0 - MINUTE = 1 - HOUR = 2 - DAY = 3 - WEEK = 4 - MONTH = 5 - LIFETIME = 6 - POD = 7 - STREAM = 8 - - __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/goal.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/goal.py deleted file mode 100644 index d73e088136fa..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/goal.py +++ /dev/null @@ -1,204 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "Goal", - "GoalTypeEnum", - "UnitTypeEnum", - }, -) - - -class Goal(proto.Message): - r"""Defines the criteria a [LineItem][google.ads.admanager.v1.LineItem] - needs to satisfy to meet its delivery goal. - - Attributes: - goal_type (google.ads.admanager_v1.types.GoalTypeEnum.GoalType): - The type of the goal for the LineItem. 
It - defines the period over which the goal should be - reached. - unit_type (google.ads.admanager_v1.types.UnitTypeEnum.UnitType): - The type of the goal unit for the LineItem. - units (int): - If this is a primary goal, it represents the number or - percentage of impressions or clicks that will be reserved. - If the line item is of type - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP], - it represents the percentage of available impressions - reserved. If the line item is of type - [LineItemTypeEnum.LineItemType.BULK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BULK] - or - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY], - it represents the number of remaining impressions reserved. - If the line item is of type - [LineItemTypeEnum.LineItemType.NETWORK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.NETWORK] - or - [LineItemTypeEnum.LineItemType.HOUSE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.HOUSE], - it represents the percentage of remaining impressions - reserved. If this is an impression cap goal, it represents - the number of impressions or conversions that the line item - will stop serving at if reached. For valid line item types, - see [LineItem.impressions_cap][]. - """ - - goal_type: "GoalTypeEnum.GoalType" = proto.Field( - proto.ENUM, - number=1, - enum="GoalTypeEnum.GoalType", - ) - unit_type: "UnitTypeEnum.UnitType" = proto.Field( - proto.ENUM, - number=2, - enum="UnitTypeEnum.UnitType", - ) - units: int = proto.Field( - proto.INT64, - number=3, - ) - - -class GoalTypeEnum(proto.Message): - r"""Wrapper message for - [GoalType][google.ads.admanager.v1.GoalTypeEnum.GoalType]. - - """ - - class GoalType(proto.Enum): - r"""Specifies the type of the goal for a LineItem. - - Values: - GOAL_TYPE_UNSPECIFIED (0): - Default value. This value is unused. 
- NONE (1): - No goal is specified for the number of ads delivered. The - line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY] - - [LineItemTypeEnum.LineItemType.AD_EXCHANGE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.AD_EXCHANGE] - - [LineItemTypeEnum.LineItemType.CLICK_TRACKING][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.CLICK_TRACKING] - LIFETIME (2): - There is a goal on the number of ads delivered for this line - item during its entire lifetime. The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - - [LineItemTypeEnum.LineItemType.BULK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BULK] - - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY] - - [LineItemTypeEnum.LineItemType.ADSENSE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.ADSENSE] - - [LineItemTypeEnum.LineItemType.AD_EXCHANGE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.AD_EXCHANGE] - - [LineItemTypeEnum.LineItemType.ADMOB][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.ADMOB] - - [LineItemTypeEnum.LineItemType.CLICK_TRACKING][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.CLICK_TRACKING] - DAILY (3): - There is a daily goal on the number of ads delivered for - this line item. 
The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP] - - [LineItemTypeEnum.LineItemType.NETWORK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.NETWORK] - - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY] - - [LineItemTypeEnum.LineItemType.HOUSE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.HOUSE] - - [LineItemTypeEnum.LineItemType.ADSENSE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.ADSENSE] - - [LineItemTypeEnum.LineItemType.AD_EXCHANGE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.AD_EXCHANGE] - - [LineItemTypeEnum.LineItemType.ADMOB][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.ADMOB] - - [LineItemTypeEnum.LineItemType.BUMPER][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BUMPER] - """ - GOAL_TYPE_UNSPECIFIED = 0 - NONE = 1 - LIFETIME = 2 - DAILY = 3 - - -class UnitTypeEnum(proto.Message): - r"""Wrapper message for - [UnitType][google.ads.admanager.v1.UnitTypeEnum.UnitType]. - - """ - - class UnitType(proto.Enum): - r"""Indicates the type of unit used for defining a reservation. The - [LineItem.cost_type][google.ads.admanager.v1.LineItem.cost_type] can - differ from the UnitType - an ad can have an impression goal, but be - billed by its click. Usually CostType and UnitType will refer to the - same unit. - - Values: - UNIT_TYPE_UNSPECIFIED (0): - Default value. This value is unused. - IMPRESSIONS (1): - The number of impressions served by creatives - associated with the line item. - CLICKS (2): - The number of clicks reported by creatives associated with - the line item. 
The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - - [LineItemTypeEnum.LineItemType.BULK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BULK] - - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY] - CLICK_THROUGH_CPA_CONVERSIONS (3): - The number of view-through Cost-Per-Action (CPA) conversions - from creatives associated with the line item. This is only - supported as secondary goal and the - [LineItem.cost_type][google.ads.admanager.v1.LineItem.cost_type] - must be [CostTypeEnum.CostType.CPA][]. - VIEW_THROUGH_CPA_CONVERSIONS (4): - The number of view-through Cost-Per-Action (CPA) conversions - from creatives associated with the line item. This is only - supported as secondary goal and the - [LineItem.cost_type][google.ads.admanager.v1.LineItem.cost_type] - must be [CostTypeEnum.CostType.CPA}. - TOTAL_CPA_CONVERSIONS (5): - The number of total Cost-Per-Action (CPA) conversions from - creatives associated with the line item. This is only - supported as secondary goal and the [LineItem.cost_type} - must be [CostTypeEnum.CostType.CPA}. - VIEWABLE_IMPRESSIONS (6): - The number of viewable impressions reported by creatives - associated with the line item. The - [LineItem.line_item_type][google.ads.admanager.v1.LineItem.line_item_type] - must be - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD]. - IN_TARGET_IMPRESSIONS (7): - The number of in-target impressions reported by third party - measurements. The - [LineItem.line_item_type][google.ads.admanager.v1.LineItem.line_item_type] - must be - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD]. 
- """ - UNIT_TYPE_UNSPECIFIED = 0 - IMPRESSIONS = 1 - CLICKS = 2 - CLICK_THROUGH_CPA_CONVERSIONS = 3 - VIEW_THROUGH_CPA_CONVERSIONS = 4 - TOTAL_CPA_CONVERSIONS = 5 - VIEWABLE_IMPRESSIONS = 6 - IN_TARGET_IMPRESSIONS = 7 - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/label_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/label_messages.py new file mode 100644 index 000000000000..4f95e140723a --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/label_messages.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Label", + }, +) + + +class Label(proto.Message): + r"""A Label is additional information that can be added to an + entity. + + Attributes: + name (str): + Identifier. The resource name of the ``Label``. 
Format: + ``networks/{network_code}/labels/{label_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/label_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/label_service.py deleted file mode 100644 index cfb44f629186..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/label_service.py +++ /dev/null @@ -1,168 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "Label", - "GetLabelRequest", - "ListLabelsRequest", - "ListLabelsResponse", - }, -) - - -class Label(proto.Message): - r"""The Label resource. - - Attributes: - name (str): - Identifier. The resource name of the Label. Format: - ``networks/{network_code}/labels/{label_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetLabelRequest(proto.Message): - r"""Request object for GetLabel method. - - Attributes: - name (str): - Required. The resource name of the Label. 
Format: - ``networks/{network_code}/labels/{label_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListLabelsRequest(proto.Message): - r"""Request object for ListLabels method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of Labels. - Format: ``networks/{network_code}`` - page_size (int): - Optional. The maximum number of Labels to - return. The service may return fewer than this - value. If unspecified, at most 50 labels will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. A page token, received from a previous - ``ListLabels`` call. Provide this to retrieve the subsequent - page. - - When paginating, all other parameters provided to - ``ListLabels`` must match the call that provided the page - token. - filter (str): - Optional. Expression to filter the response. - See syntax details at - https://developers.google.com/ad-manager/api/beta/filters - order_by (str): - Optional. Expression to specify sorting - order. See syntax details at - https://developers.google.com/ad-manager/api/beta/filters#order - skip (int): - Optional. Number of individual resources to - skip while paginating. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - skip: int = proto.Field( - proto.INT32, - number=6, - ) - - -class ListLabelsResponse(proto.Message): - r"""Response object for ListLabelsRequest containing matching - Label resources. - - Attributes: - labels (MutableSequence[google.ads.admanager_v1.types.Label]): - The Label from the specified network. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. 
If this field is omitted, there are no subsequent - pages. - total_size (int): - Total number of Labels. If a filter was included in the - request, this reflects the total number after the filtering - is applied. - - ``total_size`` will not be calculated in the response unless - it has been included in a response field mask. The response - field mask can be provided to the method by using the URL - parameter ``$fields`` or ``fields``, or by using the - HTTP/gRPC header ``X-Goog-FieldMask``. - - For more information, see - https://developers.google.com/ad-manager/api/beta/field-masks - """ - - @property - def raw_page(self): - return self - - labels: MutableSequence["Label"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Label", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - total_size: int = proto.Field( - proto.INT32, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_enums.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_enums.py deleted file mode 100644 index 6f993c155a83..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_enums.py +++ /dev/null @@ -1,314 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "LineItemCostTypeEnum", - "CreativeRotationTypeEnum", - "DeliveryRateTypeEnum", - "LineItemDiscountTypeEnum", - "LineItemTypeEnum", - "ReservationStatusEnum", - }, -) - - -class LineItemCostTypeEnum(proto.Message): - r"""Wrapper message for - [LineItemCostType][google.ads.admanager.v1.LineItemCostTypeEnum.LineItemCostType]. - - """ - - class LineItemCostType(proto.Enum): - r"""Describes the LineItem actions that are billable. - - Values: - LINE_ITEM_COST_TYPE_UNSPECIFIED (0): - Not specified value. - CPA (1): - Cost per action. The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP] - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - - [LineItemTypeEnum.LineItemType.BULK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BULK] - - [LineItemTypeEnum.LineItemType.NETWORK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.NETWORK] - CPC (2): - Cost per click. 
The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP] - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - - [LineItemTypeEnum.LineItemType.BULK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BULK] - - [LineItemTypeEnum.LineItemType.NETWORK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.NETWORK] - - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY] - - [LineItemTypeEnum.LineItemType.HOUSE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.HOUSE] - CPD (3): - Cost per day. The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP] - - [LineItemTypeEnum.LineItemType.NETWORK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.NETWORK] - CPM (4): - Cost per mille (thousand) impressions. The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP] - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - - [LineItemTypeEnum.LineItemType.BULK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BULK] - - [LineItemTypeEnum.LineItemType.NETWORK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.NETWORK] - - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY] - - [LineItemTypeEnum.LineItemType.HOUSE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.HOUSE] - VCPM (5): - Cost per mille (thousand) Active View viewable impressions. 
- The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - CPM_IN_TARGET (6): - Cost per millie (thousand) in-target impressions. The line - item [type][google.ads.admanager.v1.LineItem.line_item_type] - must be one of: - - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - CPF (7): - Cost for the entire flight of the deal. The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be must be one of: - - - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP] - """ - LINE_ITEM_COST_TYPE_UNSPECIFIED = 0 - CPA = 1 - CPC = 2 - CPD = 3 - CPM = 4 - VCPM = 5 - CPM_IN_TARGET = 6 - CPF = 7 - - -class CreativeRotationTypeEnum(proto.Message): - r"""Wrapper message for - [CreativeRotationType][google.ads.admanager.v1.CreativeRotationTypeEnum.CreativeRotationType]. - - """ - - class CreativeRotationType(proto.Enum): - r"""The strategy to use for displaying multiple - [creatives][google.ads.admanager.v1.Creative] that are associated - with a line item. - - Values: - CREATIVE_ROTATION_TYPE_UNSPECIFIED (0): - Not specified value - EVENLY (1): - Creatives are displayed approximately the - same number of times over the duration of the - line item. - OPTIMIZED (2): - Creatives are served approximately - proportionally to their performance. - WEIGHTED (3): - Creatives are served approximately proportionally to their - weights, set on the ``LineItemCreativeAssociation``. - SEQUENTIAL (4): - Creatives are served exactly in sequential order, aka - Storyboarding. Set on the ``LineItemCreativeAssociation``. 
- """ - CREATIVE_ROTATION_TYPE_UNSPECIFIED = 0 - EVENLY = 1 - OPTIMIZED = 2 - WEIGHTED = 3 - SEQUENTIAL = 4 - - -class DeliveryRateTypeEnum(proto.Message): - r"""Wrapper message for - [DeliveryRateType][google.ads.admanager.v1.DeliveryRateTypeEnum.DeliveryRateType]. - - """ - - class DeliveryRateType(proto.Enum): - r"""Possible delivery rates for a line item. It dictates the - manner in which the line item is served. - - Values: - DELIVERY_RATE_TYPE_UNSPECIFIED (0): - Not specified value - EVENLY (1): - Line items are served as evenly as possible across the - number of days specified in a line item's - [duration][LineItem.duration]. - FRONTLOADED (2): - Line items are served more aggressively in - the beginning of the flight date. - AS_FAST_AS_POSSIBLE (3): - The booked impressions may delivered well before the - [end_time][google.ads.admanager.v1.LineItem.end_time]. Other - lower-priority or lower-value line items will be stopped - from delivering until the line item meets the number of - impressions or clicks it is booked for. - """ - DELIVERY_RATE_TYPE_UNSPECIFIED = 0 - EVENLY = 1 - FRONTLOADED = 2 - AS_FAST_AS_POSSIBLE = 3 - - -class LineItemDiscountTypeEnum(proto.Message): - r"""Wrapper message for - [LineItemDiscountType][google.ads.admanager.v1.LineItemDiscountTypeEnum.LineItemDiscountType]. - - """ - - class LineItemDiscountType(proto.Enum): - r"""Describes the possible discount types on the cost of booking - a line item. - - Values: - LINE_ITEM_DISCOUNT_TYPE_UNSPECIFIED (0): - No value specified - ABSOLUTE_VALUE (1): - An absolute value will be discounted from the - line item's cost. - PERCENTAGE (2): - A percentage of the cost will be discounted - for booking the line item. - """ - LINE_ITEM_DISCOUNT_TYPE_UNSPECIFIED = 0 - ABSOLUTE_VALUE = 1 - PERCENTAGE = 2 - - -class LineItemTypeEnum(proto.Message): - r"""Wrapper message for - [LineItemType][google.ads.admanager.v1.LineItemTypeEnum.LineItemType]. 
- - """ - - class LineItemType(proto.Enum): - r"""Indicates the priority of a LineItem, determined by the way - in which impressions are reserved to be served for it. - - Values: - LINE_ITEM_TYPE_UNSPECIFIED (0): - Not specified value. - SPONSORSHIP (12): - The type of LineItem for which a percentage - of all the impressions that are being sold are - reserved. - STANDARD (13): - The type of LineItem for which a fixed - quantity of impressions or clicks are reserved. - NETWORK (9): - The type of LineItem most commonly used to - fill a site's unsold inventory if not - contractually obligated to deliver a requested - number of impressions. Uses daily percentage of - unsold impressions or clicks. - BULK (4): - The type of LineItem for which a fixed - quantity of impressions or clicks will be - delivered at a priority lower than the STANDARD - type. - PRICE_PRIORITY (11): - The type of LineItem most commonly used to - fill a site's unsold inventory if not - contractually obligated to deliver a requested - number of impressions. Uses fixed quantity - percentage of unsold impressions or clicks. - HOUSE (7): - The type of LineItem typically used for ads - that promote products and services chosen by the - publisher. - LEGACY_DFP (8): - Represents a legacy LineItem that has been - migrated from the DFP system. - CLICK_TRACKING (6): - The type of LineItem used for ads that track - ads being served externally of Ad Manager. - ADSENSE (2): - A LineItem using dynamic allocation backed by - AdSense. - AD_EXCHANGE (3): - A LineItem using dynamic allocation backed by - the Google Ad Exchange. - BUMPER (5): - Represents a non-monetizable video LineItem - that targets one or more bumper positions, which - are short house video messages used by - publishers to separate content from ad breaks. - ADMOB (1): - A LineItem using dynamic allocation backed by - AdMob. 
- PREFERRED_DEAL (10): - The type of LineItem for which there are no - impressions reserved, and will serve for a - second price bid. - """ - LINE_ITEM_TYPE_UNSPECIFIED = 0 - SPONSORSHIP = 12 - STANDARD = 13 - NETWORK = 9 - BULK = 4 - PRICE_PRIORITY = 11 - HOUSE = 7 - LEGACY_DFP = 8 - CLICK_TRACKING = 6 - ADSENSE = 2 - AD_EXCHANGE = 3 - BUMPER = 5 - ADMOB = 1 - PREFERRED_DEAL = 10 - - -class ReservationStatusEnum(proto.Message): - r"""Wrapper message for - [ReservationStatus][google.ads.admanager.v1.ReservationStatusEnum.ReservationStatus]. - - """ - - class ReservationStatus(proto.Enum): - r"""Defines the different reservation statuses of a line item. - - Values: - RESERVATION_STATUS_UNSPECIFIED (0): - No value specified - RESERVED (1): - Indicates that inventory has been reserved - for the line item. - UNRESERVED (2): - Indicates that inventory has not been - reserved for the line item. - """ - RESERVATION_STATUS_UNSPECIFIED = 0 - RESERVED = 1 - UNRESERVED = 2 - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_service.py deleted file mode 100644 index 6aaf37448195..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_service.py +++ /dev/null @@ -1,491 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import money_pb2 # type: ignore -import proto # type: ignore - -from google.ads.admanager_v1.types import ( - computed_status_enum, - creative_placeholder, - environment_type_enum, - goal, - line_item_enums, -) - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "LineItem", - "GetLineItemRequest", - "ListLineItemsRequest", - "ListLineItemsResponse", - }, -) - - -class LineItem(proto.Message): - r"""The LineItem resource. - - Attributes: - name (str): - Identifier. The resource name of the LineItem. Format: - ``networks/{network_code}/orders/{order_id}/lineItems/{line_item_id}`` - display_name (str): - Optional. Display name of the LineItem. This - attribute has a maximum length of 255 - characters. - archived (bool): - Output only. The archival status of the - LineItem. - contracted_units_bought (int): - Optional. This attribute is only applicable for certain - [line item types][LineItemType] and acts as an "FYI" or - note, which does not impact ad-serving or other backend - systems. - - For [SPONSORSHIP][LineItemType.SPONSORSHIP] line items, this - represents the minimum quantity, which is a lifetime - impression volume goal for reporting purposes. - - For [STANDARD][LineItemType.STANDARD] line items, this - represents the contracted quantity, which is the number of - units specified in the contract that the advertiser has - bought for this line item. This attribute is only available - if you have this feature enabled on your network. - cost_per_unit (google.type.money_pb2.Money): - Required. The amount of money to spend per - impression or click. - cost_type (google.ads.admanager_v1.types.LineItemCostTypeEnum.LineItemCostType): - Required. The method used for billing this - line item. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
The instant at which the - LineItem was created. This attribute may be null - for line items created before this feature was - introduced. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant at which the - LineItem was last updated - creative_rotation_type (google.ads.admanager_v1.types.CreativeRotationTypeEnum.CreativeRotationType): - Required. The strategy used for displaying multiple - [creatives][google.ads.admanager.v1.Creative] that are - associated with the line item. - delivery_rate_type (google.ads.admanager_v1.types.DeliveryRateTypeEnum.DeliveryRateType): - Non-empty default. The strategy for delivering ads over the - duration of the line item. Defaults to - [EVENLY][DeliveryRateType.EVENLY] or - [FRONTLOADED][DeliveryRatetype.FRONTLOADED] depending on the - network's configuration. - discount (float): - Optional. The number here is either a percentage or an - absolute value depending on the - [discount_type][google.ads.admanager.v1.LineItem.discount_type]. - If it is [PERCENTAGE][LineItemDiscountType.PERCENTAGE], then - only non-fractional values are supported. - discount_type (google.ads.admanager_v1.types.LineItemDiscountTypeEnum.LineItemDiscountType): - Non-empty default. The type of discount applied to the line - item. Defaults to - [PERCENTAGE][LineItemDiscountType.PERCENTAGE]. - environment_type (google.ads.admanager_v1.types.EnvironmentTypeEnum.EnvironmentType): - Non-empty default. The environment that the line item is - targeting. The default value is - [BROWSER][EnvironmentType.BROWSER]. If this value is - [VIDEO_PLAYER][EnvironmentType.VIDEO_PLAYER], then this line - item can only target - [AdUnits][google.ads.admanager.v1.AdUnit] that have - ``AdUnitSizes`` whose ``environment_type`` is also - ``VIDEO_PLAYER``. - external_id (str): - Optional. Identifier for the LineItem that is - meaningful to the publisher. This attribute has - a maximum length of 255 characters. 
- start_time (google.protobuf.timestamp_pb2.Timestamp): - Required. Time at which the LineItem will - begin serving. This attribute must be in the - future when creating a LineItem. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. Time at which the LineItem will stop serving. This - attribute is ignored when - [unlimited_end_time][google.ads.admanager.v1.LineItem.unlimited_end_time] - is ``true``. If specified, it must be after - [start_time][google.ads.admanager.v1.LineItem.start_time]. - This end time does not include - [auto_extension_days][google.ads.admanager.v1.LineItem.auto_extension_days]. - auto_extension_days (int): - Optional. Number of days to allow a LineItem to deliver past - its [end_time][google.ads.admanager.v1.LineItem.end_time]. A - maximum of 7 days is allowed. This feature is only available - for Ad Manager 360 accounts. - unlimited_end_time (bool): - Optional. Whether the LineItem has an - [end_time][google.ads.admanager.v1.LineItem.end_time]. This - attribute can be set to ``true`` for only LineItems with - [line_item_type][google.ads.admanager.v1.LineItem.line_item_type] - [SPONSORSHIP][LineItemType.SPONSORSHIP], - [NETWORK][LineItemType.NETWORK], - [PRICE_PRIORITY][LineItemType.PRICE_PRIORITY] and - [HOUSE][LineItemType.HOUSE]. - last_modified_by_app (str): - Output only. The application that last - modified this line item. - line_item_type (google.ads.admanager_v1.types.LineItemTypeEnum.LineItemType): - Required. Determines the default priority of the LineItem - for delivery. More information can be found on the `Ad - Manager Help - Center `__. - missing_creatives (bool): - Output only. Indicates if a line item is missing any - [creatives][google.ads.admanager.v1.Creative] for the - [creative_placeholders][google.ads.admanager.v1.LineItem.creative_placeholders] - specified. 
- - [Creatives][google.ads.admanager.v1.Creative] can be - considered missing for several reasons: - - - Not enough [creatives][google.ads.admanager.v1.Creative] - of a certain size have been uploaded, as determined by - [expectedCreativeCount][google.ads.admanager.v1.CreativePlaceholder.expected_creative_count]. - For example a line item specifies 750x350, 400x200, but - only a 750x350 was uploaded. Or line item specifies - 750x350 with an expected count of 2, but only one was - uploaded. - - The [appliedLabels][Creative.applied_labels] of an - associated [Creative][google.ads.admanager.v1.Creative] - do not match the - [effectiveAppliedLabels][CreativePlaceholder.effective_applied_labels] - of the line item. For example if a line item specifies - 750x350 with a foo applied label, but a 750x350 creative - without an applied label was uploaded. - notes (str): - Optional. Provides any additional notes that - may annotate LineItem. This field has a maximum - length of 65,535 characters. - priority (int): - Optional. Priority of the LineItem for delivery. Valid - values range from 1 to 16. This field can only be changed by - certain networks, otherwise a ``PERMISSION_DENIED`` error - will occur. - - The following list shows the default, minimum, and maximum - priority values for each [LineItemType][LineItemType]: - formatted as ``LineItemType``: default priority (minimum - priority, maximum priority): - - - ``SPONSORSHIP``: 4 (2,5) - - ``STANDARD``: 8 (6,10) - - ``NETWORK``: 12 (11, 14) - - ``BULK``: 12 (11, 14) - - ``PRICE_PRIORITY``: 12 (11, 14) - - ``HOUSE``: 16 (15, 16) - - ``CLICK_TRACKING``: 16 (1, 16) - - ``AD_EXCHANGE``: 12 (1, 16) - - ``ADSENSE``: 12 (1, 16) - - ``BUMPER``: 16 (15, 16) - reservation_status (google.ads.admanager_v1.types.ReservationStatusEnum.ReservationStatus): - Output only. Describes whether or not - inventory has been reserved for the line item. - web_property_code (str): - Optional. 
The web property code used for dynamic allocation - line items. This web property is only required with line - item types [AD_EXCHANGE][LineItemType.AD_EXCHANGE] and - [ADSENSE][LineItemType.ADSENSE]. - creative_placeholders (MutableSequence[google.ads.admanager_v1.types.CreativePlaceholder]): - Required. Details about the creatives that - are expected to serve through this LineItem. - status (google.ads.admanager_v1.types.ComputedStatusEnum.ComputedStatus): - Output only. The status of the LineItem. - primary_goal (google.ads.admanager_v1.types.Goal): - Required. The primary goal that this LineItem - is associated with, which is used in its pacing - and budgeting. - impression_limit (google.ads.admanager_v1.types.Goal): - Optional. The impression limit for the LineItem. This field - is meaningful only if the - [LineItem.line_item_type][google.ads.admanager.v1.LineItem.line_item_type] - is [LineItemType.SPONSORSHIP][] and - [LineItem.cost_type][google.ads.admanager.v1.LineItem.cost_type] - is [CostType.CPM][]. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - archived: bool = proto.Field( - proto.BOOL, - number=14, - ) - contracted_units_bought: int = proto.Field( - proto.INT64, - number=18, - ) - cost_per_unit: money_pb2.Money = proto.Field( - proto.MESSAGE, - number=15, - message=money_pb2.Money, - ) - cost_type: line_item_enums.LineItemCostTypeEnum.LineItemCostType = proto.Field( - proto.ENUM, - number=19, - enum=line_item_enums.LineItemCostTypeEnum.LineItemCostType, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=31, - message=timestamp_pb2.Timestamp, - ) - creative_rotation_type: line_item_enums.CreativeRotationTypeEnum.CreativeRotationType = proto.Field( - proto.ENUM, - number=22, - enum=line_item_enums.CreativeRotationTypeEnum.CreativeRotationType, - ) - delivery_rate_type: line_item_enums.DeliveryRateTypeEnum.DeliveryRateType = ( - proto.Field( - proto.ENUM, - number=23, - enum=line_item_enums.DeliveryRateTypeEnum.DeliveryRateType, - ) - ) - discount: float = proto.Field( - proto.DOUBLE, - number=13, - ) - discount_type: line_item_enums.LineItemDiscountTypeEnum.LineItemDiscountType = ( - proto.Field( - proto.ENUM, - number=24, - enum=line_item_enums.LineItemDiscountTypeEnum.LineItemDiscountType, - ) - ) - environment_type: environment_type_enum.EnvironmentTypeEnum.EnvironmentType = ( - proto.Field( - proto.ENUM, - number=25, - enum=environment_type_enum.EnvironmentTypeEnum.EnvironmentType, - ) - ) - external_id: str = proto.Field( - proto.STRING, - number=5, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - auto_extension_days: 
int = proto.Field( - proto.INT32, - number=8, - ) - unlimited_end_time: bool = proto.Field( - proto.BOOL, - number=9, - ) - last_modified_by_app: str = proto.Field( - proto.STRING, - number=17, - ) - line_item_type: line_item_enums.LineItemTypeEnum.LineItemType = proto.Field( - proto.ENUM, - number=10, - enum=line_item_enums.LineItemTypeEnum.LineItemType, - ) - missing_creatives: bool = proto.Field( - proto.BOOL, - number=16, - ) - notes: str = proto.Field( - proto.STRING, - number=20, - ) - priority: int = proto.Field( - proto.INT64, - number=11, - ) - reservation_status: line_item_enums.ReservationStatusEnum.ReservationStatus = ( - proto.Field( - proto.ENUM, - number=26, - enum=line_item_enums.ReservationStatusEnum.ReservationStatus, - ) - ) - web_property_code: str = proto.Field( - proto.STRING, - number=21, - ) - creative_placeholders: MutableSequence[ - creative_placeholder.CreativePlaceholder - ] = proto.RepeatedField( - proto.MESSAGE, - number=27, - message=creative_placeholder.CreativePlaceholder, - ) - status: computed_status_enum.ComputedStatusEnum.ComputedStatus = proto.Field( - proto.ENUM, - number=28, - enum=computed_status_enum.ComputedStatusEnum.ComputedStatus, - ) - primary_goal: goal.Goal = proto.Field( - proto.MESSAGE, - number=29, - message=goal.Goal, - ) - impression_limit: goal.Goal = proto.Field( - proto.MESSAGE, - number=30, - message=goal.Goal, - ) - - -class GetLineItemRequest(proto.Message): - r"""Request object for GetLineItem method. - - Attributes: - name (str): - Required. The resource name of the LineItem. Format: - ``networks/{network_code}/orders/{order_id}/lineItems/{line_item_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListLineItemsRequest(proto.Message): - r"""Request object for ListLineItems method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - LineItems. Format: networks/{network_code}/orders/{order_id} - page_size (int): - Optional. 
The maximum number of LineItems to - return. The service may return fewer than this - value. If unspecified, at most 50 line items - will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. A page token, received from a previous - ``ListLineItems`` call. Provide this to retrieve the - subsequent page. - - When paginating, all other parameters provided to - ``ListLineItems`` must match the call that provided the page - token. - filter (str): - Optional. Expression to filter the response. - See syntax details at - https://developers.google.com/ad-manager/api/beta/filters - order_by (str): - Optional. Expression to specify sorting - order. See syntax details at - https://developers.google.com/ad-manager/api/beta/filters#order - skip (int): - Optional. Number of individual resources to - skip while paginating. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - skip: int = proto.Field( - proto.INT32, - number=6, - ) - - -class ListLineItemsResponse(proto.Message): - r"""Response object for ListLineItemsRequest containing matching - LineItem resources. - - Attributes: - line_items (MutableSequence[google.ads.admanager_v1.types.LineItem]): - The LineItem from the specified network. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - total_size (int): - Total number of LineItems. If a filter was included in the - request, this reflects the total number after the filtering - is applied. - - ``total_size`` will not be calculated in the response unless - it has been included in a response field mask. 
The response - field mask can be provided to the method by using the URL - parameter ``$fields`` or ``fields``, or by using the - HTTP/gRPC header ``X-Goog-FieldMask``. - - For more information, see - https://developers.google.com/ad-manager/api/beta/field-masks - """ - - @property - def raw_page(self): - return self - - line_items: MutableSequence["LineItem"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="LineItem", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - total_size: int = proto.Field( - proto.INT32, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/network_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/network_messages.py new file mode 100644 index 000000000000..7c815ca0767b --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/network_messages.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Network", + }, +) + + +class Network(proto.Message): + r"""The Network resource. + + Attributes: + name (str): + Identifier. The resource name of the Network. 
Format: + networks/{network_code} + display_name (str): + Optional. Display name for Network. + network_code (str): + Output only. Network Code. + property_code (str): + Output only. Property code. + time_zone (str): + Output only. Time zone associated with the + delivery of orders and reporting. + currency_code (str): + Output only. Primary currency code, in + ISO-4217 format. + secondary_currency_codes (MutableSequence[str]): + Optional. Currency codes that can be used as + an alternative to the primary currency code for + trafficking Line Items. + effective_root_ad_unit (str): + Output only. Top most `Ad + Unit `__ to which descendant + Ad Units can be added. Format: + networks/{network_code}/adUnit/{ad_unit_id} + test_network (bool): + Output only. Whether this is a test network. + network_id (int): + Output only. Network ID. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + network_code: str = proto.Field( + proto.STRING, + number=3, + ) + property_code: str = proto.Field( + proto.STRING, + number=4, + ) + time_zone: str = proto.Field( + proto.STRING, + number=5, + ) + currency_code: str = proto.Field( + proto.STRING, + number=6, + ) + secondary_currency_codes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + effective_root_ad_unit: str = proto.Field( + proto.STRING, + number=8, + ) + test_network: bool = proto.Field( + proto.BOOL, + number=10, + ) + network_id: int = proto.Field( + proto.INT64, + number=11, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/network_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/network_service.py index 8a564ae54919..7ee37dfea726 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/network_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/network_service.py @@ 
-19,103 +19,49 @@ import proto # type: ignore +from google.ads.admanager_v1.types import network_messages + __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "Network", "GetNetworkRequest", + "ListNetworksRequest", + "ListNetworksResponse", }, ) -class Network(proto.Message): - r"""The Network resource. +class GetNetworkRequest(proto.Message): + r"""Request to get Network Attributes: name (str): - Identifier. The resource name of the Network. Format: + Required. Resource name of Network. Format: networks/{network_code} - display_name (str): - Optional. Display name for Network. - network_code (str): - Output only. Network Code. - property_code (str): - Output only. Property code. - time_zone (str): - Output only. Time zone associated with the - delivery of orders and reporting. - currency_code (str): - Output only. Primary currency code, in - ISO-4217 format. - secondary_currency_codes (MutableSequence[str]): - Optional. Currency codes that can be used as - an alternative to the primary currency code for - trafficking Line Items. - effective_root_ad_unit (str): - Output only. Top most `Ad - Unit `__ to which descendant - Ad Units can be added. Format: - networks/{network_code}/adUnit/{ad_unit_id} - test_network (bool): - Output only. Whether this is a test network. - network_id (int): - Output only. Network ID. 
""" name: str = proto.Field( proto.STRING, number=1, ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - network_code: str = proto.Field( - proto.STRING, - number=3, - ) - property_code: str = proto.Field( - proto.STRING, - number=4, - ) - time_zone: str = proto.Field( - proto.STRING, - number=5, - ) - currency_code: str = proto.Field( - proto.STRING, - number=6, - ) - secondary_currency_codes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - effective_root_ad_unit: str = proto.Field( - proto.STRING, - number=8, - ) - test_network: bool = proto.Field( - proto.BOOL, - number=10, - ) - network_id: int = proto.Field( - proto.INT64, - number=11, - ) -class GetNetworkRequest(proto.Message): - r"""Request to get Network +class ListNetworksRequest(proto.Message): + r"""Request object for ``ListNetworks`` method.""" + + +class ListNetworksResponse(proto.Message): + r"""Response object for ``ListNetworks`` method. Attributes: - name (str): - Required. Resource name of Network. Format: - networks/{network_code} + networks (MutableSequence[google.ads.admanager_v1.types.Network]): + The ``Network``\ s a user has access to. """ - name: str = proto.Field( - proto.STRING, + networks: MutableSequence[network_messages.Network] = proto.RepeatedField( + proto.MESSAGE, number=1, + message=network_messages.Network, ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/order_enums.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/order_enums.py new file mode 100644 index 000000000000..1ec6647453c1 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/order_enums.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "OrderStatusEnum", + }, +) + + +class OrderStatusEnum(proto.Message): + r"""Wrapper message for + [OrderStatus][google.ads.admanager.v1.OrderStatusEnum.OrderStatus]. + + """ + + class OrderStatus(proto.Enum): + r"""The status of an Order. + + Values: + ORDER_STATUS_UNSPECIFIED (0): + Default value. This value is unused. + DRAFT (2): + Indicates that the Order has just been + created but no approval has been requested yet. + PENDING_APPROVAL (3): + Indicates that a request for approval for the + Order has been made. + APPROVED (4): + Indicates that the Order has been approved + and is ready to serve. + DISAPPROVED (5): + Indicates that the Order has been disapproved + and is not eligible to serve. + PAUSED (6): + This is a legacy state. Paused status should + be checked on LineItems within the order. + CANCELED (7): + Indicates that the Order has been canceled + and cannot serve. + DELETED (8): + Indicates that the Order has been deleted. 
+ """ + ORDER_STATUS_UNSPECIFIED = 0 + DRAFT = 2 + PENDING_APPROVAL = 3 + APPROVED = 4 + DISAPPROVED = 5 + PAUSED = 6 + CANCELED = 7 + DELETED = 8 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/order_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/order_messages.py new file mode 100644 index 000000000000..4e85a8eea818 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/order_messages.py @@ -0,0 +1,278 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.ads.admanager_v1.types import applied_label, custom_field_value, order_enums + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Order", + }, +) + + +class Order(proto.Message): + r"""The ``Order`` resource. + + Attributes: + name (str): + Identifier. The resource name of the ``Order``. Format: + ``networks/{network_code}/orders/{order_id}`` + order_id (int): + Output only. Order ID. + display_name (str): + Required. The display name of the Order. + This value has a maximum length of 255 + characters. + programmatic (bool): + Optional. Specifies whether or not the Order + is a programmatic order. 
+ trafficker (str): + Required. The resource name of the User responsible for + trafficking the Order. Format: + "networks/{network_code}/users/{user_id}". + advertiser_contacts (MutableSequence[str]): + Optional. The resource names of Contacts from the advertiser + of this Order. Format: + "networks/{network_code}/contacts/{contact_id}". + advertiser (str): + Required. The resource name of the Company, which is of type + Company.Type.ADVERTISER, to which this order belongs. + Format: "networks/{network_code}/companies/{company_id}". + agency_contacts (MutableSequence[str]): + Optional. The resource names of Contacts from the + advertising Agency of this Order. Format: + "networks/{network_code}/contacts/{contact_id}". + agency (str): + Optional. The resource name of the Company, which is of type + Company.Type.AGENCY, with which this order is associated. + Format: "networks/{network_code}/companies/{company_id}". + applied_teams (MutableSequence[str]): + Optional. The resource names of Teams directly applied to + this Order. Format: + "networks/{network_code}/teams/{team_id}". + effective_teams (MutableSequence[str]): + Output only. The resource names of Teams applied to this + Order including inherited values. Format: + "networks/{network_code}/teams/{team_id}". + creator (str): + Output only. The resource name of the User who created the + Order on behalf of the advertiser. This value is assigned by + Google. Format: "networks/{network_code}/users/{user_id}". + currency_code (str): + Output only. The ISO 4217 3-letter currency + code for the currency used by the Order. This + value is the network's currency code. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The instant at which the Order and its + associated line items are eligible to begin serving. This + attribute is derived from the line item of the order that + has the earliest LineItem.start_time. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The instant at which the Order and its + associated line items stop being served. This attribute is + derived from the line item of the order that has the latest + LineItem.end_time. + unlimited_end_time (bool): + Output only. Indicates whether or not this + Order has an end time. + external_order_id (int): + Optional. An arbitrary ID to associate to the + Order, which can be used as a key to an external + system. + archived (bool): + Output only. The archival status of the + Order. + last_modified_by_app (str): + Output only. The application which modified + this order. This attribute is assigned by + Google. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The instant this Order was last + modified. + notes (str): + Optional. Provides any additional notes that + may annotate the Order. This attribute has a + maximum length of 65,535 characters. + po_number (str): + Optional. The purchase order number for the + Order. This value has a maximum length of 63 + characters. + status (google.ads.admanager_v1.types.OrderStatusEnum.OrderStatus): + Output only. The status of the Order. + salesperson (str): + Optional. The resource name of the User responsible for the + sales of the Order. Format: + "networks/{network_code}/users/{user_id}". + secondary_salespeople (MutableSequence[str]): + Optional. Unordered list. The resource names of the + secondary salespeople associated with the order. Format: + "networks/{network_code}/users/{user_id}". + secondary_traffickers (MutableSequence[str]): + Optional. Unordered list. The resource names of the + secondary traffickers associated with the order. Format: + "networks/{network_code}/users/{user_id}". + applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): + Optional. The set of labels applied directly + to this order. + effective_applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): + Output only. 
Contains the set of labels + applied directly to the order as well as those + inherited from the company that owns the order. + If a label has been negated, only the negated + label is returned. This field is assigned by + Google. + custom_field_values (MutableSequence[google.ads.admanager_v1.types.CustomFieldValue]): + Optional. The set of custom field values to + this order. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + order_id: int = proto.Field( + proto.INT64, + number=4, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + programmatic: bool = proto.Field( + proto.BOOL, + number=3, + ) + trafficker: str = proto.Field( + proto.STRING, + number=23, + ) + advertiser_contacts: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + advertiser: str = proto.Field( + proto.STRING, + number=6, + ) + agency_contacts: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + agency: str = proto.Field( + proto.STRING, + number=8, + ) + applied_teams: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + effective_teams: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=28, + ) + creator: str = proto.Field( + proto.STRING, + number=10, + ) + currency_code: str = proto.Field( + proto.STRING, + number=11, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=19, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + unlimited_end_time: bool = proto.Field( + proto.BOOL, + number=45, + ) + external_order_id: int = proto.Field( + proto.INT64, + number=13, + ) + archived: bool = proto.Field( + proto.BOOL, + number=14, + ) + last_modified_by_app: str = proto.Field( + proto.STRING, + number=15, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp_pb2.Timestamp, + ) + 
notes: str = proto.Field( + proto.STRING, + number=17, + ) + po_number: str = proto.Field( + proto.STRING, + number=18, + ) + status: order_enums.OrderStatusEnum.OrderStatus = proto.Field( + proto.ENUM, + number=20, + enum=order_enums.OrderStatusEnum.OrderStatus, + ) + salesperson: str = proto.Field( + proto.STRING, + number=21, + ) + secondary_salespeople: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22, + ) + secondary_traffickers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=24, + ) + applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message=applied_label.AppliedLabel, + ) + effective_applied_labels: MutableSequence[ + applied_label.AppliedLabel + ] = proto.RepeatedField( + proto.MESSAGE, + number=26, + message=applied_label.AppliedLabel, + ) + custom_field_values: MutableSequence[ + custom_field_value.CustomFieldValue + ] = proto.RepeatedField( + proto.MESSAGE, + number=38, + message=custom_field_value.CustomFieldValue, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/order_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/order_service.py index 3884d4acf7ed..3b13ef2d8105 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/order_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/order_service.py @@ -17,15 +17,13 @@ from typing import MutableMapping, MutableSequence -from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore -from google.ads.admanager_v1.types import applied_label +from google.ads.admanager_v1.types import order_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "Order", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", @@ -33,271 +31,6 @@ ) -class Order(proto.Message): - r"""The ``Order`` resource. 
- - Attributes: - name (str): - Identifier. The resource name of the ``Order``. Format: - ``networks/{network_code}/orders/{order_id}`` - order_id (int): - Output only. Order ID. - display_name (str): - Required. The display name of the Order. - This value is required to create an order and - has a maximum length of 255 characters. - programmatic (bool): - Optional. Specifies whether or not the Order - is a programmatic order. - trafficker (str): - Required. The resource name of the User responsible for - trafficking the Order. Format: - "networks/{network_code}/users/{user_id}". - advertiser_contacts (MutableSequence[str]): - Optional. The resource names of Contacts from the advertiser - of this Order. Format: - "networks/{network_code}/contacts/{contact_id}". - advertiser (str): - Required. The resource name of the Company, which is of type - Company.Type.ADVERTISER, to which this order belongs. This - attribute is required. Format: - "networks/{network_code}/companies/{company_id}". - agency_contacts (MutableSequence[str]): - Optional. The resource names of Contacts from the - advertising Agency of this Order. Format: - "networks/{network_code}/contacts/{contact_id}". - agency (str): - Optional. The resource name of the Company, which is of type - Company.Type.AGENCY, with which this order is associated. - Format: "networks/{network_code}/companies/{company_id}". - applied_teams (MutableSequence[str]): - Optional. The resource names of Teams directly applied to - this Order. Format: - "networks/{network_code}/teams/{team_id}". - effective_teams (MutableSequence[str]): - Output only. The resource names of Teams applied to this - Order including inherited values. Format: - "networks/{network_code}/teams/{team_id}". - creator (str): - Output only. The resource name of the User who created the - Order on behalf of the advertiser. This value is assigned by - Google. Format: "networks/{network_code}/users/{user_id}". - currency_code (str): - Output only. 
The ISO 4217 3-letter currency - code for the currency used by the Order. This - value is the network's currency code. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant at which the Order and its - associated line items are eligible to begin serving. This - attribute is derived from the line item of the order that - has the earliest LineItem.start_time. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant at which the Order and its - associated line items stop being served. This attribute is - derived from the line item of the order that has the latest - LineItem.end_time. - external_order_id (int): - Optional. An arbitrary ID to associate to the - Order, which can be used as a key to an external - system. - archived (bool): - Output only. The archival status of the - Order. - last_modified_by_app (str): - Output only. The application which modified - this order. This attribute is assigned by - Google. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant this Order was last - modified. - notes (str): - Optional. Provides any additional notes that - may annotate the Order. This attribute has a - maximum length of 65,535 characters. - po_number (str): - Optional. The purchase order number for the - Order. This value has a maximum length of 63 - characters. - status (google.ads.admanager_v1.types.Order.Status): - Output only. The status of the Order. - salesperson (str): - Optional. The resource name of the User responsible for the - sales of the Order. Format: - "networks/{network_code}/users/{user_id}". - secondary_salespeople (MutableSequence[str]): - Optional. The resource names of the secondary salespeople - associated with the order. Format: - "networks/{network_code}/users/{user_id}". - secondary_traffickers (MutableSequence[str]): - Optional. The resource names of the secondary traffickers - associated with the order. Format: - "networks/{network_code}/users/{user_id}". 
- applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): - Optional. The set of labels applied directly - to this order. - effective_applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): - Output only. Contains the set of labels - applied directly to the order as well as those - inherited from the company that owns the order. - If a label has been negated, only the negated - label is returned. This field is assigned by - Google. - """ - - class Status(proto.Enum): - r"""The status of an Order. - - Values: - STATUS_UNSPECIFIED (0): - Default value. This value is unused. - DRAFT (2): - Indicates that the Order has just been - created but no approval has been requested yet. - PENDING_APPROVAL (3): - Indicates that a request for approval for the - Order has been made. - APPROVED (4): - Indicates that the Order has been approved - and is ready to serve. - DISAPPROVED (5): - Indicates that the Order has been disapproved - and is not eligible to serve. - PAUSED (6): - This is a legacy state. Paused status should - be checked on LineItems within the order. - CANCELED (7): - Indicates that the Order has been canceled - and cannot serve. - DELETED (8): - Indicates that the Order has been deleted. 
- """ - STATUS_UNSPECIFIED = 0 - DRAFT = 2 - PENDING_APPROVAL = 3 - APPROVED = 4 - DISAPPROVED = 5 - PAUSED = 6 - CANCELED = 7 - DELETED = 8 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - order_id: int = proto.Field( - proto.INT64, - number=4, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - programmatic: bool = proto.Field( - proto.BOOL, - number=3, - ) - trafficker: str = proto.Field( - proto.STRING, - number=23, - ) - advertiser_contacts: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - advertiser: str = proto.Field( - proto.STRING, - number=6, - ) - agency_contacts: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - agency: str = proto.Field( - proto.STRING, - number=8, - ) - applied_teams: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=9, - ) - effective_teams: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=28, - ) - creator: str = proto.Field( - proto.STRING, - number=10, - ) - currency_code: str = proto.Field( - proto.STRING, - number=11, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=19, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - external_order_id: int = proto.Field( - proto.INT64, - number=13, - ) - archived: bool = proto.Field( - proto.BOOL, - number=14, - ) - last_modified_by_app: str = proto.Field( - proto.STRING, - number=15, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=16, - message=timestamp_pb2.Timestamp, - ) - notes: str = proto.Field( - proto.STRING, - number=17, - ) - po_number: str = proto.Field( - proto.STRING, - number=18, - ) - status: Status = proto.Field( - proto.ENUM, - number=20, - enum=Status, - ) - salesperson: str = proto.Field( - proto.STRING, - number=21, - ) - secondary_salespeople: MutableSequence[str] = 
proto.RepeatedField( - proto.STRING, - number=22, - ) - secondary_traffickers: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=24, - ) - applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( - proto.MESSAGE, - number=25, - message=applied_label.AppliedLabel, - ) - effective_applied_labels: MutableSequence[ - applied_label.AppliedLabel - ] = proto.RepeatedField( - proto.MESSAGE, - number=26, - message=applied_label.AppliedLabel, - ) - - class GetOrderRequest(proto.Message): r"""Request object for ``GetOrder`` method. @@ -402,10 +135,10 @@ class ListOrdersResponse(proto.Message): def raw_page(self): return self - orders: MutableSequence["Order"] = proto.RepeatedField( + orders: MutableSequence[order_messages.Order] = proto.RepeatedField( proto.MESSAGE, number=1, - message="Order", + message=order_messages.Order, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_messages.py new file mode 100644 index 000000000000..5705c4acda2f --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_messages.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.ads.admanager_v1.types import placement_enums + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Placement", + }, +) + + +class Placement(proto.Message): + r"""The ``Placement`` resource. + + Attributes: + name (str): + Identifier. The resource name of the ``Placement``. Format: + ``networks/{network_code}/placements/{placement_id}`` + placement_id (int): + Output only. ``Placement`` ID. + display_name (str): + Required. The display name of the placement. + Its maximum length is 255 characters. + description (str): + Optional. A description of the Placement. + This value is optional and its maximum length is + 65,535 characters. + placement_code (str): + Output only. A string used to uniquely + identify the Placement for purposes of serving + the ad. This attribute is read-only and is + assigned by Google when a placement is created. + status (google.ads.admanager_v1.types.PlacementStatusEnum.PlacementStatus): + Output only. The status of the Placement. + This attribute is read-only. + targeted_ad_units (MutableSequence[str]): + Optional. The resource names of AdUnits that constitute the + Placement. Format: + "networks/{network_code}/adUnits/{ad_unit_id}". + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The instant this Placement was + last modified. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + placement_id: int = proto.Field( + proto.INT64, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + placement_code: str = proto.Field( + proto.STRING, + number=5, + ) + status: placement_enums.PlacementStatusEnum.PlacementStatus = proto.Field( + proto.ENUM, + number=6, + enum=placement_enums.PlacementStatusEnum.PlacementStatus, + ) + targeted_ad_units: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_service.py index 5221b8ce4a6f..6d313e514801 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_service.py @@ -17,15 +17,13 @@ from typing import MutableMapping, MutableSequence -from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore -from google.ads.admanager_v1.types import placement_enums +from google.ads.admanager_v1.types import placement_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "Placement", "GetPlacementRequest", "ListPlacementsRequest", "ListPlacementsResponse", @@ -33,75 +31,6 @@ ) -class Placement(proto.Message): - r"""The ``Placement`` resource. - - Attributes: - name (str): - Identifier. The resource name of the ``Placement``. Format: - ``networks/{network_code}/placements/{placement_id}`` - placement_id (int): - Output only. ``Placement`` ID. - display_name (str): - Required. The display name of the placement. 
- Its maximum length is 255 characters. - description (str): - Optional. A description of the Placement. - This value is optional and its maximum length is - 65,535 characters. - placement_code (str): - Output only. A string used to uniquely - identify the Placement for purposes of serving - the ad. This attribute is read-only and is - assigned by Google when a placement is created. - status (google.ads.admanager_v1.types.PlacementStatusEnum.PlacementStatus): - Output only. The status of the Placement. - This attribute is read-only. - targeted_ad_units (MutableSequence[str]): - Optional. The resource names of AdUnits that constitute the - Placement. Format: - "networks/{network_code}/adUnits/{ad_unit_id}". - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant this Placement was - last modified. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - placement_id: int = proto.Field( - proto.INT64, - number=2, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - description: str = proto.Field( - proto.STRING, - number=4, - ) - placement_code: str = proto.Field( - proto.STRING, - number=5, - ) - status: placement_enums.PlacementStatusEnum.PlacementStatus = proto.Field( - proto.ENUM, - number=6, - enum=placement_enums.PlacementStatusEnum.PlacementStatus, - ) - targeted_ad_units: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - - class GetPlacementRequest(proto.Message): r"""Request object for ``GetPlacement`` method. 
@@ -207,10 +136,10 @@ class ListPlacementsResponse(proto.Message): def raw_page(self): return self - placements: MutableSequence["Placement"] = proto.RepeatedField( + placements: MutableSequence[placement_messages.Placement] = proto.RepeatedField( proto.MESSAGE, number=1, - message="Placement", + message=placement_messages.Placement, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/report_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/report_service.py index f479273afbb6..9cbecb1b5556 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/report_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/report_service.py @@ -17,143 +17,4654 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ "Report", - "ExportSavedReportRequest", - "ExportSavedReportMetadata", - "ExportSavedReportResponse", + "RunReportRequest", + "RunReportMetadata", + "RunReportResponse", + "GetReportRequest", + "ListReportsRequest", + "ListReportsResponse", + "CreateReportRequest", + "UpdateReportRequest", + "FetchReportResultRowsRequest", + "FetchReportResultRowsResponse", + "ReportDefinition", + "ScheduleOptions", + "Schedule", }, ) class Report(proto.Message): - r"""The Report resource. + r"""The ``Report`` resource. Attributes: name (str): - Identifier. The resource name of the Report. Report resource + Identifier. The resource name of the report. Report resource name have the form: ``networks/{network_code}/reports/{report_id}`` + report_id (int): + Output only. Report ID. 
+ visibility (google.ads.admanager_v1.types.Report.Visibility): + Optional. The visibility of a report. + report_definition (google.ads.admanager_v1.types.ReportDefinition): + Required. The report definition of the + report. + display_name (str): + Optional. Display name for the report. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The instant this report was last + modified. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The instant this report was + created. + locale (str): + Output only. The locale of this report. + Locale is set from the user's locale at the time + of the request. Locale can not be modified. + schedule_options (google.ads.admanager_v1.types.ScheduleOptions): + Optional. The schedule options of this + report. """ + class TimePeriodColumn(proto.Enum): + r"""Valid time period columns. + + Values: + TIME_PERIOD_COLUMN_UNSPECIFIED (0): + Default value. Report will have no time + period column. + TIME_PERIOD_COLUMN_DATE (1): + A column for each date in the report. + TIME_PERIOD_COLUMN_WEEK (2): + A column for each week in the report. + TIME_PERIOD_COLUMN_MONTH (3): + A column for each month in the report. + TIME_PERIOD_COLUMN_QUARTER (4): + A column for each quarter in the report. + """ + TIME_PERIOD_COLUMN_UNSPECIFIED = 0 + TIME_PERIOD_COLUMN_DATE = 1 + TIME_PERIOD_COLUMN_WEEK = 2 + TIME_PERIOD_COLUMN_MONTH = 3 + TIME_PERIOD_COLUMN_QUARTER = 4 + + class Dimension(proto.Enum): + r"""Reporting dimensions. + + Values: + DIMENSION_UNSPECIFIED (0): + Default value. This value is unused. + ADVERTISER_DOMAIN_NAME (242): + The domain name of the advertiser. 
+ ADVERTISER_EXTERNAL_ID (228): + The ID used in an external system for + advertiser identification + ADVERTISER_ID (131): + The ID of an advertiser company assigned to + an order + ADVERTISER_LABELS (230): + Labels applied to the advertiser + can be used for either competitive exclusion or + ad exclusion + ADVERTISER_LABEL_IDS (229): + Label ids applied to the advertiser + can be used for either competitive exclusion or + ad exclusion + ADVERTISER_NAME (132): + The name of an advertiser company assigned to + an order + ADVERTISER_PRIMARY_CONTACT (227): + The name of the contact associated with an + advertiser company + AD_LOCATION (390): + Shows an ENUM value describing whether a + given piece of publisher inventory was above + (ATF) or below the fold (BTF) of a page. + AD_LOCATION_NAME (391): + Shows a localized string describing whether a + given piece of publisher inventory was above + (ATF) or below the fold (BTF) of a page. + AD_UNIT_CODE (64): + The code of the ad unit where the ad was + requested. + AD_UNIT_CODE_LEVEL_1 (65): + The code of the first level ad unit of the ad + unit where the ad was requested. + AD_UNIT_CODE_LEVEL_10 (74): + The code of the tenth level ad unit of the ad + unit where the ad was requested. + AD_UNIT_CODE_LEVEL_11 (75): + The code of the eleventh level ad unit of the + ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_12 (76): + The code of the twelfth level ad unit of the + ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_13 (77): + The code of the thirteenth level ad unit of + the ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_14 (78): + The code of the fourteenth level ad unit of + the ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_15 (79): + The code of the fifteenth level ad unit of + the ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_16 (80): + The code of the sixteenth level ad unit of + the ad unit where the ad was requested. 
+ AD_UNIT_CODE_LEVEL_2 (66): + The code of the second level ad unit of the + ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_3 (67): + The code of the third level ad unit of the ad + unit where the ad was requested. + AD_UNIT_CODE_LEVEL_4 (68): + The code of the fourth level ad unit of the + ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_5 (69): + The code of the fifth level ad unit of the ad + unit where the ad was requested. + AD_UNIT_CODE_LEVEL_6 (70): + The code of the sixth level ad unit of the ad + unit where the ad was requested. + AD_UNIT_CODE_LEVEL_7 (71): + The code of the seventh level ad unit of the + ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_8 (72): + The code of the eighth level ad unit of the + ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_9 (73): + The code of the ninth level ad unit of the ad + unit where the ad was requested. + AD_UNIT_DEPTH (101): + The depth of the ad unit's hierarchy + AD_UNIT_ID (25): + The ID of the ad unit where the ad was + requested. + AD_UNIT_ID_ALL_LEVEL (27): + The full hierarchy of ad unit IDs where the + ad was requested, from root to leaf, excluding + the root ad unit ID. + AD_UNIT_ID_LEVEL_1 (30): + The first level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_10 (48): + The tenth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_11 (50): + The eleventh level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_12 (52): + The twelfth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_13 (54): + The thirteenth level ad unit ID of the ad + unit where the ad was requested. + AD_UNIT_ID_LEVEL_14 (56): + The fourteenth level ad unit ID of the ad + unit where the ad was requested. + AD_UNIT_ID_LEVEL_15 (58): + The fifteenth level ad unit ID of the ad unit + where the ad was requested. 
+ AD_UNIT_ID_LEVEL_16 (60): + The sixteenth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_2 (32): + The second level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_3 (34): + The third level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_4 (36): + The fourth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_5 (38): + The fifth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_6 (40): + The sixth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_7 (42): + The seventh level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_8 (44): + The eighth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_9 (46): + The ninth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_TOP_LEVEL (142): + The top-level ad unit ID of the ad unit where + the ad was requested. + AD_UNIT_NAME (26): + The name of the ad unit where the ad was + requested. + AD_UNIT_NAME_ALL_LEVEL (29): + The full hierarchy of ad unit names where the + ad was requested, from root to leaf, excluding + the root ad unit name. + AD_UNIT_NAME_LEVEL_1 (31): + The first level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_10 (49): + The tenth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_11 (51): + The eleventh level ad unit name of the ad + unit where the ad was requested. + AD_UNIT_NAME_LEVEL_12 (53): + The twelfth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_13 (55): + The thirteenth level ad unit name of the ad + unit where the ad was requested. + AD_UNIT_NAME_LEVEL_14 (57): + The fourteenth level ad unit name of the ad + unit where the ad was requested. 
+ AD_UNIT_NAME_LEVEL_15 (59): + The fifteenth level ad unit name of the ad + unit where the ad was requested. + AD_UNIT_NAME_LEVEL_16 (61): + The sixteenth level ad unit name of the ad + unit where the ad was requested. + AD_UNIT_NAME_LEVEL_2 (33): + The second level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_3 (35): + The third level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_4 (37): + The fourth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_5 (39): + The fifth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_6 (41): + The sixth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_7 (43): + The seventh level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_8 (45): + The eighth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_9 (47): + The ninth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_TOP_LEVEL (143): + The top-level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_REWARD_AMOUNT (63): + The reward amount of the ad unit where the ad + was requested. + AD_UNIT_REWARD_TYPE (62): + The reward type of the ad unit where the ad + was requested. + AD_UNIT_STATUS (206): + The status of the ad unit + AD_UNIT_STATUS_NAME (207): + The name of the status of the ad unit + APP_VERSION (392): + The app version. 
+ BACKFILL_ADVERTISER_EXTERNAL_ID (349): + The ID used in an external system for + advertiser identification + BACKFILL_ADVERTISER_ID (346): + The ID of an advertiser company assigned to a + backfill order + BACKFILL_ADVERTISER_LABELS (351): + Labels applied to the advertiser + can be used for either competitive exclusion or + ad exclusion + BACKFILL_ADVERTISER_LABEL_IDS (350): + Label ids applied to the advertiser + can be used for either competitive exclusion or + ad exclusion + BACKFILL_ADVERTISER_NAME (347): + The name of an advertiser company assigned to + a backfill order + BACKFILL_ADVERTISER_PRIMARY_CONTACT (348): + The name of the contact associated with an + advertiser company + BACKFILL_CREATIVE_BILLING_TYPE (378): + Enum value of Backfill creative billing type + BACKFILL_CREATIVE_BILLING_TYPE_NAME (379): + Localized string value of Backfill creative + billing type + BACKFILL_CREATIVE_CLICK_THROUGH_URL (376): + Represents the click-through URL of a + Backfill creative + BACKFILL_CREATIVE_ID (370): + The ID of a Backfill creative + BACKFILL_CREATIVE_NAME (371): + Backfill creative name + BACKFILL_CREATIVE_THIRD_PARTY_VENDOR (377): + Third party vendor name of a Backfill + creative + BACKFILL_CREATIVE_TYPE (374): + Enum value of Backfill creative type + BACKFILL_CREATIVE_TYPE_NAME (375): + Localized string name of Backfill creative + type + BACKFILL_LINE_ITEM_ARCHIVED (278): + Whether a Backfill line item is archived. + BACKFILL_LINE_ITEM_COMPANION_DELIVERY_OPTION (258): + Backfill line item comanion delivery option + ENUM value. + BACKFILL_LINE_ITEM_COMPANION_DELIVERY_OPTION_NAME (259): + Localized Backfill line item comanion + delivery option name. + BACKFILL_LINE_ITEM_COMPUTED_STATUS (296): + The computed status of the BackfillLineItem. + BACKFILL_LINE_ITEM_COMPUTED_STATUS_NAME (297): + The localized name of the computed status of + the BackfillLineItem. 
+ BACKFILL_LINE_ITEM_CONTRACTED_QUANTITY (280): + The contracted units bought for the Backfill + line item. + BACKFILL_LINE_ITEM_COST_PER_UNIT (272): + The cost per unit of the Backfill line item. + BACKFILL_LINE_ITEM_COST_TYPE (264): + Backfill line item cost type ENUM value. + BACKFILL_LINE_ITEM_COST_TYPE_NAME (265): + Localized Backfill line item cost type name. + BACKFILL_LINE_ITEM_CREATIVE_END_DATE (381): + Represent the end date of a Backfill creative + associated with a Backfill line item + BACKFILL_LINE_ITEM_CREATIVE_ROTATION_TYPE (290): + The creative rotation type of the + BackfillLineItem. + BACKFILL_LINE_ITEM_CREATIVE_ROTATION_TYPE_NAME (291): + The localized name of the creative rotation + type of the BackfillLineItem. + BACKFILL_LINE_ITEM_CREATIVE_START_DATE (380): + Represent the start date of a Backfill + creative associated with a Backfill line item + BACKFILL_LINE_ITEM_CURRENCY_CODE (288): + The 3 letter currency code of the Backfill + line item + BACKFILL_LINE_ITEM_DELIVERY_INDICATOR (274): + The progress made for the delivery of the + Backfill line item. + BACKFILL_LINE_ITEM_DELIVERY_RATE_TYPE (292): + The delivery rate type of the + BackfillLineItem. + BACKFILL_LINE_ITEM_DELIVERY_RATE_TYPE_NAME (293): + The localized name of the delivery rate type + of the BackfillLineItem. + BACKFILL_LINE_ITEM_DISCOUNT_ABSOLUTE (294): + The discount of the BackfillLineItem in whole + units in the BackfillLineItem's currency code, + or if unspecified the Network's currency code. + BACKFILL_LINE_ITEM_DISCOUNT_PERCENTAGE (295): + The discount of the BackfillLineItem in + percentage. + BACKFILL_LINE_ITEM_END_DATE (267): + The end date of the Backfill line item. + BACKFILL_LINE_ITEM_END_DATE_TIME (269): + The end date and time of the Backfill line + item. + BACKFILL_LINE_ITEM_ENVIRONMENT_TYPE (302): + The ENUM value of the environment a Backfill + line item is targeting. 
+ BACKFILL_LINE_ITEM_ENVIRONMENT_TYPE_NAME (257): + The localized name of the environment a + Backfill line item is targeting. + BACKFILL_LINE_ITEM_EXTERNAL_DEAL_ID (285): + The deal ID of the Backfill line item. Set + for Programmatic Direct campaigns. + BACKFILL_LINE_ITEM_EXTERNAL_ID (273): + The external ID of the Backfill line item. + BACKFILL_LINE_ITEM_FREQUENCY_CAP (303): + The frequency cap of the Backfill line item + (descriptive string). + BACKFILL_LINE_ITEM_ID (298): + Backfill line item ID. + BACKFILL_LINE_ITEM_LAST_MODIFIED_BY_APP (289): + The application that last modified the + Backfill line item. + BACKFILL_LINE_ITEM_LIFETIME_CLICKS (283): + The total number of clicks delivered of the + lifetime of the Backfill line item. + BACKFILL_LINE_ITEM_LIFETIME_IMPRESSIONS (282): + The total number of impressions delivered + over the lifetime of the Backfill line item. + BACKFILL_LINE_ITEM_LIFETIME_VIEWABLE_IMPRESSIONS (284): + The total number of viewable impressions + delivered over the lifetime of the Backfill line + item. + BACKFILL_LINE_ITEM_MAKEGOOD (276): + Whether or not the Backfill line item is + Makegood. Makegood refers to free inventory + offered to buyers to compensate for mistakes or + under-delivery in the original campaigns. + BACKFILL_LINE_ITEM_NAME (299): + Backfill line item name. + BACKFILL_LINE_ITEM_NON_CPD_BOOKED_REVENUE (286): + The cost of booking for the Backfill line + item (non-CPD). + BACKFILL_LINE_ITEM_OPTIMIZABLE (277): + Whether a Backfill line item is eligible for + opitimization. + BACKFILL_LINE_ITEM_PRIMARY_GOAL_TYPE (262): + Goal type ENUM value of the primary goal of + the Backfill line item. + BACKFILL_LINE_ITEM_PRIMARY_GOAL_TYPE_NAME (263): + Localized goal type name of the primary goal + of the Backfill line item. + BACKFILL_LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE (260): + Unit type ENUM value of the primary goal of + the Backfill line item. 
+ BACKFILL_LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE_NAME (261): + Localized unit type name of the primary goal + of the Backfill line item. + BACKFILL_LINE_ITEM_PRIORITY (266): + The priority of this Backfill line item as a + value between 1 and 16. In general, a lower + priority means more serving priority for the + Backfill line item. + BACKFILL_LINE_ITEM_RESERVATION_STATUS (306): + ENUM value describing the state of inventory + reservation for the BackfillLineItem. + BACKFILL_LINE_ITEM_RESERVATION_STATUS_NAME (307): + Localized string describing the state of + inventory reservation for the BackfillLineItem. + BACKFILL_LINE_ITEM_START_DATE (268): + The start date of the Backfill line item. + BACKFILL_LINE_ITEM_START_DATE_TIME (270): + The start date and time of the Backfill line + item. + BACKFILL_LINE_ITEM_TYPE (300): + Backfill line item type ENUM value. + BACKFILL_LINE_ITEM_TYPE_NAME (301): + Localized Backfill line item type name. + BACKFILL_LINE_ITEM_UNLIMITED_END (271): + Whether the Backfill line item end time and + end date is set to effectively never end. + BACKFILL_LINE_ITEM_VALUE_COST_PER_UNIT (275): + The artificial cost per unit used by the Ad + server to help rank inventory. + BACKFILL_LINE_ITEM_WEB_PROPERTY_CODE (287): + The web property code used for dynamic + allocation Backfill line items. + BACKFILL_MASTER_COMPANION_CREATIVE_ID (372): + The ID of Backfill creative, includes regular + creatives, and master and companions in case of + creative sets + BACKFILL_MASTER_COMPANION_CREATIVE_NAME (373): + Name of Backfill creative, includes regular + creatives, and master and companions in case of + creative sets + BACKFILL_ORDER_AGENCY (313): + Backfill order agency. + BACKFILL_ORDER_AGENCY_ID (314): + Backfill order agency ID. + BACKFILL_ORDER_BOOKED_CPC (315): + Backfill order booked CPC. + BACKFILL_ORDER_BOOKED_CPM (316): + Backfill order booked CPM. + BACKFILL_ORDER_DELIVERY_STATUS (340): + Backfill order delivery status ENUM value. 
+ BACKFILL_ORDER_DELIVERY_STATUS_NAME (341): + Backfill order delivery status localized + name. + BACKFILL_ORDER_END_DATE (317): + Backfill order end date. + BACKFILL_ORDER_END_DATE_TIME (319): + Backfill order end date and time. + BACKFILL_ORDER_EXTERNAL_ID (320): + Backfill order external ID. + BACKFILL_ORDER_ID (338): + Backfill order id. + BACKFILL_ORDER_LABELS (334): + Backfill order labels. + BACKFILL_ORDER_LABEL_IDS (335): + Backfill order labels IDs. + BACKFILL_ORDER_LIFETIME_CLICKS (322): + Backfill order lifetime clicks. + BACKFILL_ORDER_LIFETIME_IMPRESSIONS (323): + Backfill order lifetime impressions. + BACKFILL_ORDER_NAME (339): + Backfill order name. + BACKFILL_ORDER_PO_NUMBER (324): + Backfill order PO number. + BACKFILL_ORDER_PROGRAMMATIC (321): + Whether the Backfill order is programmatic. + BACKFILL_ORDER_SALESPERSON (325): + Backfill order sales person. + BACKFILL_ORDER_SECONDARY_SALESPEOPLE (329): + Backfill order secondary sales people. + BACKFILL_ORDER_SECONDARY_SALESPEOPLE_ID (328): + Backfill order secondary sales people ID. + BACKFILL_ORDER_SECONDARY_TRAFFICKERS (331): + Backfill order secondary traffickers. + BACKFILL_ORDER_SECONDARY_TRAFFICKERS_ID (330): + Backfill order secondary traffickers ID. + BACKFILL_ORDER_START_DATE (332): + Backfill order start date. + BACKFILL_ORDER_START_DATE_TIME (333): + Backfill order start date and time. + BACKFILL_ORDER_TRAFFICKER (326): + Backfill order trafficker. + BACKFILL_ORDER_TRAFFICKER_ID (327): + Backfill order trafficker ID. + BACKFILL_ORDER_UNLIMITED_END (318): + Whether the Backfill order end time and end + date is set to effectively never end. + BACKFILL_PROGRAMMATIC_BUYER_ID (336): + The ID of the buyer on a backfill + programmatic proposal. + BACKFILL_PROGRAMMATIC_BUYER_NAME (337): + The name of the buyer on a backfill + programmatic proposal. + BRANDING_TYPE (383): + The amount of information about the + Publisher's page sent to the buyer who purchased + the impressions. 
+ BRANDING_TYPE_NAME (384): + The localized version of branding type, the + amount of information about the Publisher's page + sent to the buyer who purchased the impressions. + BROWSER_CATEGORY (119): + Browser category. + BROWSER_CATEGORY_NAME (120): + Browser category name. + BROWSER_ID (235): + The ID of the browser. + BROWSER_NAME (236): + The name of the browser. + CARRIER_ID (369): + Mobile carrier ID. + CARRIER_NAME (368): + Name of the mobile carrier. + CLASSIFIED_ADVERTISER_ID (133): + The ID of an advertiser, classified by + Google, associated with a creative transacted + CLASSIFIED_ADVERTISER_NAME (134): + The name of an advertiser, classified by + Google, associated with a creative transacted + CLASSIFIED_BRAND_ID (243): + ID of the brand, as classified by Google, + CLASSIFIED_BRAND_NAME (244): + Name of the brand, as classified by Google, + CONTENT_ID (246): + ID of the video content served. + CONTENT_NAME (247): + Name of the video content served. + COUNTRY_ID (11): + The criteria ID of the country in which the + ad served. + COUNTRY_NAME (12): + The name of the country in which the ad + served. + CREATIVE_BILLING_TYPE (366): + Enum value of creative billing type + CREATIVE_BILLING_TYPE_NAME (367): + Localized string value of creative billing + type + CREATIVE_CLICK_THROUGH_URL (174): + Represents the click-through URL of a + creative + CREATIVE_ID (138): + The ID of a creative + CREATIVE_NAME (139): + Creative name + CREATIVE_TECHNOLOGY (148): + Creative technology ENUM + CREATIVE_TECHNOLOGY_NAME (149): + Creative technology localized name + CREATIVE_THIRD_PARTY_VENDOR (361): + Third party vendor name of a creative + CREATIVE_TYPE (344): + Enum value of creative type + CREATIVE_TYPE_NAME (345): + Localized string name of creative type + DATE (3): + Breaks down reporting data by date. + DAY_OF_WEEK (4): + Breaks down reporting data by day of the + week. Monday is 1 and 7 is Sunday. + DEMAND_CHANNEL (9): + Demand channel.
+ DEMAND_CHANNEL_NAME (10): + Demand channel name. + DEMAND_SUBCHANNEL (22): + Demand subchannel. + DEMAND_SUBCHANNEL_NAME (23): + Demand subchannel name. + DEVICE (226): + The device on which an ad was served. + DEVICE_CATEGORY (15): + The device category to which an ad is being + targeted. + DEVICE_CATEGORY_NAME (16): + The name of the category of device + (smartphone, feature phone, tablet, or desktop) + to which an ad is being targeted. + DEVICE_NAME (225): + The localized name of the device on which an + ad was served. + EXCHANGE_THIRD_PARTY_COMPANY_ID (185): + ID of the yield partner as classified by + Google + EXCHANGE_THIRD_PARTY_COMPANY_NAME (186): + Name of the yield partner as classified by + Google + FIRST_LOOK_PRICING_RULE_ID (248): + The ID of the first look pricing rule. + FIRST_LOOK_PRICING_RULE_NAME (249): + The name of the first look pricing rule. + HOUR (100): + Breaks down reporting data by hour in one + day. + INTERACTION_TYPE (223): + The interaction type of an ad. + INTERACTION_TYPE_NAME (224): + The localized name of the interaction type of + an ad. + INVENTORY_FORMAT (17): + Inventory format. + The format of the ad unit (e.g., banner) where + the ad was requested. + INVENTORY_FORMAT_NAME (18): + Inventory format name. + The format of the ad unit (e.g., banner) where + the ad was requested. + INVENTORY_TYPE (19): + Inventory type. + The kind of web page or device where the ad was + requested. + INVENTORY_TYPE_NAME (20): + Inventory type name. + The kind of web page or device where the ad was + requested. + IS_ADX_DIRECT (382): + Whether traffic is Adx Direct. + IS_FIRST_LOOK_DEAL (401): + Whether traffic is First Look. + KEY_VALUES_ID (214): + The Custom Targeting Value ID + KEY_VALUES_NAME (215): + The Custom Targeting Value formatted like + = + LINE_ITEM_ARCHIVED (188): + Whether a Line item is archived. + LINE_ITEM_COMPANION_DELIVERY_OPTION (204): + Line item companion delivery option ENUM + value.
+ LINE_ITEM_COMPANION_DELIVERY_OPTION_NAME (205): + Localized line item companion delivery option + name. + LINE_ITEM_COMPUTED_STATUS (250): + The computed status of the LineItem. + LINE_ITEM_COMPUTED_STATUS_NAME (251): + The localized name of the computed status of + the LineItem. + LINE_ITEM_CONTRACTED_QUANTITY (92): + The contracted units bought for the Line + item. + LINE_ITEM_COST_PER_UNIT (85): + The cost per unit of the Line item. + LINE_ITEM_COST_TYPE (212): + Line item cost type ENUM value. + LINE_ITEM_COST_TYPE_NAME (213): + Localized line item cost type name. + LINE_ITEM_CREATIVE_END_DATE (176): + Represent the end date of a creative + associated with line item + LINE_ITEM_CREATIVE_ROTATION_TYPE (189): + The creative rotation type of the LineItem. + LINE_ITEM_CREATIVE_ROTATION_TYPE_NAME (190): + The localized name of the creative rotation + type of the LineItem. + LINE_ITEM_CREATIVE_START_DATE (175): + Represent the start date of a creative + associated with line item + LINE_ITEM_CURRENCY_CODE (180): + The 3 letter currency code of the Line Item + LINE_ITEM_DELIVERY_INDICATOR (87): + The progress made for the delivery of the + Line item. + LINE_ITEM_DELIVERY_RATE_TYPE (191): + The delivery rate type of the LineItem. + LINE_ITEM_DELIVERY_RATE_TYPE_NAME (192): + The localized name of the delivery rate type + of the LineItem. + LINE_ITEM_DISCOUNT_ABSOLUTE (195): + The discount of the LineItem in whole units + in the LineItem's currency code, or if + unspecified the Network's currency code. + LINE_ITEM_DISCOUNT_PERCENTAGE (196): + The discount of the LineItem in percentage. + LINE_ITEM_END_DATE (81): + The end date of the Line item. + LINE_ITEM_END_DATE_TIME (83): + The end date and time of the Line item. + LINE_ITEM_ENVIRONMENT_TYPE (201): + The ENUM value of the environment a LineItem + is targeting. + LINE_ITEM_ENVIRONMENT_TYPE_NAME (202): + The localized name of the environment a + LineItem is targeting.
+ LINE_ITEM_EXTERNAL_DEAL_ID (97): + The deal ID of the Line item. Set for + Programmatic Direct campaigns. + LINE_ITEM_EXTERNAL_ID (86): + The external ID of the Line item. + LINE_ITEM_FREQUENCY_CAP (256): + The frequency cap of the Line item + (descriptive string). + LINE_ITEM_ID (1): + Line item ID. + LINE_ITEM_LAST_MODIFIED_BY_APP (181): + The application that last modified the Line + Item. + LINE_ITEM_LIFETIME_CLICKS (95): + The total number of clicks delivered of the + lifetime of the Line item. + LINE_ITEM_LIFETIME_IMPRESSIONS (94): + The total number of impressions delivered + over the lifetime of the Line item. + LINE_ITEM_LIFETIME_VIEWABLE_IMPRESSIONS (96): + The total number of viewable impressions + delivered over the lifetime of the Line item. + LINE_ITEM_MAKEGOOD (89): + Whether or not the Line item is Makegood. + Makegood refers to free inventory offered to + buyers to compensate for mistakes or + under-delivery in the original campaigns. + LINE_ITEM_NAME (2): + Line item Name. + LINE_ITEM_NON_CPD_BOOKED_REVENUE (98): + The cost of booking for the Line item + (non-CPD). + LINE_ITEM_OPTIMIZABLE (90): + Whether a Line item is eligible for + optimization. + LINE_ITEM_PRIMARY_GOAL_TYPE (210): + Goal type ENUM value of the primary goal of + the line item. + LINE_ITEM_PRIMARY_GOAL_TYPE_NAME (211): + Localized goal type name of the primary goal + of the line item. + LINE_ITEM_PRIMARY_GOAL_UNITS_ABSOLUTE (93): + The total number of impressions or clicks that are reserved + for a line item. For line items of type BULK or + PRICE_PRIORITY, this represents the number of remaining + impressions reserved. If the line item has an impression cap + goal, this represents the number of impressions or + conversions that the line item will stop serving at if + reached. + LINE_ITEM_PRIMARY_GOAL_UNITS_PERCENTAGE (396): + The percentage of impressions or clicks that + are reserved for a line item.
For line items of + type SPONSORSHIP, this represents the percentage + of available impressions reserved. For line + items of type NETWORK or HOUSE, this represents + the percentage of remaining impressions + reserved. + LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE (208): + Unit type ENUM value of the primary goal of + the line item. + LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE_NAME (209): + Localized unit type name of the primary goal + of the line item. + LINE_ITEM_PRIORITY (24): + The priority of this Line item as a value + between 1 and 16. In general, a lower priority + means more serving priority for the Line item. + LINE_ITEM_RESERVATION_STATUS (304): + ENUM value describing the state of inventory + reservation for the LineItem. + LINE_ITEM_RESERVATION_STATUS_NAME (305): + Localized string describing the state of + inventory reservation for the LineItem. + LINE_ITEM_START_DATE (82): + The start date of the Line item. + LINE_ITEM_START_DATE_TIME (84): + The start date and time of the Line item. + LINE_ITEM_TYPE (193): + Line item type ENUM value. + LINE_ITEM_TYPE_NAME (194): + Localized line item type name. + LINE_ITEM_UNLIMITED_END (187): + Whether the Line item end time and end date + is set to effectively never end. + LINE_ITEM_VALUE_COST_PER_UNIT (88): + The artificial cost per unit used by the Ad + server to help rank inventory. + LINE_ITEM_WEB_PROPERTY_CODE (179): + The web property code used for dynamic + allocation Line Items. + MASTER_COMPANION_CREATIVE_ID (140): + The ID of creative, includes regular + creatives, and master and companions in case of + creative sets + MASTER_COMPANION_CREATIVE_NAME (141): + Name of creative, includes regular creatives, + and master and companions in case of creative + sets + MOBILE_APP_FREE (128): + Whether the mobile app is free. + MOBILE_APP_ICON_URL (129): + URL of app icon for the mobile app. + MOBILE_APP_ID (123): + The ID of the Mobile App. + MOBILE_APP_NAME (127): + The name of the mobile app. 
+ MOBILE_APP_OWNERSHIP_STATUS (311): + Ownership status of the mobile app. + MOBILE_APP_OWNERSHIP_STATUS_NAME (312): + Ownership status of the mobile app. + MOBILE_APP_STORE (125): + The App Store of the mobile app. + MOBILE_APP_STORE_NAME (245): + The localized name of the mobile app store. + MOBILE_INVENTORY_TYPE (99): + Mobile inventory type. + Identifies whether a mobile ad came from a + regular web page, an AMP web page, or a mobile + app. + Values match the Inventory type dimension + available in the Overview Home dashboard. Note: + Video takes precedence over any other value, for + example, if there is an in-stream video + impression on a desktop device, it will be + attributed to in-stream video and not desktop + web. + MOBILE_INVENTORY_TYPE_NAME (21): + Mobile inventory type name. + Identifies whether a mobile ad came from a + regular web page, an AMP web page, or a mobile + app. + MOBILE_SDK_VERSION_NAME (130): + SDK version of the mobile device. + MONTH_YEAR (6): + Breaks down reporting data by month and year. + NATIVE_AD_FORMAT_ID (255): + Native ad format ID. + NATIVE_AD_FORMAT_NAME (254): + Native ad format name. + NATIVE_STYLE_ID (253): + Native style ID. + NATIVE_STYLE_NAME (252): + Native style name. + OPERATING_SYSTEM_CATEGORY (117): + Operating system category. + OPERATING_SYSTEM_CATEGORY_NAME (118): + Operating system category name. + OPERATING_SYSTEM_VERSION_ID (238): + ID of the operating system version. + OPERATING_SYSTEM_VERSION_NAME (237): + Details of the operating system, including + version. + ORDER_AGENCY (150): + Order agency. + ORDER_AGENCY_ID (151): + Order agency ID. + ORDER_BOOKED_CPC (152): + Order booked CPC. + ORDER_BOOKED_CPM (153): + Order booked CPM. + ORDER_DELIVERY_STATUS (231): + Order delivery status ENUM value. + ORDER_DELIVERY_STATUS_NAME (239): + Order delivery status localized name. + ORDER_END_DATE (154): + Order end date. + ORDER_END_DATE_TIME (155): + Order end date and time. 
+ ORDER_EXTERNAL_ID (156): + Order external ID. + ORDER_ID (7): + Order id. + ORDER_LABELS (170): + Order labels. + ORDER_LABEL_IDS (171): + Order labels IDs. + ORDER_LIFETIME_CLICKS (158): + Order lifetime clicks. + ORDER_LIFETIME_IMPRESSIONS (159): + Order lifetime impressions. + ORDER_NAME (8): + Order name. + ORDER_PO_NUMBER (160): + Order PO number. + ORDER_PROGRAMMATIC (157): + Whether the Order is programmatic. + ORDER_SALESPERSON (161): + Order sales person. + ORDER_SECONDARY_SALESPEOPLE (164): + Order secondary sales people. + ORDER_SECONDARY_SALESPEOPLE_ID (165): + Order secondary sales people ID. + ORDER_SECONDARY_TRAFFICKERS (166): + Order secondary traffickers. + ORDER_SECONDARY_TRAFFICKERS_ID (167): + Order secondary traffickers ID. + ORDER_START_DATE (168): + Order start date. + ORDER_START_DATE_TIME (169): + Order start date and time. + ORDER_TRAFFICKER (162): + Order trafficker. + ORDER_TRAFFICKER_ID (163): + Order trafficker ID. + ORDER_UNLIMITED_END (203): + Whether the Order end time and end date is + set to effectively never end. + PLACEMENT_ID (113): + Placement ID + PLACEMENT_ID_ALL (144): + The full list of placement IDs associated + with the ad unit. + PLACEMENT_NAME (114): + Placement name + PLACEMENT_NAME_ALL (145): + The full list of placement names associated + with the ad unit. + PLACEMENT_STATUS (362): + Placement status ENUM value + PLACEMENT_STATUS_ALL (363): + The full list of placement status ENUM values + associated with the ad unit. + PLACEMENT_STATUS_NAME (364): + Localized placement status name. + PLACEMENT_STATUS_NAME_ALL (365): + The full list of localized placement status + names associated with the ad unit. + PROGRAMMATIC_BUYER_ID (240): + The ID of the buyer on a programmatic + proposal. + PROGRAMMATIC_BUYER_NAME (241): + The name of the buyer on a programmatic + proposal. + PROGRAMMATIC_CHANNEL (13): + Programmatic channel. + The type of transaction that occurred in Ad + Exchange. 
+ PROGRAMMATIC_CHANNEL_NAME (14): + Programmatic channel name. + The type of transaction that occurred in Ad + Exchange. + RENDERED_CREATIVE_SIZE (343): + The size of a rendered creative. It can + differ with the creative's size if a creative is + shown in an ad slot of a different size. + REQUESTED_AD_SIZES (352): + Inventory Requested Ad Sizes dimension + REQUEST_TYPE (146): + Request type ENUM + REQUEST_TYPE_NAME (147): + Request type localized name + SITE (387): + Information about domain or subdomains. + TARGETING_ID (232): + The ID of the browser, device or other + environment into which a line item or creative + was served. + TARGETING_NAME (233): + Information about the browser, device and + other environments into which a line item or + creative was served. + TARGETING_TYPE (385): + The way in which advertisers targeted their + ads. + TARGETING_TYPE_NAME (386): + The localized name of the way in which + advertisers targeted their ads. + TRAFFIC_SOURCE (388): + Inventory Traffic source dimension + TRAFFIC_SOURCE_NAME (389): + Inventory Traffic source dimension name + UNIFIED_PRICING_RULE_ID (393): + Unified pricing rule ID dimension + UNIFIED_PRICING_RULE_NAME (394): + Unified pricing rule name dimension + VIDEO_PLCMT (172): + The video placement enum as defined by ADCOM + 1.0-202303. + VIDEO_PLCMT_NAME (173): + The localized name of the video placement as + defined by ADCOM 1.0-202303. + WEEK (5): + Breaks down reporting data by week of the + year. + YIELD_GROUP_BUYER_NAME (184): + Name of the company within a yield group + YIELD_GROUP_ID (182): + ID of the group of ad networks or exchanges + used for Mediation and Open Bidding + YIELD_GROUP_NAME (183): + Name of the group of ad networks or exchanges + used for Mediation and Open Bidding + LINE_ITEM_CUSTOM_FIELD_0_OPTION_ID (10000): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 0 of + ``ReportDefinition.line_item_custom_field_ids``.
+ LINE_ITEM_CUSTOM_FIELD_1_OPTION_ID (10001): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 1 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_2_OPTION_ID (10002): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 2 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_3_OPTION_ID (10003): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 3 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_4_OPTION_ID (10004): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 4 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_5_OPTION_ID (10005): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 5 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_6_OPTION_ID (10006): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 6 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_7_OPTION_ID (10007): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 7 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_8_OPTION_ID (10008): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 8 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_9_OPTION_ID (10009): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 9 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_10_OPTION_ID (10010): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 10 of + ``ReportDefinition.line_item_custom_field_ids``. 
+ LINE_ITEM_CUSTOM_FIELD_11_OPTION_ID (10011): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 11 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_12_OPTION_ID (10012): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 12 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_13_OPTION_ID (10013): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 13 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_14_OPTION_ID (10014): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 14 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_0_VALUE (11000): + Custom field value for Line Item with custom field ID equal + to the ID in index 0 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_1_VALUE (11001): + Custom field value for Line Item with custom field ID equal + to the ID in index 1 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_2_VALUE (11002): + Custom field value for Line Item with custom field ID equal + to the ID in index 2 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_3_VALUE (11003): + Custom field value for Line Item with custom field ID equal + to the ID in index 3 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_4_VALUE (11004): + Custom field value for Line Item with custom field ID equal + to the ID in index 4 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_5_VALUE (11005): + Custom field value for Line Item with custom field ID equal + to the ID in index 5 of + ``ReportDefinition.line_item_custom_field_ids``. 
+ LINE_ITEM_CUSTOM_FIELD_6_VALUE (11006): + Custom field value for Line Item with custom field ID equal + to the ID in index 6 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_7_VALUE (11007): + Custom field value for Line Item with custom field ID equal + to the ID in index 7 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_8_VALUE (11008): + Custom field value for Line Item with custom field ID equal + to the ID in index 8 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_9_VALUE (11009): + Custom field value for Line Item with custom field ID equal + to the ID in index 9 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_10_VALUE (11010): + Custom field value for Line Item with custom field ID equal + to the ID in index 10 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_11_VALUE (11011): + Custom field value for Line Item with custom field ID equal + to the ID in index 11 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_12_VALUE (11012): + Custom field value for Line Item with custom field ID equal + to the ID in index 12 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_13_VALUE (11013): + Custom field value for Line Item with custom field ID equal + to the ID in index 13 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_14_VALUE (11014): + Custom field value for Line Item with custom field ID equal + to the ID in index 14 of + ``ReportDefinition.line_item_custom_field_ids``. + ORDER_CUSTOM_FIELD_0_OPTION_ID (12000): + Custom field option ID for Order with custom field ID equal + to the ID in index 0 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_1_OPTION_ID (12001): + Custom field option ID for Order with custom field ID equal + to the ID in index 1 of + ``ReportDefinition.order_custom_field_ids``. 
+ ORDER_CUSTOM_FIELD_2_OPTION_ID (12002): + Custom field option ID for Order with custom field ID equal + to the ID in index 2 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_3_OPTION_ID (12003): + Custom field option ID for Order with custom field ID equal + to the ID in index 3 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_4_OPTION_ID (12004): + Custom field option ID for Order with custom field ID equal + to the ID in index 4 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_5_OPTION_ID (12005): + Custom field option ID for Order with custom field ID equal + to the ID in index 5 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_6_OPTION_ID (12006): + Custom field option ID for Order with custom field ID equal + to the ID in index 6 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_7_OPTION_ID (12007): + Custom field option ID for Order with custom field ID equal + to the ID in index 7 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_8_OPTION_ID (12008): + Custom field option ID for Order with custom field ID equal + to the ID in index 8 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_9_OPTION_ID (12009): + Custom field option ID for Order with custom field ID equal + to the ID in index 9 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_10_OPTION_ID (12010): + Custom field option ID for Order with custom field ID equal + to the ID in index 10 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_11_OPTION_ID (12011): + Custom field option ID for Order with custom field ID equal + to the ID in index 11 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_12_OPTION_ID (12012): + Custom field option ID for Order with custom field ID equal + to the ID in index 12 of + ``ReportDefinition.order_custom_field_ids``. 
+ ORDER_CUSTOM_FIELD_13_OPTION_ID (12013): + Custom field option ID for Order with custom field ID equal + to the ID in index 13 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_14_OPTION_ID (12014): + Custom field option ID for Order with custom field ID equal + to the ID in index 14 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_0_VALUE (13000): + Custom field value for Order with custom field ID equal to + the ID in index 0 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_1_VALUE (13001): + Custom field value for Order with custom field ID equal to + the ID in index 1 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_2_VALUE (13002): + Custom field value for Order with custom field ID equal to + the ID in index 2 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_3_VALUE (13003): + Custom field value for Order with custom field ID equal to + the ID in index 3 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_4_VALUE (13004): + Custom field value for Order with custom field ID equal to + the ID in index 4 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_5_VALUE (13005): + Custom field value for Order with custom field ID equal to + the ID in index 5 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_6_VALUE (13006): + Custom field value for Order with custom field ID equal to + the ID in index 6 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_7_VALUE (13007): + Custom field value for Order with custom field ID equal to + the ID in index 7 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_8_VALUE (13008): + Custom field value for Order with custom field ID equal to + the ID in index 8 of + ``ReportDefinition.order_custom_field_ids``. 
+ ORDER_CUSTOM_FIELD_9_VALUE (13009): + Custom field value for Order with custom field ID equal to + the ID in index 9 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_10_VALUE (13010): + Custom field value for Order with custom field ID equal to + the ID in index 10 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_11_VALUE (13011): + Custom field value for Order with custom field ID equal to + the ID in index 11 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_12_VALUE (13012): + Custom field value for Order with custom field ID equal to + the ID in index 12 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_13_VALUE (13013): + Custom field value for Order with custom field ID equal to + the ID in index 13 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_14_VALUE (13014): + Custom field value for Order with custom field ID equal to + the ID in index 14 of + ``ReportDefinition.order_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_0_OPTION_ID (14000): + Custom field option ID for Creative with custom field ID + equal to the ID in index 0 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_1_OPTION_ID (14001): + Custom field option ID for Creative with custom field ID + equal to the ID in index 1 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_2_OPTION_ID (14002): + Custom field option ID for Creative with custom field ID + equal to the ID in index 2 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_3_OPTION_ID (14003): + Custom field option ID for Creative with custom field ID + equal to the ID in index 3 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_4_OPTION_ID (14004): + Custom field option ID for Creative with custom field ID + equal to the ID in index 4 of + ``ReportDefinition.creative_custom_field_ids``. 
+ CREATIVE_CUSTOM_FIELD_5_OPTION_ID (14005): + Custom field option ID for Creative with custom field ID + equal to the ID in index 5 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_6_OPTION_ID (14006): + Custom field option ID for Creative with custom field ID + equal to the ID in index 6 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_7_OPTION_ID (14007): + Custom field option ID for Creative with custom field ID + equal to the ID in index 7 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_8_OPTION_ID (14008): + Custom field option ID for Creative with custom field ID + equal to the ID in index 8 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_9_OPTION_ID (14009): + Custom field option ID for Creative with custom field ID + equal to the ID in index 9 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_10_OPTION_ID (14010): + Custom field option ID for Creative with custom field ID + equal to the ID in index 10 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_11_OPTION_ID (14011): + Custom field option ID for Creative with custom field ID + equal to the ID in index 11 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_12_OPTION_ID (14012): + Custom field option ID for Creative with custom field ID + equal to the ID in index 12 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_13_OPTION_ID (14013): + Custom field option ID for Creative with custom field ID + equal to the ID in index 13 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_14_OPTION_ID (14014): + Custom field option ID for Creative with custom field ID + equal to the ID in index 14 of + ``ReportDefinition.creative_custom_field_ids``. 
+ CREATIVE_CUSTOM_FIELD_0_VALUE (15000): + Custom field value for Creative with custom field ID equal + to the ID in index 0 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_1_VALUE (15001): + Custom field value for Creative with custom field ID equal + to the ID in index 1 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_2_VALUE (15002): + Custom field value for Creative with custom field ID equal + to the ID in index 2 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_3_VALUE (15003): + Custom field value for Creative with custom field ID equal + to the ID in index 3 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_4_VALUE (15004): + Custom field value for Creative with custom field ID equal + to the ID in index 4 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_5_VALUE (15005): + Custom field value for Creative with custom field ID equal + to the ID in index 5 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_6_VALUE (15006): + Custom field value for Creative with custom field ID equal + to the ID in index 6 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_7_VALUE (15007): + Custom field value for Creative with custom field ID equal + to the ID in index 7 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_8_VALUE (15008): + Custom field value for Creative with custom field ID equal + to the ID in index 8 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_9_VALUE (15009): + Custom field value for Creative with custom field ID equal + to the ID in index 9 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_10_VALUE (15010): + Custom field value for Creative with custom field ID equal + to the ID in index 10 of + ``ReportDefinition.creative_custom_field_ids``. 
+ CREATIVE_CUSTOM_FIELD_11_VALUE (15011): + Custom field value for Creative with custom field ID equal + to the ID in index 11 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_12_VALUE (15012): + Custom field value for Creative with custom field ID equal + to the ID in index 12 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_13_VALUE (15013): + Custom field value for Creative with custom field ID equal + to the ID in index 13 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_14_VALUE (15014): + Custom field value for Creative with custom field ID equal + to the ID in index 14 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_0_OPTION_ID (16000): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 0 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_1_OPTION_ID (16001): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 1 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_2_OPTION_ID (16002): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 2 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_3_OPTION_ID (16003): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 3 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_4_OPTION_ID (16004): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 4 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_5_OPTION_ID (16005): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 5 of + ``ReportDefinition.line_item_custom_field_ids``. 
+ BACKFILL_LINE_ITEM_CUSTOM_FIELD_6_OPTION_ID (16006): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 6 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_7_OPTION_ID (16007): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 7 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_8_OPTION_ID (16008): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 8 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_9_OPTION_ID (16009): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 9 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_10_OPTION_ID (16010): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 10 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_11_OPTION_ID (16011): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 11 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_12_OPTION_ID (16012): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 12 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_13_OPTION_ID (16013): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 13 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_14_OPTION_ID (16014): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 14 of + ``ReportDefinition.line_item_custom_field_ids``. 
+ BACKFILL_LINE_ITEM_CUSTOM_FIELD_0_VALUE (17000): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 0 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_1_VALUE (17001): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 1 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_2_VALUE (17002): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 2 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_3_VALUE (17003): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 3 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_4_VALUE (17004): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 4 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_5_VALUE (17005): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 5 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_6_VALUE (17006): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 6 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_7_VALUE (17007): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 7 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_8_VALUE (17008): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 8 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_9_VALUE (17009): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 9 of + ``ReportDefinition.line_item_custom_field_ids``. 
+ BACKFILL_LINE_ITEM_CUSTOM_FIELD_10_VALUE (17010): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 10 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_11_VALUE (17011): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 11 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_12_VALUE (17012): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 12 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_13_VALUE (17013): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 13 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_14_VALUE (17014): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 14 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_0_OPTION_ID (18000): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 0 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_1_OPTION_ID (18001): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 1 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_2_OPTION_ID (18002): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 2 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_3_OPTION_ID (18003): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 3 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_4_OPTION_ID (18004): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 4 of + ``ReportDefinition.order_custom_field_ids``. 
+ BACKFILL_ORDER_CUSTOM_FIELD_5_OPTION_ID (18005): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 5 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_6_OPTION_ID (18006): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 6 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_7_OPTION_ID (18007): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 7 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_8_OPTION_ID (18008): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 8 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_9_OPTION_ID (18009): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 9 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_10_OPTION_ID (18010): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 10 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_11_OPTION_ID (18011): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 11 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_12_OPTION_ID (18012): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 12 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_13_OPTION_ID (18013): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 13 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_14_OPTION_ID (18014): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 14 of + ``ReportDefinition.order_custom_field_ids``. 
+ BACKFILL_ORDER_CUSTOM_FIELD_0_VALUE (19000): + Custom field value for Backfill order with custom field ID + equal to the ID in index 0 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_1_VALUE (19001): + Custom field value for Backfill order with custom field ID + equal to the ID in index 1 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_2_VALUE (19002): + Custom field value for Backfill order with custom field ID + equal to the ID in index 2 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_3_VALUE (19003): + Custom field value for Backfill order with custom field ID + equal to the ID in index 3 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_4_VALUE (19004): + Custom field value for Backfill order with custom field ID + equal to the ID in index 4 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_5_VALUE (19005): + Custom field value for Backfill order with custom field ID + equal to the ID in index 5 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_6_VALUE (19006): + Custom field value for Backfill order with custom field ID + equal to the ID in index 6 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_7_VALUE (19007): + Custom field value for Backfill order with custom field ID + equal to the ID in index 7 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_8_VALUE (19008): + Custom field value for Backfill order with custom field ID + equal to the ID in index 8 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_9_VALUE (19009): + Custom field value for Backfill order with custom field ID + equal to the ID in index 9 of + ``ReportDefinition.order_custom_field_ids``. 
+ BACKFILL_ORDER_CUSTOM_FIELD_10_VALUE (19010): + Custom field value for Backfill order with custom field ID + equal to the ID in index 10 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_11_VALUE (19011): + Custom field value for Backfill order with custom field ID + equal to the ID in index 11 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_12_VALUE (19012): + Custom field value for Backfill order with custom field ID + equal to the ID in index 12 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_13_VALUE (19013): + Custom field value for Backfill order with custom field ID + equal to the ID in index 13 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_14_VALUE (19014): + Custom field value for Backfill order with custom field ID + equal to the ID in index 14 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_0_OPTION_ID (20000): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 0 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_1_OPTION_ID (20001): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 1 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_2_OPTION_ID (20002): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 2 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_3_OPTION_ID (20003): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 3 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_4_OPTION_ID (20004): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 4 of + ``ReportDefinition.creative_custom_field_ids``. 
+ BACKFILL_CREATIVE_CUSTOM_FIELD_5_OPTION_ID (20005): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 5 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_6_OPTION_ID (20006): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 6 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_7_OPTION_ID (20007): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 7 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_8_OPTION_ID (20008): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 8 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_9_OPTION_ID (20009): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 9 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_10_OPTION_ID (20010): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 10 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_11_OPTION_ID (20011): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 11 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_12_OPTION_ID (20012): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 12 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_13_OPTION_ID (20013): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 13 of + ``ReportDefinition.creative_custom_field_ids``. 
+ BACKFILL_CREATIVE_CUSTOM_FIELD_14_OPTION_ID (20014): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 14 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_0_VALUE (21000): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 0 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_1_VALUE (21001): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 1 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_2_VALUE (21002): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 2 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_3_VALUE (21003): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 3 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_4_VALUE (21004): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 4 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_5_VALUE (21005): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 5 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_6_VALUE (21006): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 6 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_7_VALUE (21007): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 7 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_8_VALUE (21008): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 8 of + ``ReportDefinition.creative_custom_field_ids``. 
+ BACKFILL_CREATIVE_CUSTOM_FIELD_9_VALUE (21009): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 9 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_10_VALUE (21010): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 10 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_11_VALUE (21011): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 11 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_12_VALUE (21012): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 12 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_13_VALUE (21013): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 13 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_14_VALUE (21014): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 14 of + ``ReportDefinition.creative_custom_field_ids``. + CUSTOM_DIMENSION_0_VALUE_ID (100000): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 0 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_1_VALUE_ID (100001): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 1 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_2_VALUE_ID (100002): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 2 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_3_VALUE_ID (100003): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 3 of + ``ReportDefinition.custom_dimension_key_ids``. 
+ CUSTOM_DIMENSION_4_VALUE_ID (100004): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 4 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_5_VALUE_ID (100005): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 5 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_6_VALUE_ID (100006): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 6 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_7_VALUE_ID (100007): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 7 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_8_VALUE_ID (100008): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 8 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_9_VALUE_ID (100009): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 9 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_0_VALUE (101000): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 0 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_1_VALUE (101001): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 1 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_2_VALUE (101002): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 2 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_3_VALUE (101003): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 3 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_4_VALUE (101004): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 4 of + ``ReportDefinition.custom_dimension_key_ids``. 
+ CUSTOM_DIMENSION_5_VALUE (101005): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 5 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_6_VALUE (101006): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 6 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_7_VALUE (101007): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 7 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_8_VALUE (101008): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 8 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_9_VALUE (101009): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 9 of + ``ReportDefinition.custom_dimension_key_ids``. + """ + DIMENSION_UNSPECIFIED = 0 + ADVERTISER_DOMAIN_NAME = 242 + ADVERTISER_EXTERNAL_ID = 228 + ADVERTISER_ID = 131 + ADVERTISER_LABELS = 230 + ADVERTISER_LABEL_IDS = 229 + ADVERTISER_NAME = 132 + ADVERTISER_PRIMARY_CONTACT = 227 + AD_LOCATION = 390 + AD_LOCATION_NAME = 391 + AD_UNIT_CODE = 64 + AD_UNIT_CODE_LEVEL_1 = 65 + AD_UNIT_CODE_LEVEL_10 = 74 + AD_UNIT_CODE_LEVEL_11 = 75 + AD_UNIT_CODE_LEVEL_12 = 76 + AD_UNIT_CODE_LEVEL_13 = 77 + AD_UNIT_CODE_LEVEL_14 = 78 + AD_UNIT_CODE_LEVEL_15 = 79 + AD_UNIT_CODE_LEVEL_16 = 80 + AD_UNIT_CODE_LEVEL_2 = 66 + AD_UNIT_CODE_LEVEL_3 = 67 + AD_UNIT_CODE_LEVEL_4 = 68 + AD_UNIT_CODE_LEVEL_5 = 69 + AD_UNIT_CODE_LEVEL_6 = 70 + AD_UNIT_CODE_LEVEL_7 = 71 + AD_UNIT_CODE_LEVEL_8 = 72 + AD_UNIT_CODE_LEVEL_9 = 73 + AD_UNIT_DEPTH = 101 + AD_UNIT_ID = 25 + AD_UNIT_ID_ALL_LEVEL = 27 + AD_UNIT_ID_LEVEL_1 = 30 + AD_UNIT_ID_LEVEL_10 = 48 + AD_UNIT_ID_LEVEL_11 = 50 + AD_UNIT_ID_LEVEL_12 = 52 + AD_UNIT_ID_LEVEL_13 = 54 + AD_UNIT_ID_LEVEL_14 = 56 + AD_UNIT_ID_LEVEL_15 = 58 + AD_UNIT_ID_LEVEL_16 = 60 + AD_UNIT_ID_LEVEL_2 = 32 + AD_UNIT_ID_LEVEL_3 = 34 + AD_UNIT_ID_LEVEL_4 = 
36 + AD_UNIT_ID_LEVEL_5 = 38 + AD_UNIT_ID_LEVEL_6 = 40 + AD_UNIT_ID_LEVEL_7 = 42 + AD_UNIT_ID_LEVEL_8 = 44 + AD_UNIT_ID_LEVEL_9 = 46 + AD_UNIT_ID_TOP_LEVEL = 142 + AD_UNIT_NAME = 26 + AD_UNIT_NAME_ALL_LEVEL = 29 + AD_UNIT_NAME_LEVEL_1 = 31 + AD_UNIT_NAME_LEVEL_10 = 49 + AD_UNIT_NAME_LEVEL_11 = 51 + AD_UNIT_NAME_LEVEL_12 = 53 + AD_UNIT_NAME_LEVEL_13 = 55 + AD_UNIT_NAME_LEVEL_14 = 57 + AD_UNIT_NAME_LEVEL_15 = 59 + AD_UNIT_NAME_LEVEL_16 = 61 + AD_UNIT_NAME_LEVEL_2 = 33 + AD_UNIT_NAME_LEVEL_3 = 35 + AD_UNIT_NAME_LEVEL_4 = 37 + AD_UNIT_NAME_LEVEL_5 = 39 + AD_UNIT_NAME_LEVEL_6 = 41 + AD_UNIT_NAME_LEVEL_7 = 43 + AD_UNIT_NAME_LEVEL_8 = 45 + AD_UNIT_NAME_LEVEL_9 = 47 + AD_UNIT_NAME_TOP_LEVEL = 143 + AD_UNIT_REWARD_AMOUNT = 63 + AD_UNIT_REWARD_TYPE = 62 + AD_UNIT_STATUS = 206 + AD_UNIT_STATUS_NAME = 207 + APP_VERSION = 392 + BACKFILL_ADVERTISER_EXTERNAL_ID = 349 + BACKFILL_ADVERTISER_ID = 346 + BACKFILL_ADVERTISER_LABELS = 351 + BACKFILL_ADVERTISER_LABEL_IDS = 350 + BACKFILL_ADVERTISER_NAME = 347 + BACKFILL_ADVERTISER_PRIMARY_CONTACT = 348 + BACKFILL_CREATIVE_BILLING_TYPE = 378 + BACKFILL_CREATIVE_BILLING_TYPE_NAME = 379 + BACKFILL_CREATIVE_CLICK_THROUGH_URL = 376 + BACKFILL_CREATIVE_ID = 370 + BACKFILL_CREATIVE_NAME = 371 + BACKFILL_CREATIVE_THIRD_PARTY_VENDOR = 377 + BACKFILL_CREATIVE_TYPE = 374 + BACKFILL_CREATIVE_TYPE_NAME = 375 + BACKFILL_LINE_ITEM_ARCHIVED = 278 + BACKFILL_LINE_ITEM_COMPANION_DELIVERY_OPTION = 258 + BACKFILL_LINE_ITEM_COMPANION_DELIVERY_OPTION_NAME = 259 + BACKFILL_LINE_ITEM_COMPUTED_STATUS = 296 + BACKFILL_LINE_ITEM_COMPUTED_STATUS_NAME = 297 + BACKFILL_LINE_ITEM_CONTRACTED_QUANTITY = 280 + BACKFILL_LINE_ITEM_COST_PER_UNIT = 272 + BACKFILL_LINE_ITEM_COST_TYPE = 264 + BACKFILL_LINE_ITEM_COST_TYPE_NAME = 265 + BACKFILL_LINE_ITEM_CREATIVE_END_DATE = 381 + BACKFILL_LINE_ITEM_CREATIVE_ROTATION_TYPE = 290 + BACKFILL_LINE_ITEM_CREATIVE_ROTATION_TYPE_NAME = 291 + BACKFILL_LINE_ITEM_CREATIVE_START_DATE = 380 + BACKFILL_LINE_ITEM_CURRENCY_CODE = 288 + 
BACKFILL_LINE_ITEM_DELIVERY_INDICATOR = 274 + BACKFILL_LINE_ITEM_DELIVERY_RATE_TYPE = 292 + BACKFILL_LINE_ITEM_DELIVERY_RATE_TYPE_NAME = 293 + BACKFILL_LINE_ITEM_DISCOUNT_ABSOLUTE = 294 + BACKFILL_LINE_ITEM_DISCOUNT_PERCENTAGE = 295 + BACKFILL_LINE_ITEM_END_DATE = 267 + BACKFILL_LINE_ITEM_END_DATE_TIME = 269 + BACKFILL_LINE_ITEM_ENVIRONMENT_TYPE = 302 + BACKFILL_LINE_ITEM_ENVIRONMENT_TYPE_NAME = 257 + BACKFILL_LINE_ITEM_EXTERNAL_DEAL_ID = 285 + BACKFILL_LINE_ITEM_EXTERNAL_ID = 273 + BACKFILL_LINE_ITEM_FREQUENCY_CAP = 303 + BACKFILL_LINE_ITEM_ID = 298 + BACKFILL_LINE_ITEM_LAST_MODIFIED_BY_APP = 289 + BACKFILL_LINE_ITEM_LIFETIME_CLICKS = 283 + BACKFILL_LINE_ITEM_LIFETIME_IMPRESSIONS = 282 + BACKFILL_LINE_ITEM_LIFETIME_VIEWABLE_IMPRESSIONS = 284 + BACKFILL_LINE_ITEM_MAKEGOOD = 276 + BACKFILL_LINE_ITEM_NAME = 299 + BACKFILL_LINE_ITEM_NON_CPD_BOOKED_REVENUE = 286 + BACKFILL_LINE_ITEM_OPTIMIZABLE = 277 + BACKFILL_LINE_ITEM_PRIMARY_GOAL_TYPE = 262 + BACKFILL_LINE_ITEM_PRIMARY_GOAL_TYPE_NAME = 263 + BACKFILL_LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE = 260 + BACKFILL_LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE_NAME = 261 + BACKFILL_LINE_ITEM_PRIORITY = 266 + BACKFILL_LINE_ITEM_RESERVATION_STATUS = 306 + BACKFILL_LINE_ITEM_RESERVATION_STATUS_NAME = 307 + BACKFILL_LINE_ITEM_START_DATE = 268 + BACKFILL_LINE_ITEM_START_DATE_TIME = 270 + BACKFILL_LINE_ITEM_TYPE = 300 + BACKFILL_LINE_ITEM_TYPE_NAME = 301 + BACKFILL_LINE_ITEM_UNLIMITED_END = 271 + BACKFILL_LINE_ITEM_VALUE_COST_PER_UNIT = 275 + BACKFILL_LINE_ITEM_WEB_PROPERTY_CODE = 287 + BACKFILL_MASTER_COMPANION_CREATIVE_ID = 372 + BACKFILL_MASTER_COMPANION_CREATIVE_NAME = 373 + BACKFILL_ORDER_AGENCY = 313 + BACKFILL_ORDER_AGENCY_ID = 314 + BACKFILL_ORDER_BOOKED_CPC = 315 + BACKFILL_ORDER_BOOKED_CPM = 316 + BACKFILL_ORDER_DELIVERY_STATUS = 340 + BACKFILL_ORDER_DELIVERY_STATUS_NAME = 341 + BACKFILL_ORDER_END_DATE = 317 + BACKFILL_ORDER_END_DATE_TIME = 319 + BACKFILL_ORDER_EXTERNAL_ID = 320 + BACKFILL_ORDER_ID = 338 + BACKFILL_ORDER_LABELS = 334 + 
BACKFILL_ORDER_LABEL_IDS = 335 + BACKFILL_ORDER_LIFETIME_CLICKS = 322 + BACKFILL_ORDER_LIFETIME_IMPRESSIONS = 323 + BACKFILL_ORDER_NAME = 339 + BACKFILL_ORDER_PO_NUMBER = 324 + BACKFILL_ORDER_PROGRAMMATIC = 321 + BACKFILL_ORDER_SALESPERSON = 325 + BACKFILL_ORDER_SECONDARY_SALESPEOPLE = 329 + BACKFILL_ORDER_SECONDARY_SALESPEOPLE_ID = 328 + BACKFILL_ORDER_SECONDARY_TRAFFICKERS = 331 + BACKFILL_ORDER_SECONDARY_TRAFFICKERS_ID = 330 + BACKFILL_ORDER_START_DATE = 332 + BACKFILL_ORDER_START_DATE_TIME = 333 + BACKFILL_ORDER_TRAFFICKER = 326 + BACKFILL_ORDER_TRAFFICKER_ID = 327 + BACKFILL_ORDER_UNLIMITED_END = 318 + BACKFILL_PROGRAMMATIC_BUYER_ID = 336 + BACKFILL_PROGRAMMATIC_BUYER_NAME = 337 + BRANDING_TYPE = 383 + BRANDING_TYPE_NAME = 384 + BROWSER_CATEGORY = 119 + BROWSER_CATEGORY_NAME = 120 + BROWSER_ID = 235 + BROWSER_NAME = 236 + CARRIER_ID = 369 + CARRIER_NAME = 368 + CLASSIFIED_ADVERTISER_ID = 133 + CLASSIFIED_ADVERTISER_NAME = 134 + CLASSIFIED_BRAND_ID = 243 + CLASSIFIED_BRAND_NAME = 244 + CONTENT_ID = 246 + CONTENT_NAME = 247 + COUNTRY_ID = 11 + COUNTRY_NAME = 12 + CREATIVE_BILLING_TYPE = 366 + CREATIVE_BILLING_TYPE_NAME = 367 + CREATIVE_CLICK_THROUGH_URL = 174 + CREATIVE_ID = 138 + CREATIVE_NAME = 139 + CREATIVE_TECHNOLOGY = 148 + CREATIVE_TECHNOLOGY_NAME = 149 + CREATIVE_THIRD_PARTY_VENDOR = 361 + CREATIVE_TYPE = 344 + CREATIVE_TYPE_NAME = 345 + DATE = 3 + DAY_OF_WEEK = 4 + DEMAND_CHANNEL = 9 + DEMAND_CHANNEL_NAME = 10 + DEMAND_SUBCHANNEL = 22 + DEMAND_SUBCHANNEL_NAME = 23 + DEVICE = 226 + DEVICE_CATEGORY = 15 + DEVICE_CATEGORY_NAME = 16 + DEVICE_NAME = 225 + EXCHANGE_THIRD_PARTY_COMPANY_ID = 185 + EXCHANGE_THIRD_PARTY_COMPANY_NAME = 186 + FIRST_LOOK_PRICING_RULE_ID = 248 + FIRST_LOOK_PRICING_RULE_NAME = 249 + HOUR = 100 + INTERACTION_TYPE = 223 + INTERACTION_TYPE_NAME = 224 + INVENTORY_FORMAT = 17 + INVENTORY_FORMAT_NAME = 18 + INVENTORY_TYPE = 19 + INVENTORY_TYPE_NAME = 20 + IS_ADX_DIRECT = 382 + IS_FIRST_LOOK_DEAL = 401 + KEY_VALUES_ID = 214 + KEY_VALUES_NAME 
= 215 + LINE_ITEM_ARCHIVED = 188 + LINE_ITEM_COMPANION_DELIVERY_OPTION = 204 + LINE_ITEM_COMPANION_DELIVERY_OPTION_NAME = 205 + LINE_ITEM_COMPUTED_STATUS = 250 + LINE_ITEM_COMPUTED_STATUS_NAME = 251 + LINE_ITEM_CONTRACTED_QUANTITY = 92 + LINE_ITEM_COST_PER_UNIT = 85 + LINE_ITEM_COST_TYPE = 212 + LINE_ITEM_COST_TYPE_NAME = 213 + LINE_ITEM_CREATIVE_END_DATE = 176 + LINE_ITEM_CREATIVE_ROTATION_TYPE = 189 + LINE_ITEM_CREATIVE_ROTATION_TYPE_NAME = 190 + LINE_ITEM_CREATIVE_START_DATE = 175 + LINE_ITEM_CURRENCY_CODE = 180 + LINE_ITEM_DELIVERY_INDICATOR = 87 + LINE_ITEM_DELIVERY_RATE_TYPE = 191 + LINE_ITEM_DELIVERY_RATE_TYPE_NAME = 192 + LINE_ITEM_DISCOUNT_ABSOLUTE = 195 + LINE_ITEM_DISCOUNT_PERCENTAGE = 196 + LINE_ITEM_END_DATE = 81 + LINE_ITEM_END_DATE_TIME = 83 + LINE_ITEM_ENVIRONMENT_TYPE = 201 + LINE_ITEM_ENVIRONMENT_TYPE_NAME = 202 + LINE_ITEM_EXTERNAL_DEAL_ID = 97 + LINE_ITEM_EXTERNAL_ID = 86 + LINE_ITEM_FREQUENCY_CAP = 256 + LINE_ITEM_ID = 1 + LINE_ITEM_LAST_MODIFIED_BY_APP = 181 + LINE_ITEM_LIFETIME_CLICKS = 95 + LINE_ITEM_LIFETIME_IMPRESSIONS = 94 + LINE_ITEM_LIFETIME_VIEWABLE_IMPRESSIONS = 96 + LINE_ITEM_MAKEGOOD = 89 + LINE_ITEM_NAME = 2 + LINE_ITEM_NON_CPD_BOOKED_REVENUE = 98 + LINE_ITEM_OPTIMIZABLE = 90 + LINE_ITEM_PRIMARY_GOAL_TYPE = 210 + LINE_ITEM_PRIMARY_GOAL_TYPE_NAME = 211 + LINE_ITEM_PRIMARY_GOAL_UNITS_ABSOLUTE = 93 + LINE_ITEM_PRIMARY_GOAL_UNITS_PERCENTAGE = 396 + LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE = 208 + LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE_NAME = 209 + LINE_ITEM_PRIORITY = 24 + LINE_ITEM_RESERVATION_STATUS = 304 + LINE_ITEM_RESERVATION_STATUS_NAME = 305 + LINE_ITEM_START_DATE = 82 + LINE_ITEM_START_DATE_TIME = 84 + LINE_ITEM_TYPE = 193 + LINE_ITEM_TYPE_NAME = 194 + LINE_ITEM_UNLIMITED_END = 187 + LINE_ITEM_VALUE_COST_PER_UNIT = 88 + LINE_ITEM_WEB_PROPERTY_CODE = 179 + MASTER_COMPANION_CREATIVE_ID = 140 + MASTER_COMPANION_CREATIVE_NAME = 141 + MOBILE_APP_FREE = 128 + MOBILE_APP_ICON_URL = 129 + MOBILE_APP_ID = 123 + MOBILE_APP_NAME = 127 + 
MOBILE_APP_OWNERSHIP_STATUS = 311 + MOBILE_APP_OWNERSHIP_STATUS_NAME = 312 + MOBILE_APP_STORE = 125 + MOBILE_APP_STORE_NAME = 245 + MOBILE_INVENTORY_TYPE = 99 + MOBILE_INVENTORY_TYPE_NAME = 21 + MOBILE_SDK_VERSION_NAME = 130 + MONTH_YEAR = 6 + NATIVE_AD_FORMAT_ID = 255 + NATIVE_AD_FORMAT_NAME = 254 + NATIVE_STYLE_ID = 253 + NATIVE_STYLE_NAME = 252 + OPERATING_SYSTEM_CATEGORY = 117 + OPERATING_SYSTEM_CATEGORY_NAME = 118 + OPERATING_SYSTEM_VERSION_ID = 238 + OPERATING_SYSTEM_VERSION_NAME = 237 + ORDER_AGENCY = 150 + ORDER_AGENCY_ID = 151 + ORDER_BOOKED_CPC = 152 + ORDER_BOOKED_CPM = 153 + ORDER_DELIVERY_STATUS = 231 + ORDER_DELIVERY_STATUS_NAME = 239 + ORDER_END_DATE = 154 + ORDER_END_DATE_TIME = 155 + ORDER_EXTERNAL_ID = 156 + ORDER_ID = 7 + ORDER_LABELS = 170 + ORDER_LABEL_IDS = 171 + ORDER_LIFETIME_CLICKS = 158 + ORDER_LIFETIME_IMPRESSIONS = 159 + ORDER_NAME = 8 + ORDER_PO_NUMBER = 160 + ORDER_PROGRAMMATIC = 157 + ORDER_SALESPERSON = 161 + ORDER_SECONDARY_SALESPEOPLE = 164 + ORDER_SECONDARY_SALESPEOPLE_ID = 165 + ORDER_SECONDARY_TRAFFICKERS = 166 + ORDER_SECONDARY_TRAFFICKERS_ID = 167 + ORDER_START_DATE = 168 + ORDER_START_DATE_TIME = 169 + ORDER_TRAFFICKER = 162 + ORDER_TRAFFICKER_ID = 163 + ORDER_UNLIMITED_END = 203 + PLACEMENT_ID = 113 + PLACEMENT_ID_ALL = 144 + PLACEMENT_NAME = 114 + PLACEMENT_NAME_ALL = 145 + PLACEMENT_STATUS = 362 + PLACEMENT_STATUS_ALL = 363 + PLACEMENT_STATUS_NAME = 364 + PLACEMENT_STATUS_NAME_ALL = 365 + PROGRAMMATIC_BUYER_ID = 240 + PROGRAMMATIC_BUYER_NAME = 241 + PROGRAMMATIC_CHANNEL = 13 + PROGRAMMATIC_CHANNEL_NAME = 14 + RENDERED_CREATIVE_SIZE = 343 + REQUESTED_AD_SIZES = 352 + REQUEST_TYPE = 146 + REQUEST_TYPE_NAME = 147 + SITE = 387 + TARGETING_ID = 232 + TARGETING_NAME = 233 + TARGETING_TYPE = 385 + TARGETING_TYPE_NAME = 386 + TRAFFIC_SOURCE = 388 + TRAFFIC_SOURCE_NAME = 389 + UNIFIED_PRICING_RULE_ID = 393 + UNIFIED_PRICING_RULE_NAME = 394 + VIDEO_PLCMT = 172 + VIDEO_PLCMT_NAME = 173 + WEEK = 5 + YIELD_GROUP_BUYER_NAME = 184 + 
YIELD_GROUP_ID = 182 + YIELD_GROUP_NAME = 183 + LINE_ITEM_CUSTOM_FIELD_0_OPTION_ID = 10000 + LINE_ITEM_CUSTOM_FIELD_1_OPTION_ID = 10001 + LINE_ITEM_CUSTOM_FIELD_2_OPTION_ID = 10002 + LINE_ITEM_CUSTOM_FIELD_3_OPTION_ID = 10003 + LINE_ITEM_CUSTOM_FIELD_4_OPTION_ID = 10004 + LINE_ITEM_CUSTOM_FIELD_5_OPTION_ID = 10005 + LINE_ITEM_CUSTOM_FIELD_6_OPTION_ID = 10006 + LINE_ITEM_CUSTOM_FIELD_7_OPTION_ID = 10007 + LINE_ITEM_CUSTOM_FIELD_8_OPTION_ID = 10008 + LINE_ITEM_CUSTOM_FIELD_9_OPTION_ID = 10009 + LINE_ITEM_CUSTOM_FIELD_10_OPTION_ID = 10010 + LINE_ITEM_CUSTOM_FIELD_11_OPTION_ID = 10011 + LINE_ITEM_CUSTOM_FIELD_12_OPTION_ID = 10012 + LINE_ITEM_CUSTOM_FIELD_13_OPTION_ID = 10013 + LINE_ITEM_CUSTOM_FIELD_14_OPTION_ID = 10014 + LINE_ITEM_CUSTOM_FIELD_0_VALUE = 11000 + LINE_ITEM_CUSTOM_FIELD_1_VALUE = 11001 + LINE_ITEM_CUSTOM_FIELD_2_VALUE = 11002 + LINE_ITEM_CUSTOM_FIELD_3_VALUE = 11003 + LINE_ITEM_CUSTOM_FIELD_4_VALUE = 11004 + LINE_ITEM_CUSTOM_FIELD_5_VALUE = 11005 + LINE_ITEM_CUSTOM_FIELD_6_VALUE = 11006 + LINE_ITEM_CUSTOM_FIELD_7_VALUE = 11007 + LINE_ITEM_CUSTOM_FIELD_8_VALUE = 11008 + LINE_ITEM_CUSTOM_FIELD_9_VALUE = 11009 + LINE_ITEM_CUSTOM_FIELD_10_VALUE = 11010 + LINE_ITEM_CUSTOM_FIELD_11_VALUE = 11011 + LINE_ITEM_CUSTOM_FIELD_12_VALUE = 11012 + LINE_ITEM_CUSTOM_FIELD_13_VALUE = 11013 + LINE_ITEM_CUSTOM_FIELD_14_VALUE = 11014 + ORDER_CUSTOM_FIELD_0_OPTION_ID = 12000 + ORDER_CUSTOM_FIELD_1_OPTION_ID = 12001 + ORDER_CUSTOM_FIELD_2_OPTION_ID = 12002 + ORDER_CUSTOM_FIELD_3_OPTION_ID = 12003 + ORDER_CUSTOM_FIELD_4_OPTION_ID = 12004 + ORDER_CUSTOM_FIELD_5_OPTION_ID = 12005 + ORDER_CUSTOM_FIELD_6_OPTION_ID = 12006 + ORDER_CUSTOM_FIELD_7_OPTION_ID = 12007 + ORDER_CUSTOM_FIELD_8_OPTION_ID = 12008 + ORDER_CUSTOM_FIELD_9_OPTION_ID = 12009 + ORDER_CUSTOM_FIELD_10_OPTION_ID = 12010 + ORDER_CUSTOM_FIELD_11_OPTION_ID = 12011 + ORDER_CUSTOM_FIELD_12_OPTION_ID = 12012 + ORDER_CUSTOM_FIELD_13_OPTION_ID = 12013 + ORDER_CUSTOM_FIELD_14_OPTION_ID = 12014 + ORDER_CUSTOM_FIELD_0_VALUE = 
13000 + ORDER_CUSTOM_FIELD_1_VALUE = 13001 + ORDER_CUSTOM_FIELD_2_VALUE = 13002 + ORDER_CUSTOM_FIELD_3_VALUE = 13003 + ORDER_CUSTOM_FIELD_4_VALUE = 13004 + ORDER_CUSTOM_FIELD_5_VALUE = 13005 + ORDER_CUSTOM_FIELD_6_VALUE = 13006 + ORDER_CUSTOM_FIELD_7_VALUE = 13007 + ORDER_CUSTOM_FIELD_8_VALUE = 13008 + ORDER_CUSTOM_FIELD_9_VALUE = 13009 + ORDER_CUSTOM_FIELD_10_VALUE = 13010 + ORDER_CUSTOM_FIELD_11_VALUE = 13011 + ORDER_CUSTOM_FIELD_12_VALUE = 13012 + ORDER_CUSTOM_FIELD_13_VALUE = 13013 + ORDER_CUSTOM_FIELD_14_VALUE = 13014 + CREATIVE_CUSTOM_FIELD_0_OPTION_ID = 14000 + CREATIVE_CUSTOM_FIELD_1_OPTION_ID = 14001 + CREATIVE_CUSTOM_FIELD_2_OPTION_ID = 14002 + CREATIVE_CUSTOM_FIELD_3_OPTION_ID = 14003 + CREATIVE_CUSTOM_FIELD_4_OPTION_ID = 14004 + CREATIVE_CUSTOM_FIELD_5_OPTION_ID = 14005 + CREATIVE_CUSTOM_FIELD_6_OPTION_ID = 14006 + CREATIVE_CUSTOM_FIELD_7_OPTION_ID = 14007 + CREATIVE_CUSTOM_FIELD_8_OPTION_ID = 14008 + CREATIVE_CUSTOM_FIELD_9_OPTION_ID = 14009 + CREATIVE_CUSTOM_FIELD_10_OPTION_ID = 14010 + CREATIVE_CUSTOM_FIELD_11_OPTION_ID = 14011 + CREATIVE_CUSTOM_FIELD_12_OPTION_ID = 14012 + CREATIVE_CUSTOM_FIELD_13_OPTION_ID = 14013 + CREATIVE_CUSTOM_FIELD_14_OPTION_ID = 14014 + CREATIVE_CUSTOM_FIELD_0_VALUE = 15000 + CREATIVE_CUSTOM_FIELD_1_VALUE = 15001 + CREATIVE_CUSTOM_FIELD_2_VALUE = 15002 + CREATIVE_CUSTOM_FIELD_3_VALUE = 15003 + CREATIVE_CUSTOM_FIELD_4_VALUE = 15004 + CREATIVE_CUSTOM_FIELD_5_VALUE = 15005 + CREATIVE_CUSTOM_FIELD_6_VALUE = 15006 + CREATIVE_CUSTOM_FIELD_7_VALUE = 15007 + CREATIVE_CUSTOM_FIELD_8_VALUE = 15008 + CREATIVE_CUSTOM_FIELD_9_VALUE = 15009 + CREATIVE_CUSTOM_FIELD_10_VALUE = 15010 + CREATIVE_CUSTOM_FIELD_11_VALUE = 15011 + CREATIVE_CUSTOM_FIELD_12_VALUE = 15012 + CREATIVE_CUSTOM_FIELD_13_VALUE = 15013 + CREATIVE_CUSTOM_FIELD_14_VALUE = 15014 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_0_OPTION_ID = 16000 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_1_OPTION_ID = 16001 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_2_OPTION_ID = 16002 + 
BACKFILL_LINE_ITEM_CUSTOM_FIELD_3_OPTION_ID = 16003 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_4_OPTION_ID = 16004 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_5_OPTION_ID = 16005 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_6_OPTION_ID = 16006 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_7_OPTION_ID = 16007 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_8_OPTION_ID = 16008 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_9_OPTION_ID = 16009 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_10_OPTION_ID = 16010 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_11_OPTION_ID = 16011 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_12_OPTION_ID = 16012 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_13_OPTION_ID = 16013 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_14_OPTION_ID = 16014 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_0_VALUE = 17000 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_1_VALUE = 17001 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_2_VALUE = 17002 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_3_VALUE = 17003 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_4_VALUE = 17004 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_5_VALUE = 17005 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_6_VALUE = 17006 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_7_VALUE = 17007 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_8_VALUE = 17008 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_9_VALUE = 17009 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_10_VALUE = 17010 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_11_VALUE = 17011 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_12_VALUE = 17012 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_13_VALUE = 17013 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_14_VALUE = 17014 + BACKFILL_ORDER_CUSTOM_FIELD_0_OPTION_ID = 18000 + BACKFILL_ORDER_CUSTOM_FIELD_1_OPTION_ID = 18001 + BACKFILL_ORDER_CUSTOM_FIELD_2_OPTION_ID = 18002 + BACKFILL_ORDER_CUSTOM_FIELD_3_OPTION_ID = 18003 + BACKFILL_ORDER_CUSTOM_FIELD_4_OPTION_ID = 18004 + BACKFILL_ORDER_CUSTOM_FIELD_5_OPTION_ID = 18005 + BACKFILL_ORDER_CUSTOM_FIELD_6_OPTION_ID = 18006 + BACKFILL_ORDER_CUSTOM_FIELD_7_OPTION_ID = 18007 + BACKFILL_ORDER_CUSTOM_FIELD_8_OPTION_ID = 18008 + BACKFILL_ORDER_CUSTOM_FIELD_9_OPTION_ID = 18009 + BACKFILL_ORDER_CUSTOM_FIELD_10_OPTION_ID = 18010 + BACKFILL_ORDER_CUSTOM_FIELD_11_OPTION_ID 
= 18011 + BACKFILL_ORDER_CUSTOM_FIELD_12_OPTION_ID = 18012 + BACKFILL_ORDER_CUSTOM_FIELD_13_OPTION_ID = 18013 + BACKFILL_ORDER_CUSTOM_FIELD_14_OPTION_ID = 18014 + BACKFILL_ORDER_CUSTOM_FIELD_0_VALUE = 19000 + BACKFILL_ORDER_CUSTOM_FIELD_1_VALUE = 19001 + BACKFILL_ORDER_CUSTOM_FIELD_2_VALUE = 19002 + BACKFILL_ORDER_CUSTOM_FIELD_3_VALUE = 19003 + BACKFILL_ORDER_CUSTOM_FIELD_4_VALUE = 19004 + BACKFILL_ORDER_CUSTOM_FIELD_5_VALUE = 19005 + BACKFILL_ORDER_CUSTOM_FIELD_6_VALUE = 19006 + BACKFILL_ORDER_CUSTOM_FIELD_7_VALUE = 19007 + BACKFILL_ORDER_CUSTOM_FIELD_8_VALUE = 19008 + BACKFILL_ORDER_CUSTOM_FIELD_9_VALUE = 19009 + BACKFILL_ORDER_CUSTOM_FIELD_10_VALUE = 19010 + BACKFILL_ORDER_CUSTOM_FIELD_11_VALUE = 19011 + BACKFILL_ORDER_CUSTOM_FIELD_12_VALUE = 19012 + BACKFILL_ORDER_CUSTOM_FIELD_13_VALUE = 19013 + BACKFILL_ORDER_CUSTOM_FIELD_14_VALUE = 19014 + BACKFILL_CREATIVE_CUSTOM_FIELD_0_OPTION_ID = 20000 + BACKFILL_CREATIVE_CUSTOM_FIELD_1_OPTION_ID = 20001 + BACKFILL_CREATIVE_CUSTOM_FIELD_2_OPTION_ID = 20002 + BACKFILL_CREATIVE_CUSTOM_FIELD_3_OPTION_ID = 20003 + BACKFILL_CREATIVE_CUSTOM_FIELD_4_OPTION_ID = 20004 + BACKFILL_CREATIVE_CUSTOM_FIELD_5_OPTION_ID = 20005 + BACKFILL_CREATIVE_CUSTOM_FIELD_6_OPTION_ID = 20006 + BACKFILL_CREATIVE_CUSTOM_FIELD_7_OPTION_ID = 20007 + BACKFILL_CREATIVE_CUSTOM_FIELD_8_OPTION_ID = 20008 + BACKFILL_CREATIVE_CUSTOM_FIELD_9_OPTION_ID = 20009 + BACKFILL_CREATIVE_CUSTOM_FIELD_10_OPTION_ID = 20010 + BACKFILL_CREATIVE_CUSTOM_FIELD_11_OPTION_ID = 20011 + BACKFILL_CREATIVE_CUSTOM_FIELD_12_OPTION_ID = 20012 + BACKFILL_CREATIVE_CUSTOM_FIELD_13_OPTION_ID = 20013 + BACKFILL_CREATIVE_CUSTOM_FIELD_14_OPTION_ID = 20014 + BACKFILL_CREATIVE_CUSTOM_FIELD_0_VALUE = 21000 + BACKFILL_CREATIVE_CUSTOM_FIELD_1_VALUE = 21001 + BACKFILL_CREATIVE_CUSTOM_FIELD_2_VALUE = 21002 + BACKFILL_CREATIVE_CUSTOM_FIELD_3_VALUE = 21003 + BACKFILL_CREATIVE_CUSTOM_FIELD_4_VALUE = 21004 + BACKFILL_CREATIVE_CUSTOM_FIELD_5_VALUE = 21005 + BACKFILL_CREATIVE_CUSTOM_FIELD_6_VALUE = 21006 
+ BACKFILL_CREATIVE_CUSTOM_FIELD_7_VALUE = 21007 + BACKFILL_CREATIVE_CUSTOM_FIELD_8_VALUE = 21008 + BACKFILL_CREATIVE_CUSTOM_FIELD_9_VALUE = 21009 + BACKFILL_CREATIVE_CUSTOM_FIELD_10_VALUE = 21010 + BACKFILL_CREATIVE_CUSTOM_FIELD_11_VALUE = 21011 + BACKFILL_CREATIVE_CUSTOM_FIELD_12_VALUE = 21012 + BACKFILL_CREATIVE_CUSTOM_FIELD_13_VALUE = 21013 + BACKFILL_CREATIVE_CUSTOM_FIELD_14_VALUE = 21014 + CUSTOM_DIMENSION_0_VALUE_ID = 100000 + CUSTOM_DIMENSION_1_VALUE_ID = 100001 + CUSTOM_DIMENSION_2_VALUE_ID = 100002 + CUSTOM_DIMENSION_3_VALUE_ID = 100003 + CUSTOM_DIMENSION_4_VALUE_ID = 100004 + CUSTOM_DIMENSION_5_VALUE_ID = 100005 + CUSTOM_DIMENSION_6_VALUE_ID = 100006 + CUSTOM_DIMENSION_7_VALUE_ID = 100007 + CUSTOM_DIMENSION_8_VALUE_ID = 100008 + CUSTOM_DIMENSION_9_VALUE_ID = 100009 + CUSTOM_DIMENSION_0_VALUE = 101000 + CUSTOM_DIMENSION_1_VALUE = 101001 + CUSTOM_DIMENSION_2_VALUE = 101002 + CUSTOM_DIMENSION_3_VALUE = 101003 + CUSTOM_DIMENSION_4_VALUE = 101004 + CUSTOM_DIMENSION_5_VALUE = 101005 + CUSTOM_DIMENSION_6_VALUE = 101006 + CUSTOM_DIMENSION_7_VALUE = 101007 + CUSTOM_DIMENSION_8_VALUE = 101008 + CUSTOM_DIMENSION_9_VALUE = 101009 + + class Metric(proto.Enum): + r"""Reporting metrics. + + Values: + METRIC_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME (61): + Active View total average time in seconds + that specific impressions are reported as being + viewable. + ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS (58): + Total number of impressions that were + eligible to measure viewability. + ACTIVE_VIEW_MEASURABLE_IMPRESSIONS (57): + The total number of impressions that were + sampled and measured by active view. + ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE (60): + The percentage of total impressions that were + measurable by active view (out of all the total + impressions sampled for active view). + ACTIVE_VIEW_VIEWABLE_IMPRESSIONS (56): + The total number of impressions viewed on the + user's screen. 
+ ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE (59): + The percentage of total impressions viewed on + the user's screen (out of the total impressions + measurable by active view). + ADSENSE_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME (73): + Active View AdSense average time in seconds + that specific impressions are reported as being + viewable. + ADSENSE_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS (70): + Total number of impressions delivered by + AdSense that were eligible to measure + viewability. + ADSENSE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS (69): + The number of impressions delivered by + AdSense that were sampled, and measurable by + active view. + ADSENSE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE (72): + The percentage of impressions delivered by + AdSense that were measurable by active view (out + of all AdSense impressions sampled for active + view). + ADSENSE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS (68): + The number of impressions delivered by + AdSense viewed on the user's screen. + ADSENSE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE (71): + The percentage of impressions delivered by + AdSense viewed on the user's screen (out of + AdSense impressions measurable by active view). + ADSENSE_AVERAGE_ECPM (26): + The average effective + cost-per-thousand-impressions earned from the + ads delivered by AdSense through line item + dynamic allocation. + ADSENSE_CLICKS (23): + Number of clicks delivered by AdSense demand + channel. + ADSENSE_CTR (24): + The ratio of impressions served by AdSense + that resulted in users clicking on an ad. The + clickthrough rate (CTR) is updated nightly. The + AdSense CTR is calculated as: (AdSense clicks / + AdSense impressions). + ADSENSE_IMPRESSIONS (22): + Total impressions delivered by AdSense. + ADSENSE_PERCENT_CLICKS (28): + Ratio of clicks delivered by AdSense through + line item dynamic allocation in relation to the + total clicks delivered. 
+ ADSENSE_PERCENT_IMPRESSIONS (27): + Ratio of impressions delivered by AdSense + through line item dynamic allocation in relation + to the total impressions delivered. + ADSENSE_PERCENT_REVENUE (29): + Ratio of revenue generated by AdSense through + line item dynamic allocation in relation to the + total revenue. + ADSENSE_PERCENT_REVENUE_WITHOUT_CPD (30): + Ratio of revenue generated by AdSense through + line item dynamic allocation in relation to the + total revenue (excluding CPD). + ADSENSE_RESPONSES_SERVED (41): + The total number of times that an AdSense ad + is delivered. + ADSENSE_REVENUE (25): + Revenue generated from AdSense through line + item dynamic allocation, calculated in the + network's currency and time zone. + AD_EXCHANGE_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME (79): + Active View AdExchange average time in + seconds that specific impressions are reported + as being viewable. + AD_EXCHANGE_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS (76): + Total number of impressions delivered by Ad + Exchange that were eligible to measure + viewability. + AD_EXCHANGE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS (75): + The number of impressions delivered by Ad + Exchange that were sampled, and measurable by + active view. + AD_EXCHANGE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE (78): + The percentage of impressions delivered by Ad + Exchange that were measurable by active view + (out of all Ad Exchange impressions sampled for + active view). + AD_EXCHANGE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS (74): + The number of impressions delivered by Ad + Exchange viewed on the user's screen. + AD_EXCHANGE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE (77): + The percentage of impressions delivered by Ad + Exchange viewed on the user's screen (out of Ad + Exchange impressions measurable by active view). + AD_EXCHANGE_AVERAGE_ECPM (18): + The average effective + cost-per-thousand-impressions earned from the + ads delivered by Ad Exchange through line item + dynamic allocation. 
+ AD_EXCHANGE_CLICKS (15): + Number of clicks delivered by the Ad + Exchange. + AD_EXCHANGE_CTR (16): + The ratio of impressions served by the Ad + Exchange that resulted in users clicking on an + ad. The clickthrough rate (CTR) is updated + nightly. Ad Exchange CTR is calculated as: (Ad + Exchange clicks / Ad Exchange impressions). + AD_EXCHANGE_IMPRESSIONS (14): + Total impressions delivered by the Ad + Exchange. + AD_EXCHANGE_PERCENT_CLICKS (20): + Ratio of clicks delivered by Ad Exchange + through line item dynamic allocation in relation + to the total clicks delivered. + AD_EXCHANGE_PERCENT_IMPRESSIONS (19): + Ratio of impressions delivered by Ad Exchange + through line item dynamic allocation in relation + to the total impressions delivered. + AD_EXCHANGE_PERCENT_REVENUE (21): + Ratio of revenue generated by Ad Exchange + through line item dynamic allocation in relation + to the total revenue. + AD_EXCHANGE_PERCENT_REVENUE_WITHOUT_CPD (31): + Ratio of revenue generated by Ad Exchange + through line item dynamic allocation in relation + to the total revenue (excluding CPD). + AD_EXCHANGE_RESPONSES_SERVED (42): + The total number of times that an Ad Exchange + ad is delivered. + AD_EXCHANGE_REVENUE (17): + Revenue generated from the Ad Exchange + through line item dynamic allocation, calculated + in your network's currency and time zone. + AD_REQUESTS (38): + The total number of times that an ad request + is sent to the ad server including dynamic + allocation. + AD_SERVER_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME (67): + Active View ad server average time in seconds + that specific impressions are reported as being + viewable. + AD_SERVER_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS (64): + Total number of impressions delivered by the + ad server that were eligible to measure + viewability. + AD_SERVER_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS (63): + The number of impressions delivered by the ad + server that were sampled, and measurable by + active view. 
+ AD_SERVER_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE (66): + The percentage of impressions delivered by + the ad server that were measurable by active + view (out of all the ad server impressions + sampled for active view). + AD_SERVER_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS (62): + The number of impressions delivered by the ad + server viewed on the user's screen. + AD_SERVER_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE (65): + The percentage of impressions delivered by + the ad server viewed on the user's screen (out + of the ad server impressions measurable by + active view). + AD_SERVER_AVERAGE_ECPM (34): + Average effective + cost-per-thousand-impressions earned from the + ads delivered by the Google Ad Manager server. + AD_SERVER_AVERAGE_ECPM_WITHOUT_CPD (10): + Average effective + cost-per-thousand-impressions earned from the + ads delivered by the Google Ad Manager server, + excluding CPD value. + AD_SERVER_CLICKS (7): + Total clicks served by the Google Ad Manager + server. It usually takes about 30 minutes for + new clicks to be recorded and added to the total + displayed in reporting. + AD_SERVER_CPD_REVENUE (32): + CPD revenue earned, calculated in your + network's currency, for the ads delivered by the + Google Ad Manager server. Sum of all booked + revenue. + AD_SERVER_CTR (8): + Ratio of impressions served by the Google Ad + Manager server that resulted in users clicking + on an ad. The clickthrough rate (CTR) is updated + nightly. The ad server CTR is calculated as: (Ad + server clicks / Ad server impressions). + AD_SERVER_IMPRESSIONS (6): + Total impressions delivered by the Ad Server. + AD_SERVER_PERCENT_CLICKS (12): + Ratio of clicks delivered by the Google Ad + Manager server in relation to the total clicks + delivered. + AD_SERVER_PERCENT_IMPRESSIONS (11): + Ratio of impressions delivered by the Google + Ad Manager server in relation to the total + impressions delivered. 
+ AD_SERVER_PERCENT_REVENUE (35): + Ratio of revenue generated by the Google Ad + Manager server in relation to the total revenue. + AD_SERVER_PERCENT_REVENUE_WITHOUT_CPD (13): + Ratio of revenue generated by the Google Ad + Manager server (excluding CPD) in relation to + the total revenue. + AD_SERVER_RESPONSES_SERVED (40): + The total number of times that an ad is + served by the ad server. + AD_SERVER_REVENUE (33): + All CPM, CPC, and CPD revenue earned, + calculated in your network's currency, for the + ads delivered by the Google Ad Manager server. + Sum of all booked revenue. + AD_SERVER_REVENUE_WITHOUT_CPD (9): + Revenue (excluding CPD) earned, calculated in + your network's currency, for the ads delivered + by the Google Ad Manager server. Sum of all + booked revenue. + AUCTIONS_WON (80): + Number of winning bids received from Open + Bidding buyers, even when the winning bid is + placed at the end of a mediation for mobile apps + chain. + AVERAGE_ECPM (37): + eCPM averaged across the Google Ad Manager + server, AdSense, and Ad Exchange. + AVERAGE_ECPM_WITHOUT_CPD (5): + eCPM averaged across the Google Ad Manager + server (excluding CPD), AdSense, and Ad + Exchange. + BIDS (81): + Number of bids received from Open Bidding + buyers, regardless of whether the returned bid + competes in an auction. + BIDS_IN_AUCTION (82): + Number of bids received from Open Bidding + buyers that competed in the auction. + CALLOUTS (83): + Number of times a yield partner is asked to + return bid to fill a yield group request. + CLICKS (2): + The number of times a user clicked on an ad. + CODE_SERVED_COUNT (44): + The total number of times that the code for + an ad is served by the ad server including + dynamic allocation. + CTR (3): + For standard ads, your ad clickthrough rate + (CTR) is the number of ad clicks divided by the + number of individual ad impressions expressed as + a fraction. Ad CTR = Clicks / Ad impressions. 
+ GOOGLE_SOLD_AUCTION_COVIEWED_IMPRESSIONS (129): + The number of coviewed impressions sold by + Google in partner sales. + GOOGLE_SOLD_AUCTION_IMPRESSIONS (128): + The number of auction impressions sold by + Google in partner sales. + GOOGLE_SOLD_COVIEWED_IMPRESSIONS (131): + The number of coviewed impressions sold by + Google in partner sales. + GOOGLE_SOLD_IMPRESSIONS (130): + The number of impressions sold by Google in + partner sales. + GOOGLE_SOLD_RESERVATION_COVIEWED_IMPRESSIONS (127): + The number of coviewed impressions sold by + Google in partner sales. + GOOGLE_SOLD_RESERVATION_IMPRESSIONS (126): + The number of reservation impressions sold by + Google in partner sales. + IMPRESSIONS (1): + Total impressions from the Google Ad Manager + server, AdSense, Ad Exchange, and yield group + partners. + PARTNER_SALES_FILLED_POD_REQUESTS (135): + The number of filled pod requests (filled by + partner or Google) in partner sales. + PARTNER_SALES_FILL_RATE (136): + The percent of filled requests to total ad + requests in partner sales. + PARTNER_SALES_PARTNER_MATCH_RATE (137): + The percent of partner filled requests to + total ad requests in partner sales. + PARTNER_SALES_QUERIES (132): + The number of queries eligible for partner + sales. + PARTNER_SALES_UNFILLED_IMPRESSIONS (133): + The number of partner unfilled impressions in + partner sales. If a pod request is not filled by + partner but filled by Google, this metric will + still count 1. + PARTNER_SALES_UNMATCHED_QUERIES (134): + The number of partner unmatched queries in + partner sales. If an ad request is not filled by + partner but filled by Google, this metric will + still count 1. + PARTNER_SOLD_CODE_SERVED (125): + The number of code served sold by partner in + partner sales. + PARTNER_SOLD_COVIEWED_IMPRESSIONS (124): + The number of coviewed impressions sold by + partner in partner sales. + PARTNER_SOLD_IMPRESSIONS (123): + The number of impressions sold by partner in + partner sales. 
+ PROGRAMMATIC_ELIGIBLE_AD_REQUESTS (177): + The total number of ad requests eligible for + programmatic inventory, including Programmatic + Guaranteed, Preferred Deals, backfill, and open + auction. + PROGRAMMATIC_MATCH_RATE (178): + The number of programmatic responses served + divided by the number of programmatic eligible + ad requests. Includes Ad Exchange, Open Bidding, + and Preferred Deals. + PROGRAMMATIC_RESPONSES_SERVED (176): + Total number of ad responses served from programmatic demand + sources. Includes Ad Exchange, Open Bidding, and Preferred + Deals. + + Differs from AD_EXCHANGE_RESPONSES_SERVED, which doesn't + include Open Bidding ad requests. + RESPONSES_SERVED (39): + The total number of times that an ad is + served by the ad server including dynamic + allocation. + REVENUE (36): + Total amount of CPM, CPC, and CPD revenue + based on the number of units served by the + Google Ad Manager server, AdSense, Ad Exchange, + and third-party Mediation networks. + REVENUE_WITHOUT_CPD (4): + Total amount of revenue (excluding CPD) based + on the number of units served by the Google Ad + Manager server, AdSense, Ad Exchange, and + third-party Mediation networks. + SUCCESSFUL_RESPONSES (84): + Number of times a yield group buyer + successfully returned a bid in response to a + yield group callout. + UNFILLED_IMPRESSIONS (45): + The total number of missed impressions due to + the ad servers' inability to find ads to serve + including dynamic allocation. + UNMATCHED_AD_REQUESTS (43): + The total number of times that an ad is not + returned by the ad server. + USER_MESSAGES_OFFERWALL_MESSAGES_SHOWN (121): + Number of times an Offerwall message was + shown to users. + USER_MESSAGES_OFFERWALL_SUCCESSFUL_ENGAGEMENTS (122): + The number of messages where the user gained + an entitlement. + VIDEO_INTERACTION_AVERAGE_INTERACTION_RATE (92): + The number of user interactions with a video, + on average, such as pause, full screen, mute, + etc. 
+ VIDEO_INTERACTION_COLLAPSES (93): + The number of times a user collapses a video, + either to its original size or to a different + size. + VIDEO_INTERACTION_EXPANDS (95): + The number of times a user expands a video. + VIDEO_INTERACTION_FULL_SCREENS (96): + The number of times ad clip played in full + screen mode. + VIDEO_INTERACTION_MUTES (97): + The number of times video player was in mute + state during play of ad clip. + VIDEO_INTERACTION_PAUSES (98): + The number of times user paused ad clip. + VIDEO_INTERACTION_RESUMES (99): + The number of times the user unpaused the + video. + VIDEO_INTERACTION_REWINDS (100): + The number of times a user rewinds the video. + VIDEO_INTERACTION_UNMUTES (101): + The number of times a user unmutes the video. + VIDEO_INTERACTION_VIDEO_SKIPS (102): + The number of times a skippable video is + skipped. + VIDEO_REAL_TIME_CREATIVE_SERVES (139): + The number of total creative serves in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_100_COUNT (143): + The number of errors of type 100 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_101_COUNT (144): + The number of errors of type 101 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_102_COUNT (145): + The number of errors of type 102 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_200_COUNT (146): + The number of errors of type 200 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_201_COUNT (147): + The number of errors of type 201 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_202_COUNT (148): + The number of errors of type 202 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_203_COUNT (149): + The number of errors of type 203 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_300_COUNT (150): + The number of errors of type 300 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_301_COUNT (151): + The number of errors of type 301 in video + realtime reporting. 
+ VIDEO_REAL_TIME_ERROR_302_COUNT (152): + The number of errors of type 302 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_303_COUNT (153): + The number of errors of type 303 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_400_COUNT (154): + The number of errors of type 400 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_401_COUNT (155): + The number of errors of type 401 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_402_COUNT (156): + The number of errors of type 402 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_403_COUNT (157): + The number of errors of type 403 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_405_COUNT (158): + The number of errors of type 405 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_406_COUNT (159): + The number of errors of type 406 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_407_COUNT (160): + The number of errors of type 407 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_408_COUNT (161): + The number of errors of type 408 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_409_COUNT (162): + The number of errors of type 409 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_410_COUNT (163): + The number of errors of type 410 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_500_COUNT (164): + The number of errors of type 500 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_501_COUNT (165): + The number of errors of type 501 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_502_COUNT (166): + The number of errors of type 502 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_503_COUNT (167): + The number of errors of type 503 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_600_COUNT (168): + The number of errors of type 600 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_601_COUNT (169): + The number of errors of type 601 in video + realtime reporting. 
+ VIDEO_REAL_TIME_ERROR_602_COUNT (170): + The number of errors of type 602 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_603_COUNT (171): + The number of errors of type 603 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_604_COUNT (172): + The number of errors of type 604 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_900_COUNT (173): + The number of errors of type 900 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_901_COUNT (174): + The number of errors of type 901 in video + realtime reporting. + VIDEO_REAL_TIME_IMPRESSIONS (138): + The number of total impressions in video + realtime reporting. + VIDEO_REAL_TIME_MATCHED_QUERIES (140): + The number of matched queries in video + realtime reporting. + VIDEO_REAL_TIME_TOTAL_ERROR_COUNT (175): + The number of all errors in video realtime + reporting. + VIDEO_REAL_TIME_TOTAL_QUERIES (142): + The number of total queries in video realtime + reporting. + VIDEO_REAL_TIME_UNMATCHED_QUERIES (141): + The number of unmatched queries in video + realtime reporting. + VIDEO_VIEWERSHIP_AUTO_PLAYS (103): + Number of times that the publisher specified + a video ad played automatically. + VIDEO_VIEWERSHIP_AVERAGE_VIEW_RATE (104): + Average percentage of the video watched by + users. + VIDEO_VIEWERSHIP_AVERAGE_VIEW_TIME (105): + Average time(seconds) users watched the + video. + VIDEO_VIEWERSHIP_CLICK_TO_PLAYS (106): + Number of times that the publisher specified + a video ad was clicked to play. + VIDEO_VIEWERSHIP_COMPLETES (107): + The number of times the video played to + completion. + VIDEO_VIEWERSHIP_COMPLETION_RATE (108): + Percentage of times the video played to the + end. + VIDEO_VIEWERSHIP_ENGAGED_VIEWS (109): + The number of engaged views: ad is viewed to + completion or for 30s, whichever comes first. + VIDEO_VIEWERSHIP_FIRST_QUARTILES (110): + The number of times the video played to 25% + of its length. 
+ VIDEO_VIEWERSHIP_MIDPOINTS (111): + The number of times the video reached its + midpoint during play. + VIDEO_VIEWERSHIP_SKIP_BUTTONS_SHOWN (112): + The number of times a skip button is shown in + video. + VIDEO_VIEWERSHIP_STARTS (113): + The number of impressions where the video was + played. + VIDEO_VIEWERSHIP_THIRD_QUARTILES (114): + The number of times the video played to 75% + of its length. + VIDEO_VIEWERSHIP_TOTAL_ERROR_COUNT (115): + The number of times an error occurred, such + as a VAST redirect error, a video playback + error, or an invalid response error. + VIDEO_VIEWERSHIP_TOTAL_ERROR_RATE (94): + The percentage of video error count. + VIDEO_VIEWERSHIP_VIDEO_LENGTH (116): + Duration of the video creative. + VIDEO_VIEWERSHIP_VIEW_THROUGH_RATE (117): + View-through rate represented as a + percentage. + YIELD_GROUP_ESTIMATED_CPM (88): + The estimated net rate for yield groups or + individual yield group partners. + YIELD_GROUP_ESTIMATED_REVENUE (87): + Total net revenue earned by a yield group, + based upon the yield group estimated CPM and + yield group impressions recorded. + YIELD_GROUP_IMPRESSIONS (85): + Number of matched yield group requests where + a yield partner delivered their ad to publisher + inventory. + YIELD_GROUP_MEDIATION_FILL_RATE (89): + Yield group Mediation fill rate indicating + how often a network fills an ad request. + YIELD_GROUP_MEDIATION_MATCHED_QUERIES (86): + Total requests where a Mediation chain was + served. + YIELD_GROUP_MEDIATION_PASSBACKS (118): + The number of mediation chain passback across + all channels. + YIELD_GROUP_MEDIATION_THIRD_PARTY_ECPM (90): + Revenue per thousand impressions based on + data collected by Ad Manager from third-party ad + network reports. 
+ """ + METRIC_UNSPECIFIED = 0 + ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME = 61 + ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS = 58 + ACTIVE_VIEW_MEASURABLE_IMPRESSIONS = 57 + ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE = 60 + ACTIVE_VIEW_VIEWABLE_IMPRESSIONS = 56 + ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE = 59 + ADSENSE_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME = 73 + ADSENSE_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS = 70 + ADSENSE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS = 69 + ADSENSE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE = 72 + ADSENSE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS = 68 + ADSENSE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE = 71 + ADSENSE_AVERAGE_ECPM = 26 + ADSENSE_CLICKS = 23 + ADSENSE_CTR = 24 + ADSENSE_IMPRESSIONS = 22 + ADSENSE_PERCENT_CLICKS = 28 + ADSENSE_PERCENT_IMPRESSIONS = 27 + ADSENSE_PERCENT_REVENUE = 29 + ADSENSE_PERCENT_REVENUE_WITHOUT_CPD = 30 + ADSENSE_RESPONSES_SERVED = 41 + ADSENSE_REVENUE = 25 + AD_EXCHANGE_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME = 79 + AD_EXCHANGE_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS = 76 + AD_EXCHANGE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS = 75 + AD_EXCHANGE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE = 78 + AD_EXCHANGE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS = 74 + AD_EXCHANGE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE = 77 + AD_EXCHANGE_AVERAGE_ECPM = 18 + AD_EXCHANGE_CLICKS = 15 + AD_EXCHANGE_CTR = 16 + AD_EXCHANGE_IMPRESSIONS = 14 + AD_EXCHANGE_PERCENT_CLICKS = 20 + AD_EXCHANGE_PERCENT_IMPRESSIONS = 19 + AD_EXCHANGE_PERCENT_REVENUE = 21 + AD_EXCHANGE_PERCENT_REVENUE_WITHOUT_CPD = 31 + AD_EXCHANGE_RESPONSES_SERVED = 42 + AD_EXCHANGE_REVENUE = 17 + AD_REQUESTS = 38 + AD_SERVER_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME = 67 + AD_SERVER_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS = 64 + AD_SERVER_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS = 63 + AD_SERVER_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE = 66 + AD_SERVER_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS = 62 + AD_SERVER_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE = 65 + AD_SERVER_AVERAGE_ECPM = 34 + AD_SERVER_AVERAGE_ECPM_WITHOUT_CPD = 10 + AD_SERVER_CLICKS = 7 + AD_SERVER_CPD_REVENUE = 32 + 
AD_SERVER_CTR = 8 + AD_SERVER_IMPRESSIONS = 6 + AD_SERVER_PERCENT_CLICKS = 12 + AD_SERVER_PERCENT_IMPRESSIONS = 11 + AD_SERVER_PERCENT_REVENUE = 35 + AD_SERVER_PERCENT_REVENUE_WITHOUT_CPD = 13 + AD_SERVER_RESPONSES_SERVED = 40 + AD_SERVER_REVENUE = 33 + AD_SERVER_REVENUE_WITHOUT_CPD = 9 + AUCTIONS_WON = 80 + AVERAGE_ECPM = 37 + AVERAGE_ECPM_WITHOUT_CPD = 5 + BIDS = 81 + BIDS_IN_AUCTION = 82 + CALLOUTS = 83 + CLICKS = 2 + CODE_SERVED_COUNT = 44 + CTR = 3 + GOOGLE_SOLD_AUCTION_COVIEWED_IMPRESSIONS = 129 + GOOGLE_SOLD_AUCTION_IMPRESSIONS = 128 + GOOGLE_SOLD_COVIEWED_IMPRESSIONS = 131 + GOOGLE_SOLD_IMPRESSIONS = 130 + GOOGLE_SOLD_RESERVATION_COVIEWED_IMPRESSIONS = 127 + GOOGLE_SOLD_RESERVATION_IMPRESSIONS = 126 + IMPRESSIONS = 1 + PARTNER_SALES_FILLED_POD_REQUESTS = 135 + PARTNER_SALES_FILL_RATE = 136 + PARTNER_SALES_PARTNER_MATCH_RATE = 137 + PARTNER_SALES_QUERIES = 132 + PARTNER_SALES_UNFILLED_IMPRESSIONS = 133 + PARTNER_SALES_UNMATCHED_QUERIES = 134 + PARTNER_SOLD_CODE_SERVED = 125 + PARTNER_SOLD_COVIEWED_IMPRESSIONS = 124 + PARTNER_SOLD_IMPRESSIONS = 123 + PROGRAMMATIC_ELIGIBLE_AD_REQUESTS = 177 + PROGRAMMATIC_MATCH_RATE = 178 + PROGRAMMATIC_RESPONSES_SERVED = 176 + RESPONSES_SERVED = 39 + REVENUE = 36 + REVENUE_WITHOUT_CPD = 4 + SUCCESSFUL_RESPONSES = 84 + UNFILLED_IMPRESSIONS = 45 + UNMATCHED_AD_REQUESTS = 43 + USER_MESSAGES_OFFERWALL_MESSAGES_SHOWN = 121 + USER_MESSAGES_OFFERWALL_SUCCESSFUL_ENGAGEMENTS = 122 + VIDEO_INTERACTION_AVERAGE_INTERACTION_RATE = 92 + VIDEO_INTERACTION_COLLAPSES = 93 + VIDEO_INTERACTION_EXPANDS = 95 + VIDEO_INTERACTION_FULL_SCREENS = 96 + VIDEO_INTERACTION_MUTES = 97 + VIDEO_INTERACTION_PAUSES = 98 + VIDEO_INTERACTION_RESUMES = 99 + VIDEO_INTERACTION_REWINDS = 100 + VIDEO_INTERACTION_UNMUTES = 101 + VIDEO_INTERACTION_VIDEO_SKIPS = 102 + VIDEO_REAL_TIME_CREATIVE_SERVES = 139 + VIDEO_REAL_TIME_ERROR_100_COUNT = 143 + VIDEO_REAL_TIME_ERROR_101_COUNT = 144 + VIDEO_REAL_TIME_ERROR_102_COUNT = 145 + VIDEO_REAL_TIME_ERROR_200_COUNT = 146 + 
VIDEO_REAL_TIME_ERROR_201_COUNT = 147 + VIDEO_REAL_TIME_ERROR_202_COUNT = 148 + VIDEO_REAL_TIME_ERROR_203_COUNT = 149 + VIDEO_REAL_TIME_ERROR_300_COUNT = 150 + VIDEO_REAL_TIME_ERROR_301_COUNT = 151 + VIDEO_REAL_TIME_ERROR_302_COUNT = 152 + VIDEO_REAL_TIME_ERROR_303_COUNT = 153 + VIDEO_REAL_TIME_ERROR_400_COUNT = 154 + VIDEO_REAL_TIME_ERROR_401_COUNT = 155 + VIDEO_REAL_TIME_ERROR_402_COUNT = 156 + VIDEO_REAL_TIME_ERROR_403_COUNT = 157 + VIDEO_REAL_TIME_ERROR_405_COUNT = 158 + VIDEO_REAL_TIME_ERROR_406_COUNT = 159 + VIDEO_REAL_TIME_ERROR_407_COUNT = 160 + VIDEO_REAL_TIME_ERROR_408_COUNT = 161 + VIDEO_REAL_TIME_ERROR_409_COUNT = 162 + VIDEO_REAL_TIME_ERROR_410_COUNT = 163 + VIDEO_REAL_TIME_ERROR_500_COUNT = 164 + VIDEO_REAL_TIME_ERROR_501_COUNT = 165 + VIDEO_REAL_TIME_ERROR_502_COUNT = 166 + VIDEO_REAL_TIME_ERROR_503_COUNT = 167 + VIDEO_REAL_TIME_ERROR_600_COUNT = 168 + VIDEO_REAL_TIME_ERROR_601_COUNT = 169 + VIDEO_REAL_TIME_ERROR_602_COUNT = 170 + VIDEO_REAL_TIME_ERROR_603_COUNT = 171 + VIDEO_REAL_TIME_ERROR_604_COUNT = 172 + VIDEO_REAL_TIME_ERROR_900_COUNT = 173 + VIDEO_REAL_TIME_ERROR_901_COUNT = 174 + VIDEO_REAL_TIME_IMPRESSIONS = 138 + VIDEO_REAL_TIME_MATCHED_QUERIES = 140 + VIDEO_REAL_TIME_TOTAL_ERROR_COUNT = 175 + VIDEO_REAL_TIME_TOTAL_QUERIES = 142 + VIDEO_REAL_TIME_UNMATCHED_QUERIES = 141 + VIDEO_VIEWERSHIP_AUTO_PLAYS = 103 + VIDEO_VIEWERSHIP_AVERAGE_VIEW_RATE = 104 + VIDEO_VIEWERSHIP_AVERAGE_VIEW_TIME = 105 + VIDEO_VIEWERSHIP_CLICK_TO_PLAYS = 106 + VIDEO_VIEWERSHIP_COMPLETES = 107 + VIDEO_VIEWERSHIP_COMPLETION_RATE = 108 + VIDEO_VIEWERSHIP_ENGAGED_VIEWS = 109 + VIDEO_VIEWERSHIP_FIRST_QUARTILES = 110 + VIDEO_VIEWERSHIP_MIDPOINTS = 111 + VIDEO_VIEWERSHIP_SKIP_BUTTONS_SHOWN = 112 + VIDEO_VIEWERSHIP_STARTS = 113 + VIDEO_VIEWERSHIP_THIRD_QUARTILES = 114 + VIDEO_VIEWERSHIP_TOTAL_ERROR_COUNT = 115 + VIDEO_VIEWERSHIP_TOTAL_ERROR_RATE = 94 + VIDEO_VIEWERSHIP_VIDEO_LENGTH = 116 + VIDEO_VIEWERSHIP_VIEW_THROUGH_RATE = 117 + YIELD_GROUP_ESTIMATED_CPM = 88 + 
YIELD_GROUP_ESTIMATED_REVENUE = 87 + YIELD_GROUP_IMPRESSIONS = 85 + YIELD_GROUP_MEDIATION_FILL_RATE = 89 + YIELD_GROUP_MEDIATION_MATCHED_QUERIES = 86 + YIELD_GROUP_MEDIATION_PASSBACKS = 118 + YIELD_GROUP_MEDIATION_THIRD_PARTY_ECPM = 90 + + class MetricValueType(proto.Enum): + r"""Possible metric value types to add. + + Values: + PRIMARY (0): + The values for the primary date_range. + PRIMARY_PERCENT_OF_TOTAL (1): + Each metrics' percent of the total for the primary + date_range. + COMPARISON (2): + The values for the comparison_date_range. + COMPARISON_PERCENT_OF_TOTAL (3): + Each metrics' percent of the total for the + comparison_date_range. + ABSOLUTE_CHANGE (4): + The absolute change between the primary and + comparison date ranges. + RELATIVE_CHANGE (5): + The relative change between the primary and + comparison date ranges. + """ + PRIMARY = 0 + PRIMARY_PERCENT_OF_TOTAL = 1 + COMPARISON = 2 + COMPARISON_PERCENT_OF_TOTAL = 3 + ABSOLUTE_CHANGE = 4 + RELATIVE_CHANGE = 5 + + class ReportType(proto.Enum): + r"""Supported report types. + + Values: + REPORT_TYPE_UNSPECIFIED (0): + Default value. This value is unused. + HISTORICAL (1): + Historical. + """ + REPORT_TYPE_UNSPECIFIED = 0 + HISTORICAL = 1 + + class Visibility(proto.Enum): + r"""The visibility of a report. + + Values: + HIDDEN (0): + Default value. Reports with hidden visibility + will not appear in the Ad Manager UI. + DRAFT (1): + Reports with draft visibility will appear in + the Ad Manager UI only if the user has + configured the UI to show them. + SAVED (2): + Reports with saved visibility will appear in + the Ad Manager UI by default. + """ + HIDDEN = 0 + DRAFT = 1 + SAVED = 2 + + class Value(proto.Message): + r"""Represents a single value in a report. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + int_value (int): + For integer values. + + This field is a member of `oneof`_ ``value``. + double_value (float): + For double values. + + This field is a member of `oneof`_ ``value``. + string_value (str): + For string values. + + This field is a member of `oneof`_ ``value``. + bool_value (bool): + For boolean values. + + This field is a member of `oneof`_ ``value``. + int_list_value (google.ads.admanager_v1.types.Report.Value.IntList): + For lists of integer values. + + This field is a member of `oneof`_ ``value``. + string_list_value (google.ads.admanager_v1.types.Report.Value.StringList): + For lists of string values. + + This field is a member of `oneof`_ ``value``. + bytes_value (bytes): + For bytes values. + + This field is a member of `oneof`_ ``value``. + """ + + class IntList(proto.Message): + r"""A list of integer values. + + Attributes: + values (MutableSequence[int]): + The values + """ + + values: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=1, + ) + + class StringList(proto.Message): + r"""A list of string values. 
+ + Attributes: + values (MutableSequence[str]): + The values + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + int_value: int = proto.Field( + proto.INT64, + number=1, + oneof="value", + ) + double_value: float = proto.Field( + proto.DOUBLE, + number=2, + oneof="value", + ) + string_value: str = proto.Field( + proto.STRING, + number=3, + oneof="value", + ) + bool_value: bool = proto.Field( + proto.BOOL, + number=4, + oneof="value", + ) + int_list_value: "Report.Value.IntList" = proto.Field( + proto.MESSAGE, + number=6, + oneof="value", + message="Report.Value.IntList", + ) + string_list_value: "Report.Value.StringList" = proto.Field( + proto.MESSAGE, + number=7, + oneof="value", + message="Report.Value.StringList", + ) + bytes_value: bytes = proto.Field( + proto.BYTES, + number=8, + oneof="value", + ) + + class Sort(proto.Message): + r"""Represents a sorting in a report. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field (google.ads.admanager_v1.types.Report.Field): + Required. A field (dimension or metric) to + sort by. + descending (bool): + Optional. The sort order. If true the sort + will be descending. + slice_ (google.ads.admanager_v1.types.Report.Slice): + Optional. Use to sort on a specific slice of + data. + + This field is a member of `oneof`_ ``_slice``. + time_period_index (int): + Optional. When using time period columns, use + this to sort on a specific column. + + This field is a member of `oneof`_ ``_time_period_index``. + metric_value_type (google.ads.admanager_v1.types.Report.MetricValueType): + Optional. Use to specify which metric value + type to sort on. Defaults to PRIMARY. + + This field is a member of `oneof`_ ``_metric_value_type``. 
+ """ + + field: "Report.Field" = proto.Field( + proto.MESSAGE, + number=1, + message="Report.Field", + ) + descending: bool = proto.Field( + proto.BOOL, + number=2, + ) + slice_: "Report.Slice" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="Report.Slice", + ) + time_period_index: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + metric_value_type: "Report.MetricValueType" = proto.Field( + proto.ENUM, + number=5, + optional=True, + enum="Report.MetricValueType", + ) + + class DataTable(proto.Message): + r"""A table containing report data including dimension and metric + values. + + """ + + class Row(proto.Message): + r"""A row of report data. + + Attributes: + dimension_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + The order of the dimension values is the same + as the order of the dimensions specified in the + request. + metric_value_groups (MutableSequence[google.ads.admanager_v1.types.Report.DataTable.MetricValueGroup]): + The length of the metric_value_groups field will be equal to + the length of the date_ranges field in the fetch response. + The metric_value_groups field is ordered such that each + index corresponds to the date_range at the same index. For + example, given date_ranges [x, y], metric_value_groups will + have a length of two. The first entry in metric_value_groups + represents the metrics for date x and the second entry in + metric_value_groups represents the metrics for date y. 
+ """ + + dimension_values: MutableSequence["Report.Value"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Report.Value", + ) + metric_value_groups: MutableSequence[ + "Report.DataTable.MetricValueGroup" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Report.DataTable.MetricValueGroup", + ) + + class MetricValueGroup(proto.Message): + r"""Contains all metric values requested for a single date range + and set of column dimension values (returned in the columns + field of the response). The order of the metrics in each field + corresponds to the order of the metrics specified in the + request. + + Attributes: + primary_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Data for the PRIMARY MetricValueType. + primary_percent_of_total_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Data for the PRIMARY_PERCENT_OF_TOTAL MetricValueType. + comparison_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Data for the COMPARISON MetricValueType. + comparison_percent_of_total_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Data for the COMPARISON_PERCENT_OF_TOTAL MetricValueType. + absolute_change_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Data for the ABSOLUTE_CHANGE MetricValueType. + relative_change_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Data for the RELATIVE_CHANGE MetricValueType. + flag_values (MutableSequence[bool]): + If true, the flag's conditions are met. If false, the flag's + conditions are not met. flag_values has the same length as + flags and index i of flag_values represents the flag at + index i of flags. 
+ """ + + primary_values: MutableSequence["Report.Value"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Report.Value", + ) + primary_percent_of_total_values: MutableSequence[ + "Report.Value" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Report.Value", + ) + comparison_values: MutableSequence["Report.Value"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Report.Value", + ) + comparison_percent_of_total_values: MutableSequence[ + "Report.Value" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Report.Value", + ) + absolute_change_values: MutableSequence[ + "Report.Value" + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="Report.Value", + ) + relative_change_values: MutableSequence[ + "Report.Value" + ] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="Report.Value", + ) + flag_values: MutableSequence[bool] = proto.RepeatedField( + proto.BOOL, + number=7, + ) + + class Field(proto.Message): + r"""A dimension or a metric in a report. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dimension (google.ads.admanager_v1.types.Report.Dimension): + The dimension this field represents. + + This field is a member of `oneof`_ ``field``. + metric (google.ads.admanager_v1.types.Report.Metric): + The metric this field represents. + + This field is a member of `oneof`_ ``field``. + """ + + dimension: "Report.Dimension" = proto.Field( + proto.ENUM, + number=1, + oneof="field", + enum="Report.Dimension", + ) + metric: "Report.Metric" = proto.Field( + proto.ENUM, + number=2, + oneof="field", + enum="Report.Metric", + ) + + class Slice(proto.Message): + r"""Use to specify a slice of data. 
+ + For example, in a report, to focus on just data from the US, specify + ``COUNTRY_NAME`` for dimension and value: ``"United States"``. + + Attributes: + dimension (google.ads.admanager_v1.types.Report.Dimension): + Required. The dimension to slice on. + value (google.ads.admanager_v1.types.Report.Value): + Required. The value of the dimension. + """ + + dimension: "Report.Dimension" = proto.Field( + proto.ENUM, + number=1, + enum="Report.Dimension", + ) + value: "Report.Value" = proto.Field( + proto.MESSAGE, + number=2, + message="Report.Value", + ) + + class Filter(proto.Message): + r"""A filter over one or more fields. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field_filter (google.ads.admanager_v1.types.Report.Filter.FieldFilter): + A filter on a single field. + + This field is a member of `oneof`_ ``type``. + not_filter (google.ads.admanager_v1.types.Report.Filter): + A filter whose result is negated. + + This field is a member of `oneof`_ ``type``. + and_filter (google.ads.admanager_v1.types.Report.Filter.FilterList): + A list of filters whose results are AND-ed. + + This field is a member of `oneof`_ ``type``. + or_filter (google.ads.admanager_v1.types.Report.Filter.FilterList): + A list of filters whose results are OR-ed. + + This field is a member of `oneof`_ ``type``. + """ + + class Operation(proto.Enum): + r"""Supported filter operations. + + Values: + IN (0): + For scalar operands, checks if the operand is + in the set of provided filter values. + + For list operands, checks if any element in the + operand is in the set of provided filter values. + + Default value. 
+ NOT_IN (1): + For scalar operands, checks that the operand + is not in the set of provided filter values. + + For list operands, checks that none of the + elements in the operand is in the set of + provided filter values. + CONTAINS (2): + For scalar string operands, checks if the + operand contains any of the provided filter + substrings. + + For string list operands, checks if any string + in the operand contains any of the provided + filter substrings. + NOT_CONTAINS (3): + For scalar string operands, checks that the + operand contains none of the provided filter + substrings. + + For string list operands, checks that none of + the strings in the operand contain none of the + provided filter substrings. + LESS_THAN (4): + Operand is less than the provided filter + value. + LESS_THAN_EQUALS (5): + Operand is less than or equal to provided + filter value. + GREATER_THAN (6): + Operand is greater than provided filter + value. + GREATER_THAN_EQUALS (7): + Operand is greater than or equal to provided + filter value. + BETWEEN (8): + Operand is between provided filter values. + MATCHES (9): + Operand matches against a regex or set of + regexes (one must match) + NOT_MATCHES (10): + Operand negative matches against a regex or + set of regexes (none must match) + """ + IN = 0 + NOT_IN = 1 + CONTAINS = 2 + NOT_CONTAINS = 3 + LESS_THAN = 4 + LESS_THAN_EQUALS = 5 + GREATER_THAN = 6 + GREATER_THAN_EQUALS = 7 + BETWEEN = 8 + MATCHES = 9 + NOT_MATCHES = 10 + + class FieldFilter(proto.Message): + r"""A filter on a specific field. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field (google.ads.admanager_v1.types.Report.Field): + Required. The field to filter on. + operation (google.ads.admanager_v1.types.Report.Filter.Operation): + Required. The operation of this filter. + values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Required. Values to filter to. 
+ slice_ (google.ads.admanager_v1.types.Report.Slice): + Optional. Use to filter on a specific slice + of data. + + This field is a member of `oneof`_ ``_slice``. + time_period_index (int): + Optional. When using time period columns, use + this to filter on a specific column. + + This field is a member of `oneof`_ ``_time_period_index``. + metric_value_type (google.ads.admanager_v1.types.Report.MetricValueType): + Optional. Use to specify which metric value + type to filter on. Defaults to PRIMARY. + + This field is a member of `oneof`_ ``_metric_value_type``. + """ + + field: "Report.Field" = proto.Field( + proto.MESSAGE, + number=1, + message="Report.Field", + ) + operation: "Report.Filter.Operation" = proto.Field( + proto.ENUM, + number=2, + enum="Report.Filter.Operation", + ) + values: MutableSequence["Report.Value"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Report.Value", + ) + slice_: "Report.Slice" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="Report.Slice", + ) + time_period_index: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + metric_value_type: "Report.MetricValueType" = proto.Field( + proto.ENUM, + number=6, + optional=True, + enum="Report.MetricValueType", + ) + + class FilterList(proto.Message): + r"""A list of filters. + + Attributes: + filters (MutableSequence[google.ads.admanager_v1.types.Report.Filter]): + Required. A list of filters. 
+ """ + + filters: MutableSequence["Report.Filter"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Report.Filter", + ) + + field_filter: "Report.Filter.FieldFilter" = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message="Report.Filter.FieldFilter", + ) + not_filter: "Report.Filter" = proto.Field( + proto.MESSAGE, + number=2, + oneof="type", + message="Report.Filter", + ) + and_filter: "Report.Filter.FilterList" = proto.Field( + proto.MESSAGE, + number=3, + oneof="type", + message="Report.Filter.FilterList", + ) + or_filter: "Report.Filter.FilterList" = proto.Field( + proto.MESSAGE, + number=4, + oneof="type", + message="Report.Filter.FilterList", + ) + + class Flag(proto.Message): + r"""A flag for a report. Flags are used show if certain thresholds are + met. Result rows that match the filter will have the corresponding + [MetricValueGroup.flagValues][MetricValueGroup] index set to true. + For more information about flags see: + https://support.google.com/admanager/answer/15079975 + + Attributes: + filters (MutableSequence[google.ads.admanager_v1.types.Report.Filter]): + Required. Filters to apply for the flag. + name (str): + Optional. Name of the flag. + The flag names RED, YELLOW, GREEN, BLUE, PURPLE, + and GREY correspond to the colored flags that + appear in the UI. The UI will not display flags + with other names, but they are available for use + by API clients. + """ + + filters: MutableSequence["Report.Filter"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Report.Filter", + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + + class DateRange(proto.Message): + r"""A date range for a report. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fixed (google.ads.admanager_v1.types.Report.DateRange.FixedDateRange): + A fixed date range. + + This field is a member of `oneof`_ ``date_range_type``. + relative (google.ads.admanager_v1.types.Report.DateRange.RelativeDateRange): + A relative date range. + + This field is a member of `oneof`_ ``date_range_type``. + """ + + class RelativeDateRange(proto.Enum): + r"""Options for relative date ranges. + + Values: + RELATIVE_DATE_RANGE_UNSPECIFIED (0): + Default value. This value is unused. + TODAY (1): + The date the report is run. + YESTERDAY (2): + The date a day before the date that the + report is run. + THIS_WEEK (3): + The full week in which this report is run. + Could include dates in the future. + THIS_WEEK_TO_DATE (29): + From the beginning of the calendar week + (Monday to Sunday) in which the up to and + including the day the report is run. + THIS_MONTH (4): + The full month in which this report is run. + Could include dates in the future. + THIS_MONTH_TO_DATE (26): + From the beginning of the calendar month in + which the report is run, to up to and including + the day the report is run. + THIS_QUARTER (5): + The full quarter in which this report is run. + Could include dates in the future. + THIS_QUARTER_TO_DATE (27): + From the beginning of the calendar quarter in + which the report is run, up to and including the + day the report is run. + THIS_YEAR (6): + The full year in which this report is run. + Could include dates in the future. + THIS_YEAR_TO_DATE (28): + From the beginning of the calendar year in + which the report is run, to up to and including + the day the report is run. + LAST_WEEK (7): + The entire previous calendar week, Monday to + Sunday (inclusive), preceding the calendar week + the report is run. + LAST_MONTH (8): + The entire previous calendar month preceding + the calendar month the report is run. 
+ LAST_QUARTER (9): + The entire previous calendar quarter + preceding the calendar quarter the report is + run. + LAST_YEAR (10): + The entire previous calendar year preceding + the calendar year the report is run. + LAST_7_DAYS (11): + The 7 days preceding the day the report is + run. + LAST_30_DAYS (12): + The 30 days preceding the day the report is + run. + LAST_60_DAYS (13): + The 60 days preceding the day the report is + run. + LAST_90_DAYS (14): + The 90 days preceding the day the report is + run. + LAST_180_DAYS (15): + The 180 days preceding the day the report is + run. + LAST_360_DAYS (16): + The 360 days preceding the day the report is + run. + LAST_365_DAYS (17): + The 365 days preceding the day the report is + run. + LAST_3_MONTHS (18): + The entire previous 3 calendar months + preceding the calendar month the report is run. + LAST_6_MONTHS (19): + The entire previous 6 calendar months + preceding the calendar month the report is run. + LAST_12_MONTHS (20): + The entire previous 6 calendar months + preceding the calendar month the report is run. + ALL_AVAILABLE (21): + From 3 years before the report is run, to the + day before the report is run, inclusive. + PREVIOUS_PERIOD (22): + Only valid when used in the comparison_date_range field. The + complete period preceding the date period provided in + date_range. + + In the case where date_range is a FixedDateRange of N days, + this will be a period of N days where the end date is the + date preceding the start date of the date_range. + + In the case where date_range is a RelativeDateRange, this + will be a period of the same time frame preceding the + date_range. In the case where the date_range does not + capture the full period because a report is run in the + middle of that period, this will still be the full preceding + period. For example, if date_range is THIS_WEEK, but the + report is run on a Wednesday, THIS_WEEK will be Monday - + Wednesday, but PREVIOUS_PERIOD will be Monday - Sunday. 
+ SAME_PERIOD_PREVIOUS_YEAR (24): + Only valid when used in the comparison_date_range field. The + period starting 1 year prior to the date period provided in + date_range. + + In the case where date_range is a FixedDateRange, this will + be a date range starting 1 year prior to the date_range + start date and ending 1 year prior to the date_range end + date. + + In the case where date_range is a RelativeDateRange, this + will be a period of the same time frame exactly 1 year prior + to the date_range. In the case where the date_range does not + capture the full period because a report is run in the + middle of that period, this will still be the full period 1 + year prior. For example, if date range is THIS_WEEK, but the + report is run on a Wednesday, THIS_WEEK will be Monday - + Wednesday, but SAME_PERIOD_PREVIOUS_YEAR will be Monday - + Sunday. + """ + RELATIVE_DATE_RANGE_UNSPECIFIED = 0 + TODAY = 1 + YESTERDAY = 2 + THIS_WEEK = 3 + THIS_WEEK_TO_DATE = 29 + THIS_MONTH = 4 + THIS_MONTH_TO_DATE = 26 + THIS_QUARTER = 5 + THIS_QUARTER_TO_DATE = 27 + THIS_YEAR = 6 + THIS_YEAR_TO_DATE = 28 + LAST_WEEK = 7 + LAST_MONTH = 8 + LAST_QUARTER = 9 + LAST_YEAR = 10 + LAST_7_DAYS = 11 + LAST_30_DAYS = 12 + LAST_60_DAYS = 13 + LAST_90_DAYS = 14 + LAST_180_DAYS = 15 + LAST_360_DAYS = 16 + LAST_365_DAYS = 17 + LAST_3_MONTHS = 18 + LAST_6_MONTHS = 19 + LAST_12_MONTHS = 20 + ALL_AVAILABLE = 21 + PREVIOUS_PERIOD = 22 + SAME_PERIOD_PREVIOUS_YEAR = 24 + + class FixedDateRange(proto.Message): + r"""A date range between two fixed dates (inclusive of end date). + + Attributes: + start_date (google.type.date_pb2.Date): + Required. The start date of this date range. + end_date (google.type.date_pb2.Date): + Required. The end date (inclusive) of this + date range. 
+ """ + + start_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + end_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=2, + message=date_pb2.Date, + ) + + fixed: "Report.DateRange.FixedDateRange" = proto.Field( + proto.MESSAGE, + number=1, + oneof="date_range_type", + message="Report.DateRange.FixedDateRange", + ) + relative: "Report.DateRange.RelativeDateRange" = proto.Field( + proto.ENUM, + number=2, + oneof="date_range_type", + enum="Report.DateRange.RelativeDateRange", + ) + name: str = proto.Field( proto.STRING, number=1, ) + report_id: int = proto.Field( + proto.INT64, + number=3, + ) + visibility: Visibility = proto.Field( + proto.ENUM, + number=2, + enum=Visibility, + ) + report_definition: "ReportDefinition" = proto.Field( + proto.MESSAGE, + number=4, + message="ReportDefinition", + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + locale: str = proto.Field( + proto.STRING, + number=8, + ) + schedule_options: "ScheduleOptions" = proto.Field( + proto.MESSAGE, + number=9, + message="ScheduleOptions", + ) -class ExportSavedReportRequest(proto.Message): - r"""Request proto for the configuration of a report run. +class RunReportRequest(proto.Message): + r"""Request message for a running a report. Attributes: - report (str): - The name of a particular saved report resource. - - A report will be run based on the specification of this - saved report. It must have the format of - "networks/{network_code}/reports/{report_id}". - format_ (google.ads.admanager_v1.types.ExportSavedReportRequest.Format): - Required. The export format requested. - include_report_properties (bool): - Whether or not to include the report - properties (e.g. 
network, user, date - generated...) in the generated report. - include_ids (bool): - Whether or not to include the IDs if there - are any (e.g. advertiser ID, order ID...) - present in the report. - include_totals_row (bool): - Whether or not to include a row containing - metric totals. - file_name (str): - The file name of report download. The file extension is - determined by export_format and gzip_compressed. - - Defaults to "DFP Report" if not specified. + name (str): + Required. The report to run. Format: + ``networks/{network_code}/reports/{report_id}`` """ - class Format(proto.Enum): - r"""Supported file formats. + name: str = proto.Field( + proto.STRING, + number=1, + ) + - Values: - FORMAT_UNSPECIFIED (0): - Default value. This value is unused. - CSV_DUMP (2): - Comma separated values meant to be used by - automated machine processing. - - Unlike other formats, the output is not - localized and there is no totals row by default. - XLSX (5): - The report file is generated as an Office - Open XML spreadsheet designed for Excel 2007+. - XML (6): - The report is generated as XML. - """ - FORMAT_UNSPECIFIED = 0 - CSV_DUMP = 2 - XLSX = 5 - XML = 6 +class RunReportMetadata(proto.Message): + r"""``RunReport`` operation metadata. + Attributes: + percent_complete (int): + An estimate of how close this report is to + being completed. Will always be 100 for failed + and completed reports. + report (str): + The result's parent report. + """ + + percent_complete: int = proto.Field( + proto.INT32, + number=2, + ) report: str = proto.Field( + proto.STRING, + number=4, + ) + + +class RunReportResponse(proto.Message): + r"""Response message for a completed ``RunReport`` operation. + + Attributes: + report_result (str): + The unique name of the generated result. Use with + ``FetchReportResultRows`` to retrieve data. 
+ """ + + report_result: str = proto.Field( proto.STRING, number=1, ) - format_: Format = proto.Field( - proto.ENUM, + + +class GetReportRequest(proto.Message): + r"""Request object for ``GetReport`` method. + + Attributes: + name (str): + Required. The resource name of the report. Format: + ``networks/{network_code}/reports/{report_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListReportsRequest(proto.Message): + r"""Request object for ``ListReports`` method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of reports. + Format: ``networks/{network_code}`` + page_size (int): + Optional. The maximum number of ``Reports`` to return. The + service may return fewer than this value. If unspecified, at + most 50 ``Reports`` will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListReports`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListReports`` must match the call that provided the page + token. + filter (str): + Optional. Expression to filter the response. + See syntax details at + https://developers.google.com/ad-manager/api/beta/filters + order_by (str): + Optional. Expression to specify sorting + order. See syntax details at + https://developers.google.com/ad-manager/api/beta/filters#order + skip (int): + Optional. Number of individual resources to + skip while paginating. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + skip: int = proto.Field( + proto.INT32, + number=6, + ) + + +class ListReportsResponse(proto.Message): + r"""Response object for ``ListReportsResponse`` containing matching + ``Report`` objects. + + Attributes: + reports (MutableSequence[google.ads.admanager_v1.types.Report]): + The ``Report`` objects from the specified network. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + total_size (int): + Total number of ``Report`` objects. If a filter was included + in the request, this reflects the total number after the + filtering is applied. + + ``total_size`` will not be calculated in the response unless + it has been included in a response field mask. The response + field mask can be provided to the method by using the URL + parameter ``$fields`` or ``fields``, or by using the + HTTP/gRPC header ``X-Goog-FieldMask``. + + For more information, see + https://developers.google.com/ad-manager/api/beta/field-masks + """ + + @property + def raw_page(self): + return self + + reports: MutableSequence["Report"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Report", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class CreateReportRequest(proto.Message): + r"""Request object for ``CreateReport`` method. + + Attributes: + parent (str): + Required. The parent resource where this ``Report`` will be + created. Format: ``networks/{network_code}`` + report (google.ads.admanager_v1.types.Report): + Required. The ``Report`` to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + report: "Report" = proto.Field( + proto.MESSAGE, + number=2, + message="Report", + ) + + +class UpdateReportRequest(proto.Message): + r"""Request object for ``UpdateReport`` method. + + Attributes: + report (google.ads.admanager_v1.types.Report): + Required. The ``Report`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + report: "Report" = proto.Field( + proto.MESSAGE, + number=1, + message="Report", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class FetchReportResultRowsRequest(proto.Message): + r"""The request message for the fetch report result rows + endpoint. + + Attributes: + name (str): + The report result being fetched. Format: + ``networks/{network_code}/reports/{report_id}/results/{report_result_id}`` + page_size (int): + Optional. The maximum number of rows to + return. The service may return fewer than this + value. If unspecified, at most 1,000 rows will + be returned. The maximum value is 10,000; values + above 10,000 will be reduced to 10,000. + page_token (str): + Optional. A page token, received from a previous + ``FetchReportResultRows`` call. Provide this to retrieve the + second and subsequent batches of rows. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchReportResultRowsResponse(proto.Message): + r"""The response message for the fetch report result rows + endpoint. + + Attributes: + rows (MutableSequence[google.ads.admanager_v1.types.Report.DataTable.Row]): + Up to ``page_size`` rows of report data. + run_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the report was scheduled to + run. 
For non-scheduled reports, this is the time + at which the report was requested to be run. + date_ranges (MutableSequence[google.ads.admanager_v1.types.Report.DateRange.FixedDateRange]): + The computed fixed date ranges this report includes. Only + returned with the first page of results (when page_token is + not included in the request). + comparison_date_ranges (MutableSequence[google.ads.admanager_v1.types.Report.DateRange.FixedDateRange]): + The computed comparison fixed date ranges this report + includes. Only returned with the first page of results (when + page_token is not included in the request). + total_row_count (int): + The total number of rows available from this report. Useful + for pagination. Only returned with the first page of results + (when page_token is not included in the request). + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + rows: MutableSequence["Report.DataTable.Row"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Report.DataTable.Row", + ) + run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, number=2, - enum=Format, + message=timestamp_pb2.Timestamp, ) - include_report_properties: bool = proto.Field( - proto.BOOL, + date_ranges: MutableSequence[ + "Report.DateRange.FixedDateRange" + ] = proto.RepeatedField( + proto.MESSAGE, number=3, + message="Report.DateRange.FixedDateRange", ) - include_ids: bool = proto.Field( - proto.BOOL, + comparison_date_ranges: MutableSequence[ + "Report.DateRange.FixedDateRange" + ] = proto.RepeatedField( + proto.MESSAGE, number=4, + message="Report.DateRange.FixedDateRange", ) - include_totals_row: bool = proto.Field( - proto.BOOL, + total_row_count: int = proto.Field( + proto.INT32, number=5, ) - file_name: str = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=6, ) -class 
ExportSavedReportMetadata(proto.Message): - r"""The message stored in the - google.longrunning.Operation.metadata field. Contains metadata - regarding this execution. +class ReportDefinition(proto.Message): + r"""The definition of how a report should be run. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - result_id (int): - The result generated in this report run. + dimensions (MutableSequence[google.ads.admanager_v1.types.Report.Dimension]): + Required. The list of dimensions to report + on. If empty, the report will have no + dimensions, and any metrics will be totals. + metrics (MutableSequence[google.ads.admanager_v1.types.Report.Metric]): + Required. The list of metrics to report on. + If empty, the report will have no metrics. + filters (MutableSequence[google.ads.admanager_v1.types.Report.Filter]): + Optional. The filters for this report. + time_zone (str): + Optional. The time zone the date range is defined in for + this report. Defaults to publisher's time zone if not + specified. Time zone in IANA format. Acceptable values + depend on the report type. Publisher time zone is always + accepted. Use "America/Los_Angeles" for pacific time, or + "Etc/UTC" for UTC. + currency_code (str): + Optional. The ISO 4217 currency code for this + report. Defaults to publisher currency code if + not specified. + date_range (google.ads.admanager_v1.types.Report.DateRange): + Required. The primary date range of this + report. + comparison_date_range (google.ads.admanager_v1.types.Report.DateRange): + Optional. The comparison date range of this + report. If unspecified, the report will not have + any comparison metrics. + + This field is a member of `oneof`_ ``_comparison_date_range``. + custom_dimension_key_ids (MutableSequence[int]): + Optional. Custom Dimension keys that represent + `CUSTOM_DIMENSION_*` dimensions. The index of this repeated + field corresponds to the index on each dimension. 
For + example, custom_dimension_key_ids[0] describes + CUSTOM_DIMENSION_0_VALUE_ID and CUSTOM_DIMENSION_0_VALUE. + line_item_custom_field_ids (MutableSequence[int]): + Optional. Custom field IDs that represent + `LINE_ITEM_CUSTOM_FIELD_*` dimensions. The index of this + repeated field corresponds to the index on each dimension. + For example, line_item_custom_field_ids[0] describes + LINE_ITEM_CUSTOM_FIELD_0_OPTION_ID and + LINE_ITEM_CUSTOM_FIELD_0_VALUE. + order_custom_field_ids (MutableSequence[int]): + Optional. Custom field IDs that represent + `ORDER_CUSTOM_FIELD_*` dimensions. The index of this repeated + field corresponds to the index on each dimension. For + example, order_custom_field_ids[0] describes + ORDER_CUSTOM_FIELD_0_OPTION_ID and + ORDER_CUSTOM_FIELD_0_VALUE. + creative_custom_field_ids (MutableSequence[int]): + Optional. Custom field IDs that represent + `CREATIVE_CUSTOM_FIELD_*` dimensions. The index of this + repeated field corresponds to the index on each dimension. + For example, creative_custom_field_ids[0] describes + CREATIVE_CUSTOM_FIELD_0_OPTION_ID and + CREATIVE_CUSTOM_FIELD_0_VALUE. + report_type (google.ads.admanager_v1.types.Report.ReportType): + Required. The type of this report. + time_period_column (google.ads.admanager_v1.types.Report.TimePeriodColumn): + Optional. Include a time period column to introduce + comparison columns in the report for each generated period. + For example, set to "QUARTERS" here to have a column for + each quarter present in the primary date range. If "PREVIOUS + PERIOD" is specified in comparison_date_range, then each + quarter column will also include comparison values for its + relative previous quarter. + flags (MutableSequence[google.ads.admanager_v1.types.Report.Flag]): + Optional. List of flags for this report. Used + to flag rows in a result set based on a set of + defined filters. + sorts (MutableSequence[google.ads.admanager_v1.types.Report.Sort]): + Optional. 
Default sorts to apply to this + report. """ - result_id: int = proto.Field( + dimensions: MutableSequence["Report.Dimension"] = proto.RepeatedField( + proto.ENUM, + number=1, + enum="Report.Dimension", + ) + metrics: MutableSequence["Report.Metric"] = proto.RepeatedField( + proto.ENUM, + number=2, + enum="Report.Metric", + ) + filters: MutableSequence["Report.Filter"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Report.Filter", + ) + time_zone: str = proto.Field( + proto.STRING, + number=4, + ) + currency_code: str = proto.Field( + proto.STRING, + number=5, + ) + date_range: "Report.DateRange" = proto.Field( + proto.MESSAGE, + number=6, + message="Report.DateRange", + ) + comparison_date_range: "Report.DateRange" = proto.Field( + proto.MESSAGE, + number=9, + optional=True, + message="Report.DateRange", + ) + custom_dimension_key_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=7, + ) + line_item_custom_field_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=11, + ) + order_custom_field_ids: MutableSequence[int] = proto.RepeatedField( proto.INT64, + number=12, + ) + creative_custom_field_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=13, + ) + report_type: "Report.ReportType" = proto.Field( + proto.ENUM, + number=8, + enum="Report.ReportType", + ) + time_period_column: "Report.TimePeriodColumn" = proto.Field( + proto.ENUM, + number=10, + enum="Report.TimePeriodColumn", + ) + flags: MutableSequence["Report.Flag"] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="Report.Flag", + ) + sorts: MutableSequence["Report.Sort"] = proto.RepeatedField( + proto.MESSAGE, + number=15, + message="Report.Sort", + ) + + +class ScheduleOptions(proto.Message): + r"""The options for a scheduled report. + + Attributes: + schedule (google.ads.admanager_v1.types.Schedule): + Information pertaining to schedule itself. 
+ delivery_condition (google.ads.admanager_v1.types.ScheduleOptions.DeliveryCondition): + Option for when to deliver the scheduled + report. + flags (MutableSequence[google.ads.admanager_v1.types.Report.Flag]): + Optional. The flags evaluated when + ReportDeliveryOption.WHEN_FLAG_PRESENT is specified. + """ + + class DeliveryCondition(proto.Enum): + r"""Condition for when to email the scheduled report. + + Values: + NEVER (0): + Never deliver report. + ALWAYS (1): + Always deliver report. + WHEN_FLAG_CONDITIONS_MET (2): + Deliver report when flag's conditions are + met. + """ + NEVER = 0 + ALWAYS = 1 + WHEN_FLAG_CONDITIONS_MET = 2 + + schedule: "Schedule" = proto.Field( + proto.MESSAGE, number=1, + message="Schedule", ) + delivery_condition: DeliveryCondition = proto.Field( + proto.ENUM, + number=2, + enum=DeliveryCondition, + ) + flags: MutableSequence["Report.Flag"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Report.Flag", + ) + + +class Schedule(proto.Message): + r"""The schedule for the report + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. -class ExportSavedReportResponse(proto.Message): - r"""Message included in the longrunning Operation result.response - field when the report completes successfully. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - download_url (str): - The link to the exported file. + weekly_schedule (google.ads.admanager_v1.types.Schedule.WeeklySchedule): + Days of week to schedule report run. + + This field is a member of `oneof`_ ``frequency_schedule``. + monthly_schedule (google.ads.admanager_v1.types.Schedule.MonthlySchedule): + Days of month to schedule report run. + + This field is a member of `oneof`_ ``frequency_schedule``. 
+ start_date (google.type.date_pb2.Date): + Date for the first run of the report. + end_date (google.type.date_pb2.Date): + Date for the final run of the report. + frequency (google.ads.admanager_v1.types.Schedule.Frequency): + Frequency to run report. + start_time (google.type.timeofday_pb2.TimeOfDay): + Indicates start time for schedule to run Will use the + time_zone from ``ReportDefinition``. Defaults to the + publisher's time zone if not specified. + + For HOURLY, TWO_TIMES_DAILY, THREE_TIMES_DAILY, or + FOUR_TIMES_DAILY, this will be the time of day that the + first report will run on the first day. For example, if the + start time is 2:00 PM, and the frequency is + THREE_TIMES_DAILY, the first day will have reports scheduled + at 2:00 PM, 10:00 PM. Each subsequent day will have reports + scheduled at 6:00 AM, 2:00 PM, 10:00 PM. """ - download_url: str = proto.Field( - proto.STRING, + class Frequency(proto.Enum): + r"""Frequency to run report. + + Values: + FREQUENCY_UNSPECIFIED (0): + No Frequency specified. + HOURLY (1): + Schedule report to run every hour. + TWO_TIMES_DAILY (2): + Schedule report to run twice a day (every 12 + hours). + THREE_TIMES_DAILY (3): + Schedule report to run three times a day + (every 8 hours). + FOUR_TIMES_DAILY (4): + Schedule report to run four times a day + (every 6 hours). + DAILY (5): + Schedule report to run on a daily basis. + WEEKLY (6): + Schedule report to run on a weekly basis. + MONTHLY (7): + Schedule report to run on a monthly basis. + """ + FREQUENCY_UNSPECIFIED = 0 + HOURLY = 1 + TWO_TIMES_DAILY = 2 + THREE_TIMES_DAILY = 3 + FOUR_TIMES_DAILY = 4 + DAILY = 5 + WEEKLY = 6 + MONTHLY = 7 + + class WeeklySchedule(proto.Message): + r"""Days of week to schedule report run. + + Attributes: + weekly_scheduled_days (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Specifies days of the week on which to run + report. 
+ """ + + weekly_scheduled_days: MutableSequence[ + dayofweek_pb2.DayOfWeek + ] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + + class MonthlySchedule(proto.Message): + r"""Days of Month to schedule report run. + + Attributes: + monthly_scheduled_days (MutableSequence[int]): + Specifies days of the month to run report. + Range is from 1-31. Will ignore days that are + not valid for the given month. + """ + + monthly_scheduled_days: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=1, + ) + + weekly_schedule: WeeklySchedule = proto.Field( + proto.MESSAGE, + number=6, + oneof="frequency_schedule", + message=WeeklySchedule, + ) + monthly_schedule: MonthlySchedule = proto.Field( + proto.MESSAGE, + number=7, + oneof="frequency_schedule", + message=MonthlySchedule, + ) + start_date: date_pb2.Date = proto.Field( + proto.MESSAGE, number=1, + message=date_pb2.Date, + ) + end_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=2, + message=date_pb2.Date, + ) + frequency: Frequency = proto.Field( + proto.ENUM, + number=3, + enum=Frequency, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=4, + message=timeofday_pb2.TimeOfDay, ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/role_enums.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/role_enums.py new file mode 100644 index 000000000000..a872c78e51e8 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/role_enums.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "RoleStatusEnum", + }, +) + + +class RoleStatusEnum(proto.Message): + r"""Wrapper message for + [RoleStatus][google.ads.admanager.v1.RoleStatusEnum.RoleStatus] + + """ + + class RoleStatus(proto.Enum): + r"""The status of the role. + + Values: + ROLE_STATUS_UNSPECIFIED (0): + No value specified. + ACTIVE (1): + Role is active. + INACTIVE (2): + Role is inactive. + """ + ROLE_STATUS_UNSPECIFIED = 0 + ACTIVE = 1 + INACTIVE = 2 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/role_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/role_messages.py new file mode 100644 index 000000000000..49acac6291d4 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/role_messages.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.admanager_v1.types import role_enums + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Role", + }, +) + + +class Role(proto.Message): + r"""The ``Role`` resource. + + Attributes: + name (str): + Identifier. The resource name of the ``Role``. Format: + ``networks/{network_code}/roles/{role_id}`` + role_id (int): + Output only. ``Role`` ID. + display_name (str): + Required. The display name of the ``Role``. + description (str): + Optional. The description of the ``Role``. + built_in (bool): + Output only. Whether the ``Role`` is a built-in or custom + user role. + status (google.ads.admanager_v1.types.RoleStatusEnum.RoleStatus): + Output only. The status of the ``Role``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + role_id: int = proto.Field( + proto.INT64, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + built_in: bool = proto.Field( + proto.BOOL, + number=5, + ) + status: role_enums.RoleStatusEnum.RoleStatus = proto.Field( + proto.ENUM, + number=6, + enum=role_enums.RoleStatusEnum.RoleStatus, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/role_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/role_service.py index ee18f5215bf8..8d6d40291cc2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/role_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/role_service.py @@ -19,10 +19,11 @@ import proto # type: ignore +from google.ads.admanager_v1.types import role_messages + __protobuf__ = proto.module( 
package="google.ads.admanager.v1", manifest={ - "Role", "GetRoleRequest", "ListRolesRequest", "ListRolesResponse", @@ -30,23 +31,8 @@ ) -class Role(proto.Message): - r"""The Role resource. - - Attributes: - name (str): - Identifier. The resource name of the Role. Format: - ``networks/{network_code}/roles/{role_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - class GetRoleRequest(proto.Message): - r"""Request object for GetRole method. + r"""Request object for ``GetRole`` method. Attributes: name (str): @@ -61,18 +47,17 @@ class GetRoleRequest(proto.Message): class ListRolesRequest(proto.Message): - r"""Request object for ListRoles method. + r"""Request object for ``ListRoles`` method. Attributes: parent (str): Required. The parent, which owns this collection of Roles. Format: ``networks/{network_code}`` page_size (int): - Optional. The maximum number of Roles to - return. The service may return fewer than this - value. If unspecified, at most 50 roles will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. + Optional. The maximum number of ``Roles`` to return. The + service may return fewer than this value. If unspecified, at + most 50 ``Roles`` will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. page_token (str): Optional. A page token, received from a previous ``ListRoles`` call. Provide this to retrieve the subsequent @@ -121,20 +106,20 @@ class ListRolesRequest(proto.Message): class ListRolesResponse(proto.Message): - r"""Response object for ListRolesRequest containing matching Role - resources. + r"""Response object for ``ListRolesRequest`` containing matching + ``Role`` objects. Attributes: roles (MutableSequence[google.ads.admanager_v1.types.Role]): - The Role from the specified network. + The ``Role`` objects from the specified network. next_page_token (str): A token, which can be sent as ``page_token`` to retrieve the next page. 
If this field is omitted, there are no subsequent pages. total_size (int): - Total number of Roles. If a filter was included in the - request, this reflects the total number after the filtering - is applied. + Total number of ``Role`` objects. If a filter was included + in the request, this reflects the total number after the + filtering is applied. ``total_size`` will not be calculated in the response unless it has been included in a response field mask. The response @@ -150,10 +135,10 @@ class ListRolesResponse(proto.Message): def raw_page(self): return self - roles: MutableSequence["Role"] = proto.RepeatedField( + roles: MutableSequence[role_messages.Role] = proto.RepeatedField( proto.MESSAGE, number=1, - message="Role", + message=role_messages.Role, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/size.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/size.py index 5f552ab7d794..33b7dcb841b8 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/size.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/size.py @@ -19,11 +19,12 @@ import proto # type: ignore +from google.ads.admanager_v1.types import size_type_enum + __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ "Size", - "SizeTypeEnum", }, ) @@ -58,59 +59,11 @@ class Size(proto.Message): proto.INT32, number=2, ) - size_type: "SizeTypeEnum.SizeType" = proto.Field( + size_type: size_type_enum.SizeTypeEnum.SizeType = proto.Field( proto.ENUM, number=3, - enum="SizeTypeEnum.SizeType", + enum=size_type_enum.SizeTypeEnum.SizeType, ) -class SizeTypeEnum(proto.Message): - r"""Wrapper message for - [SizeType][google.ads.admanager.v1.SizeTypeEnum.SizeType]. - - """ - - class SizeType(proto.Enum): - r"""The different Size types for an ad. - - Values: - SIZE_TYPE_UNSPECIFIED (0): - Default value. This value is unused. 
- PIXEL (1): - Dimension based size, an actual height and - width in pixels. - ASPECT_RATIO (2): - Size is expressed as a ratio. For example, - 4:1 could be met by a 100 x 25 sized image. - INTERSTITIAL (3): - Out-of-page (Interstitial) size that is not - related to the slot it is served. This must be - used with 1x1 size. - IGNORED (4): - Size is ignored. This must be used with 1x1 - size. - NATIVE (5): - Native size, which is a function of the how - the client renders the creative. This must be - used with 1x1 size. - FLUID (6): - Fluid size. Automatically sizes the ad by - filling the width of the enclosing column and - adjusting the height as appropriate. This must - be used with 1x1 size. - AUDIO (7): - Audio size. Used with audio ads. This must be - used with 1x1 size. - """ - SIZE_TYPE_UNSPECIFIED = 0 - PIXEL = 1 - ASPECT_RATIO = 2 - INTERSTITIAL = 3 - IGNORED = 4 - NATIVE = 5 - FLUID = 6 - AUDIO = 7 - - __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/size_type_enum.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/size_type_enum.py new file mode 100644 index 000000000000..8ad905e923a9 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/size_type_enum.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "SizeTypeEnum", + }, +) + + +class SizeTypeEnum(proto.Message): + r"""Wrapper message for + [SizeType][google.ads.admanager.v1.SizeTypeEnum.SizeType]. + + """ + + class SizeType(proto.Enum): + r"""The different Size types for an ad. + + Values: + SIZE_TYPE_UNSPECIFIED (0): + Default value. This value is unused. + PIXEL (1): + Dimension based size, an actual height and + width in pixels. + ASPECT_RATIO (2): + Size is expressed as a ratio. For example, + 4:1 could be met by a 100 x 25 sized image. + INTERSTITIAL (3): + Out-of-page (Interstitial) size that is not + related to the slot it is served. This must be + used with 1x1 size. + IGNORED (4): + Size is ignored. This must be used with 1x1 + size. + NATIVE (5): + Native size, which is a function of the how + the client renders the creative. This must be + used with 1x1 size. + FLUID (6): + Fluid size. Automatically sizes the ad by + filling the width of the enclosing column and + adjusting the height as appropriate. This must + be used with 1x1 size. + AUDIO (7): + Audio size. Used with audio ads. This must be + used with 1x1 size. 
+ """ + SIZE_TYPE_UNSPECIFIED = 0 + PIXEL = 1 + ASPECT_RATIO = 2 + INTERSTITIAL = 3 + IGNORED = 4 + NATIVE = 5 + FLUID = 6 + AUDIO = 7 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_messages.py new file mode 100644 index 000000000000..642311d83701 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_messages.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.admanager_v1.types import taxonomy_type_enum + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "TaxonomyCategory", + }, +) + + +class TaxonomyCategory(proto.Message): + r"""The ``TaxonomyCategory`` resource. + + Attributes: + name (str): + Identifier. The resource name of the ``TaxonomyCategory``. + Format: + ``networks/{network_code}/taxonomyCategories/{taxonomy_category_id}`` + taxonomy_category_id (int): + Output only. ``TaxonomyCategory`` ID. + display_name (str): + Output only. Display name of the ``TaxonomyCategory``. + grouping_only (bool): + Output only. Whether this ``TaxonomyCategory`` only serves + to group its children. 
+ parent_taxonomy_category_id (int): + Output only. The ID of the parent category this + ``TaxonomyCategory`` descends from. + taxonomy_type (google.ads.admanager_v1.types.TaxonomyTypeEnum.TaxonomyType): + Output only. The taxonomy that this ``TaxonomyCategory`` + belongs to. + ancestor_names (MutableSequence[str]): + Output only. The list of names of the ancestors of this + ``TaxonomyCategory``. + ancestor_taxonomy_category_ids (MutableSequence[int]): + Output only. The list of IDs of the ancestors of this + ``TaxonomyCategory``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + taxonomy_category_id: int = proto.Field( + proto.INT64, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + grouping_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + parent_taxonomy_category_id: int = proto.Field( + proto.INT64, + number=6, + ) + taxonomy_type: taxonomy_type_enum.TaxonomyTypeEnum.TaxonomyType = proto.Field( + proto.ENUM, + number=9, + enum=taxonomy_type_enum.TaxonomyTypeEnum.TaxonomyType, + ) + ancestor_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + ancestor_taxonomy_category_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=8, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_service.py similarity index 63% rename from packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_service.py rename to packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_service.py index 8604504f58e5..cc3cdbc83ccb 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_service.py @@ -19,39 +19,25 @@ import proto # type: ignore +from 
google.ads.admanager_v1.types import taxonomy_category_messages + __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "AdPartner", - "GetAdPartnerRequest", - "ListAdPartnersRequest", - "ListAdPartnersResponse", + "GetTaxonomyCategoryRequest", + "ListTaxonomyCategoriesRequest", + "ListTaxonomyCategoriesResponse", }, ) -class AdPartner(proto.Message): - r"""The AdPartner resource. - - Attributes: - name (str): - Identifier. The resource name of the AdPartner. Format: - ``networks/{network_code}/adPartners/{ad_partner_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetAdPartnerRequest(proto.Message): - r"""Request object for GetAdPartner method. +class GetTaxonomyCategoryRequest(proto.Message): + r"""Request object for ``GetTaxonomyCategory`` method. Attributes: name (str): - Required. The resource name of the AdPartner. Format: - ``networks/{network_code}/adPartners/{ad_partner_id}`` + Required. The resource name of the TaxonomyCategory. Format: + ``networks/{network_code}/taxonomyCategories/{taxonomy_category_id}`` """ name: str = proto.Field( @@ -60,27 +46,27 @@ class GetAdPartnerRequest(proto.Message): ) -class ListAdPartnersRequest(proto.Message): - r"""Request object for ListAdPartners method. +class ListTaxonomyCategoriesRequest(proto.Message): + r"""Request object for ``ListTaxonomyCategories`` method. Attributes: parent (str): Required. The parent, which owns this collection of - AdPartners. Format: ``networks/{network_code}`` + TaxonomyCategories. Format: ``networks/{network_code}`` page_size (int): - Optional. The maximum number of AdPartners to - return. The service may return fewer than this - value. If unspecified, at most 50 AdPartners - will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. + Optional. The maximum number of ``TaxonomyCategories`` to + return. The service may return fewer than this value. 
If + unspecified, at most 50 ``TaxonomyCategories`` will be + returned. The maximum value is 1000; values above 1000 will + be coerced to 1000. page_token (str): Optional. A page token, received from a previous - ``ListAdPartners`` call. Provide this to retrieve the - subsequent page. + ``ListTaxonomyCategories`` call. Provide this to retrieve + the subsequent page. When paginating, all other parameters provided to - ``ListAdPartners`` must match the call that provided the - page token. + ``ListTaxonomyCategories`` must match the call that provided + the page token. filter (str): Optional. Expression to filter the response. See syntax details at @@ -120,21 +106,21 @@ class ListAdPartnersRequest(proto.Message): ) -class ListAdPartnersResponse(proto.Message): - r"""Response object for ListAdPartnersRequest containing matching - AdPartner resources. +class ListTaxonomyCategoriesResponse(proto.Message): + r"""Response object for ``ListTaxonomyCategoriesRequest`` containing + matching ``TaxonomyCategory`` objects. Attributes: - ad_partners (MutableSequence[google.ads.admanager_v1.types.AdPartner]): - The AdPartner from the specified network. + taxonomy_categories (MutableSequence[google.ads.admanager_v1.types.TaxonomyCategory]): + The ``TaxonomyCategory`` objects. next_page_token (str): A token, which can be sent as ``page_token`` to retrieve the next page. If this field is omitted, there are no subsequent pages. total_size (int): - Total number of AdPartners. If a filter was included in the - request, this reflects the total number after the filtering - is applied. + Total number of ``TaxonomyCategory`` objects. If a filter + was included in the request, this reflects the total number + after the filtering is applied. ``total_size`` will not be calculated in the response unless it has been included in a response field mask. 
The response @@ -150,10 +136,12 @@ class ListAdPartnersResponse(proto.Message): def raw_page(self): return self - ad_partners: MutableSequence["AdPartner"] = proto.RepeatedField( + taxonomy_categories: MutableSequence[ + taxonomy_category_messages.TaxonomyCategory + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message="AdPartner", + message=taxonomy_category_messages.TaxonomyCategory, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_type_enum.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_type_enum.py new file mode 100644 index 000000000000..c564f256d0ad --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_type_enum.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "TaxonomyTypeEnum", + }, +) + + +class TaxonomyTypeEnum(proto.Message): + r"""Wrapper for + [TaxonomyType][google.ads.admanager.v1.TaxonomyTypeEnum.TaxonomyType] + + """ + + class TaxonomyType(proto.Enum): + r"""The taxonomy type of the IAB defined taxonomies. + Used for Publisher provided signals. 
+ + Values: + TAXONOMY_TYPE_UNSPECIFIED (0): + Unspecified/not present + TAXONOMY_IAB_AUDIENCE_1_1 (3): + The IAB Audience Taxonomy v1.1. + TAXONOMY_IAB_CONTENT_2_1 (4): + The IAB Content Taxonomy v2.1. + TAXONOMY_IAB_CONTENT_2_2 (6): + The IAB Content Taxonomy v2.2. + TAXONOMY_IAB_CONTENT_3_0 (5): + The IAB Content Taxonomy v3.0. + TAXONOMY_GOOGLE_STRUCTURED_VIDEO_1_0 (7): + The PPS structured video signals taxonomy. + """ + TAXONOMY_TYPE_UNSPECIFIED = 0 + TAXONOMY_IAB_AUDIENCE_1_1 = 3 + TAXONOMY_IAB_CONTENT_2_1 = 4 + TAXONOMY_IAB_CONTENT_2_2 = 6 + TAXONOMY_IAB_CONTENT_3_0 = 5 + TAXONOMY_GOOGLE_STRUCTURED_VIDEO_1_0 = 7 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/team_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/team_messages.py new file mode 100644 index 000000000000..1016c5f2437d --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/team_messages.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Team", + }, +) + + +class Team(proto.Message): + r"""A Team defines a grouping of users and what entities they + have access to. 
+ + Attributes: + name (str): + Identifier. The resource name of the ``Team``. Format: + ``networks/{network_code}/teams/{team_id}`` + team_id (int): + Output only. The unique ID of the Team. This + value is assigned by Google. Teams that are + created by Google will have negative IDs. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + team_id: int = proto.Field( + proto.INT64, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/team_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/team_service.py deleted file mode 100644 index 2e2a2ed2d4ca..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/team_service.py +++ /dev/null @@ -1,168 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "Team", - "GetTeamRequest", - "ListTeamsRequest", - "ListTeamsResponse", - }, -) - - -class Team(proto.Message): - r"""The Team resource. - - Attributes: - name (str): - Identifier. The resource name of the Team. 
Format: - ``networks/{network_code}/teams/{team_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetTeamRequest(proto.Message): - r"""Request object for GetTeam method. - - Attributes: - name (str): - Required. The resource name of the Team. Format: - ``networks/{network_code}/teams/{team_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListTeamsRequest(proto.Message): - r"""Request object for ListTeams method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of Teams. - Format: ``networks/{network_code}`` - page_size (int): - Optional. The maximum number of Teams to - return. The service may return fewer than this - value. If unspecified, at most 50 teams will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. A page token, received from a previous - ``ListTeams`` call. Provide this to retrieve the subsequent - page. - - When paginating, all other parameters provided to - ``ListTeams`` must match the call that provided the page - token. - filter (str): - Optional. Expression to filter the response. - See syntax details at - https://developers.google.com/ad-manager/api/beta/filters - order_by (str): - Optional. Expression to specify sorting - order. See syntax details at - https://developers.google.com/ad-manager/api/beta/filters#order - skip (int): - Optional. Number of individual resources to - skip while paginating. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - skip: int = proto.Field( - proto.INT32, - number=6, - ) - - -class ListTeamsResponse(proto.Message): - r"""Response object for ListTeamsRequest containing matching Team - resources. - - Attributes: - teams (MutableSequence[google.ads.admanager_v1.types.Team]): - The Team from the specified network. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - total_size (int): - Total number of Teams. If a filter was included in the - request, this reflects the total number after the filtering - is applied. - - ``total_size`` will not be calculated in the response unless - it has been included in a response field mask. The response - field mask can be provided to the method by using the URL - parameter ``$fields`` or ``fields``, or by using the - HTTP/gRPC header ``X-Goog-FieldMask``. 
- - For more information, see - https://developers.google.com/ad-manager/api/beta/field-masks - """ - - @property - def raw_page(self): - return self - - teams: MutableSequence["Team"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Team", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - total_size: int = proto.Field( - proto.INT32, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/time_unit_enum.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/time_unit_enum.py new file mode 100644 index 000000000000..f3a738c516c4 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/time_unit_enum.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "TimeUnitEnum", + }, +) + + +class TimeUnitEnum(proto.Message): + r"""Wrapper message for TimeUnit.""" + + class TimeUnit(proto.Enum): + r"""Unit of time for the frequency cap. + + Values: + TIME_UNIT_UNSPECIFIED (0): + Default value. This value is unused. 
+ MINUTE (1): + Minute + HOUR (2): + Hour + DAY (3): + Day + WEEK (4): + Week + MONTH (5): + Month + LIFETIME (6): + Lifetime + POD (7): + Per pod of ads in a video stream. Only valid for entities in + a VIDEO_PLAYER environment. + STREAM (8): + Per video stream. Only valid for entities in a VIDEO_PLAYER + environment. + """ + TIME_UNIT_UNSPECIFIED = 0 + MINUTE = 1 + HOUR = 2 + DAY = 3 + WEEK = 4 + MONTH = 5 + LIFETIME = 6 + POD = 7 + STREAM = 8 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/user_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/user_messages.py new file mode 100644 index 000000000000..90d2d72e5e60 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/user_messages.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "User", + }, +) + + +class User(proto.Message): + r"""The User resource. + + Attributes: + name (str): + Identifier. The resource name of the User. Format: + ``networks/{network_code}/users/{user_id}`` + user_id (int): + Output only. ``User`` ID. + display_name (str): + Required. The name of the User. 
It has a + maximum length of 128 characters. + email (str): + Required. The email or login of the User. In + order to create a new user, you must already + have a Google Account. + role (str): + Required. The unique Role ID of the User. + Roles that are created by Google will have + negative IDs. + active (bool): + Output only. Specifies whether or not the + User is active. An inactive user cannot log in + to the system or perform any operations. + external_id (str): + Optional. An identifier for the User that is + meaningful to the publisher. This attribute has + a maximum length of 255 characters. + service_account (bool): + Output only. Whether the user is an OAuth2 + service account user. Service account users can + only be added through the UI. + orders_ui_local_time_zone (str): + Optional. The IANA Time Zone Database time zone, e.g. + "America/New_York", used in the orders and line items UI for + this User. If not provided, the UI then defaults to using + the Network's timezone. This setting only affects the UI for + this user and does not affect the timezone of any dates and + times returned in API responses. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + user_id: int = proto.Field( + proto.INT64, + number=10, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + email: str = proto.Field( + proto.STRING, + number=3, + ) + role: str = proto.Field( + proto.STRING, + number=4, + ) + active: bool = proto.Field( + proto.BOOL, + number=6, + ) + external_id: str = proto.Field( + proto.STRING, + number=7, + ) + service_account: bool = proto.Field( + proto.BOOL, + number=8, + ) + orders_ui_local_time_zone: str = proto.Field( + proto.STRING, + number=9, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/user_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/user_service.py index 1715ffb0e7c3..931e36a32f12 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/user_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/user_service.py @@ -22,93 +22,11 @@ __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "User", "GetUserRequest", - "ListUsersRequest", - "ListUsersResponse", }, ) -class User(proto.Message): - r"""The User resource. - - Attributes: - name (str): - Identifier. The resource name of the User. Format: - ``networks/{network_code}/users/{user_id}`` - user_id (int): - Output only. ``User`` ID. - display_name (str): - Required. The name of the User. It has a - maximum length of 128 characters. - email (str): - Required. The email or login of the User. In - order to create a new user, you must already - have a Google Account. - role (str): - Required. The unique Role ID of the User. - Roles that are created by Google will have - negative IDs. - active (bool): - Output only. Specifies whether or not the - User is active. An inactive user cannot log in - to the system or perform any operations. - external_id (str): - Optional. 
An identifier for the User that is - meaningful to the publisher. This attribute has - a maximum length of 255 characters. - service_account (bool): - Output only. Whether the user is an OAuth2 - service account user. Service account users can - only be added through the UI. - orders_ui_local_time_zone (str): - Optional. The IANA Time Zone Database time zone, e.g. - "America/New_York", used in the orders and line items UI for - this User. If not provided, the UI then defaults to using - the Network's timezone. This setting only affects the UI for - this user and does not affect the timezone of any dates and - times returned in API responses. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - user_id: int = proto.Field( - proto.INT64, - number=10, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - email: str = proto.Field( - proto.STRING, - number=3, - ) - role: str = proto.Field( - proto.STRING, - number=4, - ) - active: bool = proto.Field( - proto.BOOL, - number=6, - ) - external_id: str = proto.Field( - proto.STRING, - number=7, - ) - service_account: bool = proto.Field( - proto.BOOL, - number=8, - ) - orders_ui_local_time_zone: str = proto.Field( - proto.STRING, - number=9, - ) - - class GetUserRequest(proto.Message): r"""Request object for GetUser method. @@ -124,109 +42,4 @@ class GetUserRequest(proto.Message): ) -class ListUsersRequest(proto.Message): - r"""Request object for ListUsers method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of Users. - Format: ``networks/{network_code}`` - page_size (int): - Optional. The maximum number of Users to - return. The service may return fewer than this - value. If unspecified, at most 50 users will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. A page token, received from a previous - ``ListUsers`` call. Provide this to retrieve the subsequent - page. 
- - When paginating, all other parameters provided to - ``ListUsers`` must match the call that provided the page - token. - filter (str): - Optional. Expression to filter the response. - See syntax details at - https://developers.google.com/ad-manager/api/beta/filters - order_by (str): - Optional. Expression to specify sorting - order. See syntax details at - https://developers.google.com/ad-manager/api/beta/filters#order - skip (int): - Optional. Number of individual resources to - skip while paginating. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - skip: int = proto.Field( - proto.INT32, - number=6, - ) - - -class ListUsersResponse(proto.Message): - r"""Response object for ListUsersRequest containing matching User - resources. - - Attributes: - users (MutableSequence[google.ads.admanager_v1.types.User]): - The User from the specified network. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - total_size (int): - Total number of Users. If a filter was included in the - request, this reflects the total number after the filtering - is applied. - - ``total_size`` will not be calculated in the response unless - it has been included in a response field mask. The response - field mask can be provided to the method by using the URL - parameter ``$fields`` or ``fields``, or by using the - HTTP/gRPC header ``X-Goog-FieldMask``. 
- - For more information, see - https://developers.google.com/ad-manager/api/beta/field-masks - """ - - @property - def raw_page(self): - return self - - users: MutableSequence["User"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="User", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - total_size: int = proto.Field( - proto.INT32, - number=3, - ) - - __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_line_item_service_list_line_items_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_unit_service_list_ad_unit_sizes_sync.py similarity index 81% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_line_item_service_list_line_items_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_unit_service_list_ad_unit_sizes_sync.py index 822761814315..eb38242abe84 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_line_item_service_list_line_items_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_unit_service_list_ad_unit_sizes_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListLineItems +# Snippet for ListAdUnitSizes # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_LineItemService_ListLineItems_sync] +# [START admanager_v1_generated_AdUnitService_ListAdUnitSizes_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,20 +34,20 @@ from google.ads import admanager_v1 -def sample_list_line_items(): +def sample_list_ad_unit_sizes(): # Create a client - client = admanager_v1.LineItemServiceClient() + client = admanager_v1.AdUnitServiceClient() # Initialize request argument(s) - request = admanager_v1.ListLineItemsRequest( + request = admanager_v1.ListAdUnitSizesRequest( parent="parent_value", ) # Make the request - page_result = client.list_line_items(request=request) + page_result = client.list_ad_unit_sizes(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_LineItemService_ListLineItems_sync] +# [END admanager_v1_generated_AdUnitService_ListAdUnitSizes_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_create_entity_signals_mappings_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_create_entity_signals_mappings_sync.py new file mode 100644 index 000000000000..d74224f42761 --- /dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_create_entity_signals_mappings_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for BatchCreateEntitySignalsMappings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_EntitySignalsMappingService_BatchCreateEntitySignalsMappings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_batch_create_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + requests = admanager_v1.CreateEntitySignalsMappingRequest() + requests.parent = "parent_value" + requests.entity_signals_mapping.audience_segment_id = 1980 + requests.entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.BatchCreateEntitySignalsMappingsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_create_entity_signals_mappings(request=request) + + # Handle the response + print(response) + +# [END admanager_v1_generated_EntitySignalsMappingService_BatchCreateEntitySignalsMappings_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_update_entity_signals_mappings_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_update_entity_signals_mappings_sync.py new file mode 100644 index 000000000000..a1fd341565ed --- 
/dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_update_entity_signals_mappings_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateEntitySignalsMappings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_EntitySignalsMappingService_BatchUpdateEntitySignalsMappings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_batch_update_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + requests = admanager_v1.UpdateEntitySignalsMappingRequest() + requests.entity_signals_mapping.audience_segment_id = 1980 + requests.entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.BatchUpdateEntitySignalsMappingsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_update_entity_signals_mappings(request=request) + + # Handle the response + print(response) + +# [END admanager_v1_generated_EntitySignalsMappingService_BatchUpdateEntitySignalsMappings_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_create_entity_signals_mapping_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_create_entity_signals_mapping_sync.py new file mode 100644 index 000000000000..f2802f0f727b --- /dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_create_entity_signals_mapping_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntitySignalsMapping +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_EntitySignalsMappingService_CreateEntitySignalsMapping_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_create_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + entity_signals_mapping = admanager_v1.EntitySignalsMapping() + entity_signals_mapping.audience_segment_id = 1980 + entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.CreateEntitySignalsMappingRequest( + parent="parent_value", + entity_signals_mapping=entity_signals_mapping, + ) + + # Make the request + response = client.create_entity_signals_mapping(request=request) + + # Handle the response + print(response) + +# [END admanager_v1_generated_EntitySignalsMappingService_CreateEntitySignalsMapping_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_get_entity_signals_mapping_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_get_entity_signals_mapping_sync.py 
new file mode 100644 index 000000000000..071cc603a204 --- /dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_get_entity_signals_mapping_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntitySignalsMapping +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_EntitySignalsMappingService_GetEntitySignalsMapping_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_get_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + request = admanager_v1.GetEntitySignalsMappingRequest( + name="name_value", + ) + + # Make the request + response = client.get_entity_signals_mapping(request=request) + + # Handle the response + print(response) + +# [END admanager_v1_generated_EntitySignalsMappingService_GetEntitySignalsMapping_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_list_entity_signals_mappings_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_list_entity_signals_mappings_sync.py new file mode 100644 index 000000000000..530858bf5334 --- /dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_list_entity_signals_mappings_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntitySignalsMappings +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_EntitySignalsMappingService_ListEntitySignalsMappings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_list_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + request = admanager_v1.ListEntitySignalsMappingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entity_signals_mappings(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END admanager_v1_generated_EntitySignalsMappingService_ListEntitySignalsMappings_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_update_entity_signals_mapping_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_update_entity_signals_mapping_sync.py new file mode 100644 index 000000000000..4a869eafe22f --- /dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_update_entity_signals_mapping_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntitySignalsMapping +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_EntitySignalsMappingService_UpdateEntitySignalsMapping_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_update_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + entity_signals_mapping = admanager_v1.EntitySignalsMapping() + entity_signals_mapping.audience_segment_id = 1980 + entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.UpdateEntitySignalsMappingRequest( + entity_signals_mapping=entity_signals_mapping, + ) + + # Make the request + response = client.update_entity_signals_mapping(request=request) + + # Handle the response + print(response) + +# [END admanager_v1_generated_EntitySignalsMappingService_UpdateEntitySignalsMapping_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_label_service_get_label_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_network_service_list_networks_sync.py similarity index 81% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_label_service_get_label_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_network_service_list_networks_sync.py index f0d7ab8c7c3c..534cec07a4ec 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_label_service_get_label_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_network_service_list_networks_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetLabel +# Snippet for ListNetworks # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_LabelService_GetLabel_sync] +# [START admanager_v1_generated_NetworkService_ListNetworks_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,19 +34,18 @@ from google.ads import admanager_v1 -def sample_get_label(): +def sample_list_networks(): # Create a client - client = admanager_v1.LabelServiceClient() + client = admanager_v1.NetworkServiceClient() # Initialize request argument(s) - request = admanager_v1.GetLabelRequest( - name="name_value", + request = admanager_v1.ListNetworksRequest( ) # Make the request - response = client.get_label(request=request) + response = client.list_networks(request=request) # Handle the response print(response) -# [END admanager_v1_generated_LabelService_GetLabel_sync] +# [END admanager_v1_generated_NetworkService_ListNetworks_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_create_report_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_create_report_sync.py new file mode 100644 index 000000000000..dd81bba668bf --- /dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_create_report_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_ReportService_CreateReport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_create_report(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + report = admanager_v1.Report() + report.report_definition.dimensions = ['CUSTOM_DIMENSION_9_VALUE'] + report.report_definition.metrics = ['YIELD_GROUP_MEDIATION_THIRD_PARTY_ECPM'] + report.report_definition.report_type = "HISTORICAL" + + request = admanager_v1.CreateReportRequest( + parent="parent_value", + report=report, + ) + + # Make the request + response = client.create_report(request=request) + + # Handle the response + print(response) + +# [END admanager_v1_generated_ReportService_CreateReport_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_creative_service_list_creatives_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_fetch_report_result_rows_sync.py similarity index 79% rename from 
packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_creative_service_list_creatives_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_fetch_report_result_rows_sync.py index 95328d799b08..7c366358878e 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_creative_service_list_creatives_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_fetch_report_result_rows_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListCreatives +# Snippet for FetchReportResultRows # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_CreativeService_ListCreatives_sync] +# [START admanager_v1_generated_ReportService_FetchReportResultRows_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,20 +34,19 @@ from google.ads import admanager_v1 -def sample_list_creatives(): +def sample_fetch_report_result_rows(): # Create a client - client = admanager_v1.CreativeServiceClient() + client = admanager_v1.ReportServiceClient() # Initialize request argument(s) - request = admanager_v1.ListCreativesRequest( - parent="parent_value", + request = admanager_v1.FetchReportResultRowsRequest( ) # Make the request - page_result = client.list_creatives(request=request) + page_result = client.fetch_report_result_rows(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_CreativeService_ListCreatives_sync] +# [END admanager_v1_generated_ReportService_FetchReportResultRows_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_get_team_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_get_report_sync.py similarity index 82% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_get_team_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_get_report_sync.py index 00fdea6f590b..b52631c8aa86 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_get_team_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_get_report_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetTeam +# Snippet for GetReport # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_TeamService_GetTeam_sync] +# [START admanager_v1_generated_ReportService_GetReport_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,19 +34,19 @@ from google.ads import admanager_v1 -def sample_get_team(): +def sample_get_report(): # Create a client - client = admanager_v1.TeamServiceClient() + client = admanager_v1.ReportServiceClient() # Initialize request argument(s) - request = admanager_v1.GetTeamRequest( + request = admanager_v1.GetReportRequest( name="name_value", ) # Make the request - response = client.get_team(request=request) + response = client.get_report(request=request) # Handle the response print(response) -# [END admanager_v1_generated_TeamService_GetTeam_sync] +# [END admanager_v1_generated_ReportService_GetReport_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_user_service_list_users_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_list_reports_sync.py similarity index 82% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_user_service_list_users_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_list_reports_sync.py index 81f549f0e5bb..451d5fa0a1ff 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_user_service_list_users_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_list_reports_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListUsers +# Snippet for ListReports # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_UserService_ListUsers_sync] +# [START admanager_v1_generated_ReportService_ListReports_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,20 +34,20 @@ from google.ads import admanager_v1 -def sample_list_users(): +def sample_list_reports(): # Create a client - client = admanager_v1.UserServiceClient() + client = admanager_v1.ReportServiceClient() # Initialize request argument(s) - request = admanager_v1.ListUsersRequest( + request = admanager_v1.ListReportsRequest( parent="parent_value", ) # Make the request - page_result = client.list_users(request=request) + page_result = client.list_reports(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_UserService_ListUsers_sync] +# [END admanager_v1_generated_ReportService_ListReports_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_export_saved_report_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_run_report_sync.py similarity index 82% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_export_saved_report_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_run_report_sync.py index 745769ffb3a9..d49b2529ce79 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_export_saved_report_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_run_report_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for ExportSavedReport +# Snippet for RunReport # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_ReportService_ExportSavedReport_sync] +# [START admanager_v1_generated_ReportService_RunReport_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,17 +34,17 @@ from google.ads import admanager_v1 -def sample_export_saved_report(): +def sample_run_report(): # Create a client client = admanager_v1.ReportServiceClient() # Initialize request argument(s) - request = admanager_v1.ExportSavedReportRequest( - format_="XML", + request = admanager_v1.RunReportRequest( + name="name_value", ) # Make the request - operation = client.export_saved_report(request=request) + operation = client.run_report(request=request) print("Waiting for operation to complete...") @@ -53,4 +53,4 @@ def sample_export_saved_report(): # Handle the response print(response) -# [END admanager_v1_generated_ReportService_ExportSavedReport_sync] +# [END admanager_v1_generated_ReportService_RunReport_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_update_report_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_update_report_sync.py new file mode 100644 index 000000000000..334c10534208 --- /dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_update_report_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_ReportService_UpdateReport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_update_report(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + report = admanager_v1.Report() + report.report_definition.dimensions = ['CUSTOM_DIMENSION_9_VALUE'] + report.report_definition.metrics = ['YIELD_GROUP_MEDIATION_THIRD_PARTY_ECPM'] + report.report_definition.report_type = "HISTORICAL" + + request = admanager_v1.UpdateReportRequest( + report=report, + ) + + # Make the request + response = client.update_report(request=request) + + # Handle the response + print(response) + +# [END admanager_v1_generated_ReportService_UpdateReport_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_partner_service_get_ad_partner_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_taxonomy_category_service_get_taxonomy_category_sync.py similarity index 78% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_partner_service_get_ad_partner_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_taxonomy_category_service_get_taxonomy_category_sync.py index ccf7c243f194..5b3de6b5f618 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_partner_service_get_ad_partner_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_taxonomy_category_service_get_taxonomy_category_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetAdPartner +# Snippet for GetTaxonomyCategory # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_AdPartnerService_GetAdPartner_sync] +# [START admanager_v1_generated_TaxonomyCategoryService_GetTaxonomyCategory_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,19 +34,19 @@ from google.ads import admanager_v1 -def sample_get_ad_partner(): +def sample_get_taxonomy_category(): # Create a client - client = admanager_v1.AdPartnerServiceClient() + client = admanager_v1.TaxonomyCategoryServiceClient() # Initialize request argument(s) - request = admanager_v1.GetAdPartnerRequest( + request = admanager_v1.GetTaxonomyCategoryRequest( name="name_value", ) # Make the request - response = client.get_ad_partner(request=request) + response = client.get_taxonomy_category(request=request) # Handle the response print(response) -# [END admanager_v1_generated_AdPartnerService_GetAdPartner_sync] +# [END admanager_v1_generated_TaxonomyCategoryService_GetTaxonomyCategory_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_partner_service_list_ad_partners_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_taxonomy_category_service_list_taxonomy_categories_sync.py similarity index 78% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_partner_service_list_ad_partners_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_taxonomy_category_service_list_taxonomy_categories_sync.py index 41b026a5771f..bde804ae22a1 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_partner_service_list_ad_partners_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_taxonomy_category_service_list_taxonomy_categories_sync.py @@ -15,7 +15,7 @@ # # Generated 
code. DO NOT EDIT! # -# Snippet for ListAdPartners +# Snippet for ListTaxonomyCategories # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_AdPartnerService_ListAdPartners_sync] +# [START admanager_v1_generated_TaxonomyCategoryService_ListTaxonomyCategories_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,20 +34,20 @@ from google.ads import admanager_v1 -def sample_list_ad_partners(): +def sample_list_taxonomy_categories(): # Create a client - client = admanager_v1.AdPartnerServiceClient() + client = admanager_v1.TaxonomyCategoryServiceClient() # Initialize request argument(s) - request = admanager_v1.ListAdPartnersRequest( + request = admanager_v1.ListTaxonomyCategoriesRequest( parent="parent_value", ) # Make the request - page_result = client.list_ad_partners(request=request) + page_result = client.list_taxonomy_categories(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_AdPartnerService_ListAdPartners_sync] +# [END admanager_v1_generated_TaxonomyCategoryService_ListTaxonomyCategories_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json b/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json index 5f13252c91c9..d59199618c62 100644 --- a/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json +++ b/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json @@ -8,29 +8,29 @@ ], "language": "PYTHON", "name": "google-ads-admanager", - "version": "0.1.2" + "version": "0.2.0" }, "snippets": [ { "canonical": true, 
"clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.AdPartnerServiceClient", - "shortName": "AdPartnerServiceClient" + "fullName": "google.ads.admanager_v1.AdUnitServiceClient", + "shortName": "AdUnitServiceClient" }, - "fullName": "google.ads.admanager_v1.AdPartnerServiceClient.get_ad_partner", + "fullName": "google.ads.admanager_v1.AdUnitServiceClient.get_ad_unit", "method": { - "fullName": "google.ads.admanager.v1.AdPartnerService.GetAdPartner", + "fullName": "google.ads.admanager.v1.AdUnitService.GetAdUnit", "service": { - "fullName": "google.ads.admanager.v1.AdPartnerService", - "shortName": "AdPartnerService" + "fullName": "google.ads.admanager.v1.AdUnitService", + "shortName": "AdUnitService" }, - "shortName": "GetAdPartner" + "shortName": "GetAdUnit" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetAdPartnerRequest" + "type": "google.ads.admanager_v1.types.GetAdUnitRequest" }, { "name": "name", @@ -49,14 +49,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.AdPartner", - "shortName": "get_ad_partner" + "resultType": "google.ads.admanager_v1.types.AdUnit", + "shortName": "get_ad_unit" }, - "description": "Sample for GetAdPartner", - "file": "admanager_v1_generated_ad_partner_service_get_ad_partner_sync.py", + "description": "Sample for GetAdUnit", + "file": "admanager_v1_generated_ad_unit_service_get_ad_unit_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_AdPartnerService_GetAdPartner_sync", + "regionTag": "admanager_v1_generated_AdUnitService_GetAdUnit_sync", "segments": [ { "end": 51, @@ -89,87 +89,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_ad_partner_service_get_ad_partner_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.ads.admanager_v1.AdPartnerServiceClient", - "shortName": "AdPartnerServiceClient" - }, - "fullName": 
"google.ads.admanager_v1.AdPartnerServiceClient.list_ad_partners", - "method": { - "fullName": "google.ads.admanager.v1.AdPartnerService.ListAdPartners", - "service": { - "fullName": "google.ads.admanager.v1.AdPartnerService", - "shortName": "AdPartnerService" - }, - "shortName": "ListAdPartners" - }, - "parameters": [ - { - "name": "request", - "type": "google.ads.admanager_v1.types.ListAdPartnersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.ads.admanager_v1.services.ad_partner_service.pagers.ListAdPartnersPager", - "shortName": "list_ad_partners" - }, - "description": "Sample for ListAdPartners", - "file": "admanager_v1_generated_ad_partner_service_list_ad_partners_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_AdPartnerService_ListAdPartners_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "admanager_v1_generated_ad_partner_service_list_ad_partners_sync.py" + "title": "admanager_v1_generated_ad_unit_service_get_ad_unit_sync.py" }, { "canonical": true, @@ -178,22 +98,22 @@ "fullName": "google.ads.admanager_v1.AdUnitServiceClient", "shortName": "AdUnitServiceClient" }, - "fullName": "google.ads.admanager_v1.AdUnitServiceClient.get_ad_unit", + "fullName": "google.ads.admanager_v1.AdUnitServiceClient.list_ad_unit_sizes", "method": { - "fullName": "google.ads.admanager.v1.AdUnitService.GetAdUnit", + "fullName": 
"google.ads.admanager.v1.AdUnitService.ListAdUnitSizes", "service": { "fullName": "google.ads.admanager.v1.AdUnitService", "shortName": "AdUnitService" }, - "shortName": "GetAdUnit" + "shortName": "ListAdUnitSizes" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetAdUnitRequest" + "type": "google.ads.admanager_v1.types.ListAdUnitSizesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -209,22 +129,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.AdUnit", - "shortName": "get_ad_unit" + "resultType": "google.ads.admanager_v1.services.ad_unit_service.pagers.ListAdUnitSizesPager", + "shortName": "list_ad_unit_sizes" }, - "description": "Sample for GetAdUnit", - "file": "admanager_v1_generated_ad_unit_service_get_ad_unit_sync.py", + "description": "Sample for ListAdUnitSizes", + "file": "admanager_v1_generated_ad_unit_service_list_ad_unit_sizes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_AdUnitService_GetAdUnit_sync", + "regionTag": "admanager_v1_generated_AdUnitService_ListAdUnitSizes_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -244,12 +164,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_ad_unit_service_get_ad_unit_sync.py" + "title": "admanager_v1_generated_ad_unit_service_list_ad_unit_sizes_sync.py" }, { "canonical": true, @@ -495,22 +415,22 @@ "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.ContactServiceClient", - "shortName": "ContactServiceClient" + "fullName": "google.ads.admanager_v1.CustomFieldServiceClient", + "shortName": "CustomFieldServiceClient" }, - "fullName": "google.ads.admanager_v1.ContactServiceClient.get_contact", + "fullName": 
"google.ads.admanager_v1.CustomFieldServiceClient.get_custom_field", "method": { - "fullName": "google.ads.admanager.v1.ContactService.GetContact", + "fullName": "google.ads.admanager.v1.CustomFieldService.GetCustomField", "service": { - "fullName": "google.ads.admanager.v1.ContactService", - "shortName": "ContactService" + "fullName": "google.ads.admanager.v1.CustomFieldService", + "shortName": "CustomFieldService" }, - "shortName": "GetContact" + "shortName": "GetCustomField" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetContactRequest" + "type": "google.ads.admanager_v1.types.GetCustomFieldRequest" }, { "name": "name", @@ -529,14 +449,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Contact", - "shortName": "get_contact" + "resultType": "google.ads.admanager_v1.types.CustomField", + "shortName": "get_custom_field" }, - "description": "Sample for GetContact", - "file": "admanager_v1_generated_contact_service_get_contact_sync.py", + "description": "Sample for GetCustomField", + "file": "admanager_v1_generated_custom_field_service_get_custom_field_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_ContactService_GetContact_sync", + "regionTag": "admanager_v1_generated_CustomFieldService_GetCustomField_sync", "segments": [ { "end": 51, @@ -569,28 +489,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_contact_service_get_contact_sync.py" + "title": "admanager_v1_generated_custom_field_service_get_custom_field_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.ContactServiceClient", - "shortName": "ContactServiceClient" + "fullName": "google.ads.admanager_v1.CustomFieldServiceClient", + "shortName": "CustomFieldServiceClient" }, - "fullName": "google.ads.admanager_v1.ContactServiceClient.list_contacts", + "fullName": 
"google.ads.admanager_v1.CustomFieldServiceClient.list_custom_fields", "method": { - "fullName": "google.ads.admanager.v1.ContactService.ListContacts", + "fullName": "google.ads.admanager.v1.CustomFieldService.ListCustomFields", "service": { - "fullName": "google.ads.admanager.v1.ContactService", - "shortName": "ContactService" + "fullName": "google.ads.admanager.v1.CustomFieldService", + "shortName": "CustomFieldService" }, - "shortName": "ListContacts" + "shortName": "ListCustomFields" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListContactsRequest" + "type": "google.ads.admanager_v1.types.ListCustomFieldsRequest" }, { "name": "parent", @@ -609,14 +529,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.contact_service.pagers.ListContactsPager", - "shortName": "list_contacts" + "resultType": "google.ads.admanager_v1.services.custom_field_service.pagers.ListCustomFieldsPager", + "shortName": "list_custom_fields" }, - "description": "Sample for ListContacts", - "file": "admanager_v1_generated_contact_service_list_contacts_sync.py", + "description": "Sample for ListCustomFields", + "file": "admanager_v1_generated_custom_field_service_list_custom_fields_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_ContactService_ListContacts_sync", + "regionTag": "admanager_v1_generated_CustomFieldService_ListCustomFields_sync", "segments": [ { "end": 52, @@ -649,28 +569,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_contact_service_list_contacts_sync.py" + "title": "admanager_v1_generated_custom_field_service_list_custom_fields_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CreativeServiceClient", - "shortName": "CreativeServiceClient" + "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient", + "shortName": "CustomTargetingKeyServiceClient" }, - 
"fullName": "google.ads.admanager_v1.CreativeServiceClient.get_creative", + "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient.get_custom_targeting_key", "method": { - "fullName": "google.ads.admanager.v1.CreativeService.GetCreative", + "fullName": "google.ads.admanager.v1.CustomTargetingKeyService.GetCustomTargetingKey", "service": { - "fullName": "google.ads.admanager.v1.CreativeService", - "shortName": "CreativeService" + "fullName": "google.ads.admanager.v1.CustomTargetingKeyService", + "shortName": "CustomTargetingKeyService" }, - "shortName": "GetCreative" + "shortName": "GetCustomTargetingKey" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetCreativeRequest" + "type": "google.ads.admanager_v1.types.GetCustomTargetingKeyRequest" }, { "name": "name", @@ -689,14 +609,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Creative", - "shortName": "get_creative" + "resultType": "google.ads.admanager_v1.types.CustomTargetingKey", + "shortName": "get_custom_targeting_key" }, - "description": "Sample for GetCreative", - "file": "admanager_v1_generated_creative_service_get_creative_sync.py", + "description": "Sample for GetCustomTargetingKey", + "file": "admanager_v1_generated_custom_targeting_key_service_get_custom_targeting_key_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CreativeService_GetCreative_sync", + "regionTag": "admanager_v1_generated_CustomTargetingKeyService_GetCustomTargetingKey_sync", "segments": [ { "end": 51, @@ -729,28 +649,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_creative_service_get_creative_sync.py" + "title": "admanager_v1_generated_custom_targeting_key_service_get_custom_targeting_key_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CreativeServiceClient", - "shortName": "CreativeServiceClient" + "fullName": 
"google.ads.admanager_v1.CustomTargetingKeyServiceClient", + "shortName": "CustomTargetingKeyServiceClient" }, - "fullName": "google.ads.admanager_v1.CreativeServiceClient.list_creatives", + "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient.list_custom_targeting_keys", "method": { - "fullName": "google.ads.admanager.v1.CreativeService.ListCreatives", + "fullName": "google.ads.admanager.v1.CustomTargetingKeyService.ListCustomTargetingKeys", "service": { - "fullName": "google.ads.admanager.v1.CreativeService", - "shortName": "CreativeService" + "fullName": "google.ads.admanager.v1.CustomTargetingKeyService", + "shortName": "CustomTargetingKeyService" }, - "shortName": "ListCreatives" + "shortName": "ListCustomTargetingKeys" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListCreativesRequest" + "type": "google.ads.admanager_v1.types.ListCustomTargetingKeysRequest" }, { "name": "parent", @@ -769,14 +689,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.creative_service.pagers.ListCreativesPager", - "shortName": "list_creatives" + "resultType": "google.ads.admanager_v1.services.custom_targeting_key_service.pagers.ListCustomTargetingKeysPager", + "shortName": "list_custom_targeting_keys" }, - "description": "Sample for ListCreatives", - "file": "admanager_v1_generated_creative_service_list_creatives_sync.py", + "description": "Sample for ListCustomTargetingKeys", + "file": "admanager_v1_generated_custom_targeting_key_service_list_custom_targeting_keys_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CreativeService_ListCreatives_sync", + "regionTag": "admanager_v1_generated_CustomTargetingKeyService_ListCustomTargetingKeys_sync", "segments": [ { "end": 52, @@ -809,28 +729,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_creative_service_list_creatives_sync.py" + "title": 
"admanager_v1_generated_custom_targeting_key_service_list_custom_targeting_keys_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CustomFieldServiceClient", - "shortName": "CustomFieldServiceClient" + "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient", + "shortName": "CustomTargetingValueServiceClient" }, - "fullName": "google.ads.admanager_v1.CustomFieldServiceClient.get_custom_field", + "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient.get_custom_targeting_value", "method": { - "fullName": "google.ads.admanager.v1.CustomFieldService.GetCustomField", + "fullName": "google.ads.admanager.v1.CustomTargetingValueService.GetCustomTargetingValue", "service": { - "fullName": "google.ads.admanager.v1.CustomFieldService", - "shortName": "CustomFieldService" + "fullName": "google.ads.admanager.v1.CustomTargetingValueService", + "shortName": "CustomTargetingValueService" }, - "shortName": "GetCustomField" + "shortName": "GetCustomTargetingValue" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetCustomFieldRequest" + "type": "google.ads.admanager_v1.types.GetCustomTargetingValueRequest" }, { "name": "name", @@ -849,14 +769,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.CustomField", - "shortName": "get_custom_field" + "resultType": "google.ads.admanager_v1.types.CustomTargetingValue", + "shortName": "get_custom_targeting_value" }, - "description": "Sample for GetCustomField", - "file": "admanager_v1_generated_custom_field_service_get_custom_field_sync.py", + "description": "Sample for GetCustomTargetingValue", + "file": "admanager_v1_generated_custom_targeting_value_service_get_custom_targeting_value_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CustomFieldService_GetCustomField_sync", + "regionTag": 
"admanager_v1_generated_CustomTargetingValueService_GetCustomTargetingValue_sync", "segments": [ { "end": 51, @@ -889,28 +809,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_custom_field_service_get_custom_field_sync.py" + "title": "admanager_v1_generated_custom_targeting_value_service_get_custom_targeting_value_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CustomFieldServiceClient", - "shortName": "CustomFieldServiceClient" + "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient", + "shortName": "CustomTargetingValueServiceClient" }, - "fullName": "google.ads.admanager_v1.CustomFieldServiceClient.list_custom_fields", + "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient.list_custom_targeting_values", "method": { - "fullName": "google.ads.admanager.v1.CustomFieldService.ListCustomFields", + "fullName": "google.ads.admanager.v1.CustomTargetingValueService.ListCustomTargetingValues", "service": { - "fullName": "google.ads.admanager.v1.CustomFieldService", - "shortName": "CustomFieldService" + "fullName": "google.ads.admanager.v1.CustomTargetingValueService", + "shortName": "CustomTargetingValueService" }, - "shortName": "ListCustomFields" + "shortName": "ListCustomTargetingValues" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListCustomFieldsRequest" + "type": "google.ads.admanager_v1.types.ListCustomTargetingValuesRequest" }, { "name": "parent", @@ -929,14 +849,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.custom_field_service.pagers.ListCustomFieldsPager", - "shortName": "list_custom_fields" + "resultType": "google.ads.admanager_v1.services.custom_targeting_value_service.pagers.ListCustomTargetingValuesPager", + "shortName": "list_custom_targeting_values" }, - "description": "Sample for ListCustomFields", - "file": 
"admanager_v1_generated_custom_field_service_list_custom_fields_sync.py", + "description": "Sample for ListCustomTargetingValues", + "file": "admanager_v1_generated_custom_targeting_value_service_list_custom_targeting_values_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CustomFieldService_ListCustomFields_sync", + "regionTag": "admanager_v1_generated_CustomTargetingValueService_ListCustomTargetingValues_sync", "segments": [ { "end": 52, @@ -969,33 +889,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_custom_field_service_list_custom_fields_sync.py" + "title": "admanager_v1_generated_custom_targeting_value_service_list_custom_targeting_values_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient", - "shortName": "CustomTargetingKeyServiceClient" + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient", + "shortName": "EntitySignalsMappingServiceClient" }, - "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient.get_custom_targeting_key", + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient.batch_create_entity_signals_mappings", "method": { - "fullName": "google.ads.admanager.v1.CustomTargetingKeyService.GetCustomTargetingKey", + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService.BatchCreateEntitySignalsMappings", "service": { - "fullName": "google.ads.admanager.v1.CustomTargetingKeyService", - "shortName": "CustomTargetingKeyService" + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService", + "shortName": "EntitySignalsMappingService" }, - "shortName": "GetCustomTargetingKey" + "shortName": "BatchCreateEntitySignalsMappings" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetCustomTargetingKeyRequest" + "type": "google.ads.admanager_v1.types.BatchCreateEntitySignalsMappingsRequest" }, { - "name": "name", 
+ "name": "parent", "type": "str" }, + { + "name": "requests", + "type": "MutableSequence[google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest]" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1009,22 +933,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.CustomTargetingKey", - "shortName": "get_custom_targeting_key" + "resultType": "google.ads.admanager_v1.types.BatchCreateEntitySignalsMappingsResponse", + "shortName": "batch_create_entity_signals_mappings" }, - "description": "Sample for GetCustomTargetingKey", - "file": "admanager_v1_generated_custom_targeting_key_service_get_custom_targeting_key_sync.py", + "description": "Sample for BatchCreateEntitySignalsMappings", + "file": "admanager_v1_generated_entity_signals_mapping_service_batch_create_entity_signals_mappings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CustomTargetingKeyService_GetCustomTargetingKey_sync", + "regionTag": "admanager_v1_generated_EntitySignalsMappingService_BatchCreateEntitySignalsMappings_sync", "segments": [ { - "end": 51, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 57, "start": 27, "type": "SHORT" }, @@ -1034,48 +958,52 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_custom_targeting_key_service_get_custom_targeting_key_sync.py" + "title": "admanager_v1_generated_entity_signals_mapping_service_batch_create_entity_signals_mappings_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient", - "shortName": "CustomTargetingKeyServiceClient" + "fullName": 
"google.ads.admanager_v1.EntitySignalsMappingServiceClient", + "shortName": "EntitySignalsMappingServiceClient" }, - "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient.list_custom_targeting_keys", + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient.batch_update_entity_signals_mappings", "method": { - "fullName": "google.ads.admanager.v1.CustomTargetingKeyService.ListCustomTargetingKeys", + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService.BatchUpdateEntitySignalsMappings", "service": { - "fullName": "google.ads.admanager.v1.CustomTargetingKeyService", - "shortName": "CustomTargetingKeyService" + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService", + "shortName": "EntitySignalsMappingService" }, - "shortName": "ListCustomTargetingKeys" + "shortName": "BatchUpdateEntitySignalsMappings" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListCustomTargetingKeysRequest" + "type": "google.ads.admanager_v1.types.BatchUpdateEntitySignalsMappingsRequest" }, { "name": "parent", "type": "str" }, + { + "name": "requests", + "type": "MutableSequence[google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest]" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1089,22 +1017,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.custom_targeting_key_service.pagers.ListCustomTargetingKeysPager", - "shortName": "list_custom_targeting_keys" + "resultType": "google.ads.admanager_v1.types.BatchUpdateEntitySignalsMappingsResponse", + "shortName": "batch_update_entity_signals_mappings" }, - "description": "Sample for ListCustomTargetingKeys", - "file": "admanager_v1_generated_custom_targeting_key_service_list_custom_targeting_keys_sync.py", + "description": "Sample for BatchUpdateEntitySignalsMappings", + "file": "admanager_v1_generated_entity_signals_mapping_service_batch_update_entity_signals_mappings_sync.py", "language": 
"PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CustomTargetingKeyService_ListCustomTargetingKeys_sync", + "regionTag": "admanager_v1_generated_EntitySignalsMappingService_BatchUpdateEntitySignalsMappings_sync", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -1114,43 +1042,127 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "admanager_v1_generated_entity_signals_mapping_service_batch_update_entity_signals_mappings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient", + "shortName": "EntitySignalsMappingServiceClient" + }, + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient.create_entity_signals_mapping", + "method": { + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService.CreateEntitySignalsMapping", + "service": { + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService", + "shortName": "EntitySignalsMappingService" + }, + "shortName": "CreateEntitySignalsMapping" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_signals_mapping", + "type": "google.ads.admanager_v1.types.EntitySignalsMapping" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.admanager_v1.types.EntitySignalsMapping", + "shortName": "create_entity_signals_mapping" + }, + "description": "Sample for 
CreateEntitySignalsMapping", + "file": "admanager_v1_generated_entity_signals_mapping_service_create_entity_signals_mapping_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "admanager_v1_generated_EntitySignalsMappingService_CreateEntitySignalsMapping_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, { "end": 53, - "start": 49, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_custom_targeting_key_service_list_custom_targeting_keys_sync.py" + "title": "admanager_v1_generated_entity_signals_mapping_service_create_entity_signals_mapping_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient", - "shortName": "CustomTargetingValueServiceClient" + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient", + "shortName": "EntitySignalsMappingServiceClient" }, - "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient.get_custom_targeting_value", + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient.get_entity_signals_mapping", "method": { - "fullName": "google.ads.admanager.v1.CustomTargetingValueService.GetCustomTargetingValue", + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService.GetEntitySignalsMapping", "service": { - "fullName": "google.ads.admanager.v1.CustomTargetingValueService", - "shortName": "CustomTargetingValueService" + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService", + "shortName": "EntitySignalsMappingService" }, - "shortName": "GetCustomTargetingValue" + "shortName": "GetEntitySignalsMapping" }, "parameters": [ { "name": "request", - "type": 
"google.ads.admanager_v1.types.GetCustomTargetingValueRequest" + "type": "google.ads.admanager_v1.types.GetEntitySignalsMappingRequest" }, { "name": "name", @@ -1169,14 +1181,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.CustomTargetingValue", - "shortName": "get_custom_targeting_value" + "resultType": "google.ads.admanager_v1.types.EntitySignalsMapping", + "shortName": "get_entity_signals_mapping" }, - "description": "Sample for GetCustomTargetingValue", - "file": "admanager_v1_generated_custom_targeting_value_service_get_custom_targeting_value_sync.py", + "description": "Sample for GetEntitySignalsMapping", + "file": "admanager_v1_generated_entity_signals_mapping_service_get_entity_signals_mapping_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CustomTargetingValueService_GetCustomTargetingValue_sync", + "regionTag": "admanager_v1_generated_EntitySignalsMappingService_GetEntitySignalsMapping_sync", "segments": [ { "end": 51, @@ -1209,28 +1221,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_custom_targeting_value_service_get_custom_targeting_value_sync.py" + "title": "admanager_v1_generated_entity_signals_mapping_service_get_entity_signals_mapping_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient", - "shortName": "CustomTargetingValueServiceClient" + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient", + "shortName": "EntitySignalsMappingServiceClient" }, - "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient.list_custom_targeting_values", + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient.list_entity_signals_mappings", "method": { - "fullName": "google.ads.admanager.v1.CustomTargetingValueService.ListCustomTargetingValues", + "fullName": 
"google.ads.admanager.v1.EntitySignalsMappingService.ListEntitySignalsMappings", "service": { - "fullName": "google.ads.admanager.v1.CustomTargetingValueService", - "shortName": "CustomTargetingValueService" + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService", + "shortName": "EntitySignalsMappingService" }, - "shortName": "ListCustomTargetingValues" + "shortName": "ListEntitySignalsMappings" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListCustomTargetingValuesRequest" + "type": "google.ads.admanager_v1.types.ListEntitySignalsMappingsRequest" }, { "name": "parent", @@ -1249,14 +1261,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.custom_targeting_value_service.pagers.ListCustomTargetingValuesPager", - "shortName": "list_custom_targeting_values" + "resultType": "google.ads.admanager_v1.services.entity_signals_mapping_service.pagers.ListEntitySignalsMappingsPager", + "shortName": "list_entity_signals_mappings" }, - "description": "Sample for ListCustomTargetingValues", - "file": "admanager_v1_generated_custom_targeting_value_service_list_custom_targeting_values_sync.py", + "description": "Sample for ListEntitySignalsMappings", + "file": "admanager_v1_generated_entity_signals_mapping_service_list_entity_signals_mappings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CustomTargetingValueService_ListCustomTargetingValues_sync", + "regionTag": "admanager_v1_generated_EntitySignalsMappingService_ListEntitySignalsMappings_sync", "segments": [ { "end": 52, @@ -1289,28 +1301,268 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_custom_targeting_value_service_list_custom_targeting_values_sync.py" + "title": "admanager_v1_generated_entity_signals_mapping_service_list_entity_signals_mappings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.ads.admanager_v1.EntitySignalsMappingServiceClient", + "shortName": "EntitySignalsMappingServiceClient" + }, + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient.update_entity_signals_mapping", + "method": { + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService.UpdateEntitySignalsMapping", + "service": { + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService", + "shortName": "EntitySignalsMappingService" + }, + "shortName": "UpdateEntitySignalsMapping" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest" + }, + { + "name": "entity_signals_mapping", + "type": "google.ads.admanager_v1.types.EntitySignalsMapping" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.admanager_v1.types.EntitySignalsMapping", + "shortName": "update_entity_signals_mapping" + }, + "description": "Sample for UpdateEntitySignalsMapping", + "file": "admanager_v1_generated_entity_signals_mapping_service_update_entity_signals_mapping_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "admanager_v1_generated_EntitySignalsMappingService_UpdateEntitySignalsMapping_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "admanager_v1_generated_entity_signals_mapping_service_update_entity_signals_mapping_sync.py" }, { "canonical": true, 
"clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.LabelServiceClient", - "shortName": "LabelServiceClient" + "fullName": "google.ads.admanager_v1.NetworkServiceClient", + "shortName": "NetworkServiceClient" + }, + "fullName": "google.ads.admanager_v1.NetworkServiceClient.get_network", + "method": { + "fullName": "google.ads.admanager.v1.NetworkService.GetNetwork", + "service": { + "fullName": "google.ads.admanager.v1.NetworkService", + "shortName": "NetworkService" + }, + "shortName": "GetNetwork" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.admanager_v1.types.GetNetworkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.admanager_v1.types.Network", + "shortName": "get_network" + }, + "description": "Sample for GetNetwork", + "file": "admanager_v1_generated_network_service_get_network_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "admanager_v1_generated_NetworkService_GetNetwork_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "admanager_v1_generated_network_service_get_network_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.admanager_v1.NetworkServiceClient", + "shortName": "NetworkServiceClient" + }, + "fullName": "google.ads.admanager_v1.NetworkServiceClient.list_networks", + "method": { + "fullName": 
"google.ads.admanager.v1.NetworkService.ListNetworks", + "service": { + "fullName": "google.ads.admanager.v1.NetworkService", + "shortName": "NetworkService" + }, + "shortName": "ListNetworks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.admanager_v1.types.ListNetworksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.admanager_v1.types.ListNetworksResponse", + "shortName": "list_networks" + }, + "description": "Sample for ListNetworks", + "file": "admanager_v1_generated_network_service_list_networks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "admanager_v1_generated_NetworkService_ListNetworks_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "admanager_v1_generated_network_service_list_networks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.admanager_v1.OrderServiceClient", + "shortName": "OrderServiceClient" }, - "fullName": "google.ads.admanager_v1.LabelServiceClient.get_label", + "fullName": "google.ads.admanager_v1.OrderServiceClient.get_order", "method": { - "fullName": "google.ads.admanager.v1.LabelService.GetLabel", + "fullName": "google.ads.admanager.v1.OrderService.GetOrder", "service": { - "fullName": "google.ads.admanager.v1.LabelService", - "shortName": "LabelService" + "fullName": "google.ads.admanager.v1.OrderService", + "shortName": "OrderService" }, - "shortName": "GetLabel" + "shortName": 
"GetOrder" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetLabelRequest" + "type": "google.ads.admanager_v1.types.GetOrderRequest" }, { "name": "name", @@ -1329,14 +1581,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Label", - "shortName": "get_label" + "resultType": "google.ads.admanager_v1.types.Order", + "shortName": "get_order" }, - "description": "Sample for GetLabel", - "file": "admanager_v1_generated_label_service_get_label_sync.py", + "description": "Sample for GetOrder", + "file": "admanager_v1_generated_order_service_get_order_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_LabelService_GetLabel_sync", + "regionTag": "admanager_v1_generated_OrderService_GetOrder_sync", "segments": [ { "end": 51, @@ -1369,28 +1621,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_label_service_get_label_sync.py" + "title": "admanager_v1_generated_order_service_get_order_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.LabelServiceClient", - "shortName": "LabelServiceClient" + "fullName": "google.ads.admanager_v1.OrderServiceClient", + "shortName": "OrderServiceClient" }, - "fullName": "google.ads.admanager_v1.LabelServiceClient.list_labels", + "fullName": "google.ads.admanager_v1.OrderServiceClient.list_orders", "method": { - "fullName": "google.ads.admanager.v1.LabelService.ListLabels", + "fullName": "google.ads.admanager.v1.OrderService.ListOrders", "service": { - "fullName": "google.ads.admanager.v1.LabelService", - "shortName": "LabelService" + "fullName": "google.ads.admanager.v1.OrderService", + "shortName": "OrderService" }, - "shortName": "ListLabels" + "shortName": "ListOrders" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListLabelsRequest" + "type": "google.ads.admanager_v1.types.ListOrdersRequest" }, { "name": "parent", 
@@ -1409,14 +1661,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.label_service.pagers.ListLabelsPager", - "shortName": "list_labels" + "resultType": "google.ads.admanager_v1.services.order_service.pagers.ListOrdersPager", + "shortName": "list_orders" }, - "description": "Sample for ListLabels", - "file": "admanager_v1_generated_label_service_list_labels_sync.py", + "description": "Sample for ListOrders", + "file": "admanager_v1_generated_order_service_list_orders_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_LabelService_ListLabels_sync", + "regionTag": "admanager_v1_generated_OrderService_ListOrders_sync", "segments": [ { "end": 52, @@ -1449,28 +1701,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_label_service_list_labels_sync.py" + "title": "admanager_v1_generated_order_service_list_orders_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.LineItemServiceClient", - "shortName": "LineItemServiceClient" + "fullName": "google.ads.admanager_v1.PlacementServiceClient", + "shortName": "PlacementServiceClient" }, - "fullName": "google.ads.admanager_v1.LineItemServiceClient.get_line_item", + "fullName": "google.ads.admanager_v1.PlacementServiceClient.get_placement", "method": { - "fullName": "google.ads.admanager.v1.LineItemService.GetLineItem", + "fullName": "google.ads.admanager.v1.PlacementService.GetPlacement", "service": { - "fullName": "google.ads.admanager.v1.LineItemService", - "shortName": "LineItemService" + "fullName": "google.ads.admanager.v1.PlacementService", + "shortName": "PlacementService" }, - "shortName": "GetLineItem" + "shortName": "GetPlacement" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetLineItemRequest" + "type": "google.ads.admanager_v1.types.GetPlacementRequest" }, { "name": "name", @@ -1489,14 +1741,14 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.LineItem", - "shortName": "get_line_item" + "resultType": "google.ads.admanager_v1.types.Placement", + "shortName": "get_placement" }, - "description": "Sample for GetLineItem", - "file": "admanager_v1_generated_line_item_service_get_line_item_sync.py", + "description": "Sample for GetPlacement", + "file": "admanager_v1_generated_placement_service_get_placement_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_LineItemService_GetLineItem_sync", + "regionTag": "admanager_v1_generated_PlacementService_GetPlacement_sync", "segments": [ { "end": 51, @@ -1529,28 +1781,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_line_item_service_get_line_item_sync.py" + "title": "admanager_v1_generated_placement_service_get_placement_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.LineItemServiceClient", - "shortName": "LineItemServiceClient" + "fullName": "google.ads.admanager_v1.PlacementServiceClient", + "shortName": "PlacementServiceClient" }, - "fullName": "google.ads.admanager_v1.LineItemServiceClient.list_line_items", + "fullName": "google.ads.admanager_v1.PlacementServiceClient.list_placements", "method": { - "fullName": "google.ads.admanager.v1.LineItemService.ListLineItems", + "fullName": "google.ads.admanager.v1.PlacementService.ListPlacements", "service": { - "fullName": "google.ads.admanager.v1.LineItemService", - "shortName": "LineItemService" + "fullName": "google.ads.admanager.v1.PlacementService", + "shortName": "PlacementService" }, - "shortName": "ListLineItems" + "shortName": "ListPlacements" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListLineItemsRequest" + "type": "google.ads.admanager_v1.types.ListPlacementsRequest" }, { "name": "parent", @@ -1569,14 +1821,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.ads.admanager_v1.services.line_item_service.pagers.ListLineItemsPager", - "shortName": "list_line_items" + "resultType": "google.ads.admanager_v1.services.placement_service.pagers.ListPlacementsPager", + "shortName": "list_placements" }, - "description": "Sample for ListLineItems", - "file": "admanager_v1_generated_line_item_service_list_line_items_sync.py", + "description": "Sample for ListPlacements", + "file": "admanager_v1_generated_placement_service_list_placements_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_LineItemService_ListLineItems_sync", + "regionTag": "admanager_v1_generated_PlacementService_ListPlacements_sync", "segments": [ { "end": 52, @@ -1609,33 +1861,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_line_item_service_list_line_items_sync.py" + "title": "admanager_v1_generated_placement_service_list_placements_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.NetworkServiceClient", - "shortName": "NetworkServiceClient" + "fullName": "google.ads.admanager_v1.ReportServiceClient", + "shortName": "ReportServiceClient" }, - "fullName": "google.ads.admanager_v1.NetworkServiceClient.get_network", + "fullName": "google.ads.admanager_v1.ReportServiceClient.create_report", "method": { - "fullName": "google.ads.admanager.v1.NetworkService.GetNetwork", + "fullName": "google.ads.admanager.v1.ReportService.CreateReport", "service": { - "fullName": "google.ads.admanager.v1.NetworkService", - "shortName": "NetworkService" + "fullName": "google.ads.admanager.v1.ReportService", + "shortName": "ReportService" }, - "shortName": "GetNetwork" + "shortName": "CreateReport" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetNetworkRequest" + "type": "google.ads.admanager_v1.types.CreateReportRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "report", + "type": 
"google.ads.admanager_v1.types.Report" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1649,22 +1905,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Network", - "shortName": "get_network" + "resultType": "google.ads.admanager_v1.types.Report", + "shortName": "create_report" }, - "description": "Sample for GetNetwork", - "file": "admanager_v1_generated_network_service_get_network_sync.py", + "description": "Sample for CreateReport", + "file": "admanager_v1_generated_report_service_create_report_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_NetworkService_GetNetwork_sync", + "regionTag": "admanager_v1_generated_ReportService_CreateReport_sync", "segments": [ { - "end": 51, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 57, "start": 27, "type": "SHORT" }, @@ -1674,43 +1930,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_network_service_get_network_sync.py" + "title": "admanager_v1_generated_report_service_create_report_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.OrderServiceClient", - "shortName": "OrderServiceClient" + "fullName": "google.ads.admanager_v1.ReportServiceClient", + "shortName": "ReportServiceClient" }, - "fullName": "google.ads.admanager_v1.OrderServiceClient.get_order", + "fullName": "google.ads.admanager_v1.ReportServiceClient.fetch_report_result_rows", "method": { - "fullName": "google.ads.admanager.v1.OrderService.GetOrder", + "fullName": "google.ads.admanager.v1.ReportService.FetchReportResultRows", "service": { - "fullName": "google.ads.admanager.v1.OrderService", - 
"shortName": "OrderService" + "fullName": "google.ads.admanager.v1.ReportService", + "shortName": "ReportService" }, - "shortName": "GetOrder" + "shortName": "FetchReportResultRows" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetOrderRequest" + "type": "google.ads.admanager_v1.types.FetchReportResultRowsRequest" }, { "name": "name", @@ -1729,14 +1985,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Order", - "shortName": "get_order" + "resultType": "google.ads.admanager_v1.services.report_service.pagers.FetchReportResultRowsPager", + "shortName": "fetch_report_result_rows" }, - "description": "Sample for GetOrder", - "file": "admanager_v1_generated_order_service_get_order_sync.py", + "description": "Sample for FetchReportResultRows", + "file": "admanager_v1_generated_report_service_fetch_report_result_rows_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_OrderService_GetOrder_sync", + "regionTag": "admanager_v1_generated_ReportService_FetchReportResultRows_sync", "segments": [ { "end": 51, @@ -1754,46 +2010,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { "end": 52, - "start": 49, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_order_service_get_order_sync.py" + "title": "admanager_v1_generated_report_service_fetch_report_result_rows_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.OrderServiceClient", - "shortName": "OrderServiceClient" + "fullName": "google.ads.admanager_v1.ReportServiceClient", + "shortName": "ReportServiceClient" }, - "fullName": "google.ads.admanager_v1.OrderServiceClient.list_orders", + "fullName": "google.ads.admanager_v1.ReportServiceClient.get_report", "method": { - 
"fullName": "google.ads.admanager.v1.OrderService.ListOrders", + "fullName": "google.ads.admanager.v1.ReportService.GetReport", "service": { - "fullName": "google.ads.admanager.v1.OrderService", - "shortName": "OrderService" + "fullName": "google.ads.admanager.v1.ReportService", + "shortName": "ReportService" }, - "shortName": "ListOrders" + "shortName": "GetReport" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListOrdersRequest" + "type": "google.ads.admanager_v1.types.GetReportRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1809,22 +2065,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.order_service.pagers.ListOrdersPager", - "shortName": "list_orders" + "resultType": "google.ads.admanager_v1.types.Report", + "shortName": "get_report" }, - "description": "Sample for ListOrders", - "file": "admanager_v1_generated_order_service_list_orders_sync.py", + "description": "Sample for GetReport", + "file": "admanager_v1_generated_report_service_get_report_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_OrderService_ListOrders_sync", + "regionTag": "admanager_v1_generated_ReportService_GetReport_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1844,36 +2100,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_order_service_list_orders_sync.py" + "title": "admanager_v1_generated_report_service_get_report_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.PlacementServiceClient", - "shortName": "PlacementServiceClient" + "fullName": "google.ads.admanager_v1.ReportServiceClient", + "shortName": "ReportServiceClient" }, - "fullName": 
"google.ads.admanager_v1.PlacementServiceClient.get_placement", + "fullName": "google.ads.admanager_v1.ReportServiceClient.list_reports", "method": { - "fullName": "google.ads.admanager.v1.PlacementService.GetPlacement", + "fullName": "google.ads.admanager.v1.ReportService.ListReports", "service": { - "fullName": "google.ads.admanager.v1.PlacementService", - "shortName": "PlacementService" + "fullName": "google.ads.admanager.v1.ReportService", + "shortName": "ReportService" }, - "shortName": "GetPlacement" + "shortName": "ListReports" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetPlacementRequest" + "type": "google.ads.admanager_v1.types.ListReportsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1889,22 +2145,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Placement", - "shortName": "get_placement" + "resultType": "google.ads.admanager_v1.services.report_service.pagers.ListReportsPager", + "shortName": "list_reports" }, - "description": "Sample for GetPlacement", - "file": "admanager_v1_generated_placement_service_get_placement_sync.py", + "description": "Sample for ListReports", + "file": "admanager_v1_generated_report_service_list_reports_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_PlacementService_GetPlacement_sync", + "regionTag": "admanager_v1_generated_ReportService_ListReports_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1924,36 +2180,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_placement_service_get_placement_sync.py" + "title": "admanager_v1_generated_report_service_list_reports_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": 
"google.ads.admanager_v1.PlacementServiceClient", - "shortName": "PlacementServiceClient" + "fullName": "google.ads.admanager_v1.ReportServiceClient", + "shortName": "ReportServiceClient" }, - "fullName": "google.ads.admanager_v1.PlacementServiceClient.list_placements", + "fullName": "google.ads.admanager_v1.ReportServiceClient.run_report", "method": { - "fullName": "google.ads.admanager.v1.PlacementService.ListPlacements", + "fullName": "google.ads.admanager.v1.ReportService.RunReport", "service": { - "fullName": "google.ads.admanager.v1.PlacementService", - "shortName": "PlacementService" + "fullName": "google.ads.admanager.v1.ReportService", + "shortName": "ReportService" }, - "shortName": "ListPlacements" + "shortName": "RunReport" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListPlacementsRequest" + "type": "google.ads.admanager_v1.types.RunReportRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1969,22 +2225,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.placement_service.pagers.ListPlacementsPager", - "shortName": "list_placements" + "resultType": "google.api_core.operation.Operation", + "shortName": "run_report" }, - "description": "Sample for ListPlacements", - "file": "admanager_v1_generated_placement_service_list_placements_sync.py", + "description": "Sample for RunReport", + "file": "admanager_v1_generated_report_service_run_report_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_PlacementService_ListPlacements_sync", + "regionTag": "admanager_v1_generated_ReportService_RunReport_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1999,17 +2255,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + 
"start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_placement_service_list_placements_sync.py" + "title": "admanager_v1_generated_report_service_run_report_sync.py" }, { "canonical": true, @@ -2018,23 +2274,27 @@ "fullName": "google.ads.admanager_v1.ReportServiceClient", "shortName": "ReportServiceClient" }, - "fullName": "google.ads.admanager_v1.ReportServiceClient.export_saved_report", + "fullName": "google.ads.admanager_v1.ReportServiceClient.update_report", "method": { - "fullName": "google.ads.admanager.v1.ReportService.ExportSavedReport", + "fullName": "google.ads.admanager.v1.ReportService.UpdateReport", "service": { "fullName": "google.ads.admanager.v1.ReportService", "shortName": "ReportService" }, - "shortName": "ExportSavedReport" + "shortName": "UpdateReport" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ExportSavedReportRequest" + "type": "google.ads.admanager_v1.types.UpdateReportRequest" }, { "name": "report", - "type": "str" + "type": "google.ads.admanager_v1.types.Report" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -2049,22 +2309,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "export_saved_report" + "resultType": "google.ads.admanager_v1.types.Report", + "shortName": "update_report" }, - "description": "Sample for ExportSavedReport", - "file": "admanager_v1_generated_report_service_export_saved_report_sync.py", + "description": "Sample for UpdateReport", + "file": "admanager_v1_generated_report_service_update_report_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_ReportService_ExportSavedReport_sync", + "regionTag": "admanager_v1_generated_ReportService_UpdateReport_sync", "segments": [ { - "end": 55, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 56, "start": 27, "type": "SHORT" 
}, @@ -2074,22 +2334,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_report_service_export_saved_report_sync.py" + "title": "admanager_v1_generated_report_service_update_report_sync.py" }, { "canonical": true, @@ -2255,22 +2515,22 @@ "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.TeamServiceClient", - "shortName": "TeamServiceClient" + "fullName": "google.ads.admanager_v1.TaxonomyCategoryServiceClient", + "shortName": "TaxonomyCategoryServiceClient" }, - "fullName": "google.ads.admanager_v1.TeamServiceClient.get_team", + "fullName": "google.ads.admanager_v1.TaxonomyCategoryServiceClient.get_taxonomy_category", "method": { - "fullName": "google.ads.admanager.v1.TeamService.GetTeam", + "fullName": "google.ads.admanager.v1.TaxonomyCategoryService.GetTaxonomyCategory", "service": { - "fullName": "google.ads.admanager.v1.TeamService", - "shortName": "TeamService" + "fullName": "google.ads.admanager.v1.TaxonomyCategoryService", + "shortName": "TaxonomyCategoryService" }, - "shortName": "GetTeam" + "shortName": "GetTaxonomyCategory" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetTeamRequest" + "type": "google.ads.admanager_v1.types.GetTaxonomyCategoryRequest" }, { "name": "name", @@ -2289,14 +2549,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Team", - "shortName": "get_team" + "resultType": "google.ads.admanager_v1.types.TaxonomyCategory", + "shortName": "get_taxonomy_category" }, - "description": "Sample for GetTeam", - "file": "admanager_v1_generated_team_service_get_team_sync.py", + "description": "Sample for GetTaxonomyCategory", + "file": 
"admanager_v1_generated_taxonomy_category_service_get_taxonomy_category_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_TeamService_GetTeam_sync", + "regionTag": "admanager_v1_generated_TaxonomyCategoryService_GetTaxonomyCategory_sync", "segments": [ { "end": 51, @@ -2329,28 +2589,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_team_service_get_team_sync.py" + "title": "admanager_v1_generated_taxonomy_category_service_get_taxonomy_category_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.TeamServiceClient", - "shortName": "TeamServiceClient" + "fullName": "google.ads.admanager_v1.TaxonomyCategoryServiceClient", + "shortName": "TaxonomyCategoryServiceClient" }, - "fullName": "google.ads.admanager_v1.TeamServiceClient.list_teams", + "fullName": "google.ads.admanager_v1.TaxonomyCategoryServiceClient.list_taxonomy_categories", "method": { - "fullName": "google.ads.admanager.v1.TeamService.ListTeams", + "fullName": "google.ads.admanager.v1.TaxonomyCategoryService.ListTaxonomyCategories", "service": { - "fullName": "google.ads.admanager.v1.TeamService", - "shortName": "TeamService" + "fullName": "google.ads.admanager.v1.TaxonomyCategoryService", + "shortName": "TaxonomyCategoryService" }, - "shortName": "ListTeams" + "shortName": "ListTaxonomyCategories" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListTeamsRequest" + "type": "google.ads.admanager_v1.types.ListTaxonomyCategoriesRequest" }, { "name": "parent", @@ -2369,14 +2629,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.team_service.pagers.ListTeamsPager", - "shortName": "list_teams" + "resultType": "google.ads.admanager_v1.services.taxonomy_category_service.pagers.ListTaxonomyCategoriesPager", + "shortName": "list_taxonomy_categories" }, - "description": "Sample for ListTeams", - "file": 
"admanager_v1_generated_team_service_list_teams_sync.py", + "description": "Sample for ListTaxonomyCategories", + "file": "admanager_v1_generated_taxonomy_category_service_list_taxonomy_categories_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_TeamService_ListTeams_sync", + "regionTag": "admanager_v1_generated_TaxonomyCategoryService_ListTaxonomyCategories_sync", "segments": [ { "end": 52, @@ -2409,7 +2669,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_team_service_list_teams_sync.py" + "title": "admanager_v1_generated_taxonomy_category_service_list_taxonomy_categories_sync.py" }, { "canonical": true, @@ -2490,86 +2750,6 @@ } ], "title": "admanager_v1_generated_user_service_get_user_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.ads.admanager_v1.UserServiceClient", - "shortName": "UserServiceClient" - }, - "fullName": "google.ads.admanager_v1.UserServiceClient.list_users", - "method": { - "fullName": "google.ads.admanager.v1.UserService.ListUsers", - "service": { - "fullName": "google.ads.admanager.v1.UserService", - "shortName": "UserService" - }, - "shortName": "ListUsers" - }, - "parameters": [ - { - "name": "request", - "type": "google.ads.admanager_v1.types.ListUsersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.ads.admanager_v1.services.user_service.pagers.ListUsersPager", - "shortName": "list_users" - }, - "description": "Sample for ListUsers", - "file": "admanager_v1_generated_user_service_list_users_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_UserService_ListUsers_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 
52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "admanager_v1_generated_user_service_list_users_sync.py" } ] } diff --git a/packages/google-ads-admanager/scripts/client-post-processing/doc-formatting.yaml b/packages/google-ads-admanager/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-ads-admanager/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ +../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git a/packages/google-ads-admanager/scripts/fixup_admanager_v1_keywords.py b/packages/google-ads-admanager/scripts/fixup_admanager_v1_keywords.py index f8aeaf3c31f3..72c4b05e14f3 100644 --- a/packages/google-ads-admanager/scripts/fixup_admanager_v1_keywords.py +++ b/packages/google-ads-admanager/scripts/fixup_admanager_v1_keywords.py @@ -39,38 +39,40 @@ def partition( class admanagerCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'export_saved_report': ('format_', 'report', 'include_report_properties', 'include_ids', 'include_totals_row', 'file_name', ), - 'get_ad_partner': ('name', ), + 'batch_create_entity_signals_mappings': ('parent', 'requests', ), + 'batch_update_entity_signals_mappings': ('parent', 'requests', ), + 'create_entity_signals_mapping': ('parent', 'entity_signals_mapping', ), + 'create_report': ('parent', 'report', ), + 'fetch_report_result_rows': ('name', 'page_size', 'page_token', ), 'get_ad_unit': ('name', ), 'get_company': ('name', ), - 'get_contact': ('name', ), - 'get_creative': ('name', ), 'get_custom_field': ('name', ), 
'get_custom_targeting_key': ('name', ), 'get_custom_targeting_value': ('name', ), - 'get_label': ('name', ), - 'get_line_item': ('name', ), + 'get_entity_signals_mapping': ('name', ), 'get_network': ('name', ), 'get_order': ('name', ), 'get_placement': ('name', ), + 'get_report': ('name', ), 'get_role': ('name', ), - 'get_team': ('name', ), + 'get_taxonomy_category': ('name', ), 'get_user': ('name', ), - 'list_ad_partners': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_ad_units': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), + 'list_ad_unit_sizes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_companies': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), - 'list_contacts': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), - 'list_creatives': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_custom_fields': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_custom_targeting_keys': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_custom_targeting_values': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), - 'list_labels': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), - 'list_line_items': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), + 'list_entity_signals_mappings': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), + 'list_networks': (), 'list_orders': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_placements': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), + 'list_reports': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_roles': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), - 'list_teams': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), - 
'list_users': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), + 'list_taxonomy_categories': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), + 'run_report': ('name', ), + 'update_entity_signals_mapping': ('entity_signals_mapping', 'update_mask', ), + 'update_report': ('report', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py index 4aef4911e6dd..6f4aca46282b 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py @@ -53,8 +53,8 @@ ) from google.ads.admanager_v1.types import ( ad_unit_enums, + ad_unit_messages, ad_unit_service, - ad_unit_size, applied_label, ) @@ -975,23 +975,23 @@ def test_get_ad_unit_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ad_unit_service.AdUnit( + return_value = ad_unit_messages.AdUnit( name="name_value", ad_unit_id=1040, parent_ad_unit="parent_ad_unit_value", display_name="display_name_value", ad_unit_code="ad_unit_code_value", - status=ad_unit_service.AdUnit.Status.ACTIVE, - target_window=ad_unit_service.TargetWindowEnum.TargetWindow.TOP, + status=ad_unit_enums.AdUnitStatusEnum.AdUnitStatus.ACTIVE, + applied_target_window=ad_unit_enums.TargetWindowEnum.TargetWindow.TOP, + effective_target_window=ad_unit_enums.TargetWindowEnum.TargetWindow.TOP, applied_teams=["applied_teams_value"], teams=["teams_value"], description="description_value", explicitly_targeted=True, has_children=True, external_set_top_box_channel_id="external_set_top_box_channel_id_value", - ctv_application_id=1900, - smart_size_mode=ad_unit_service.SmartSizeModeEnum.SmartSizeMode.NONE, - applied_adsense_enabled=ad_unit_enums.AppliedAdsenseEnabledEnum.AppliedAdsenseEnabled.TRUE, + smart_size_mode=ad_unit_enums.SmartSizeModeEnum.SmartSizeMode.NONE, + applied_adsense_enabled=True, effective_adsense_enabled=True, ) @@ -999,7 +999,7 @@ def test_get_ad_unit_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_unit_service.AdUnit.pb(return_value) + return_value = ad_unit_messages.AdUnit.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1007,14 +1007,21 @@ def test_get_ad_unit_rest(request_type): response = client.get_ad_unit(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, ad_unit_service.AdUnit) + assert isinstance(response, ad_unit_messages.AdUnit) assert response.name == "name_value" assert response.ad_unit_id == 1040 assert response.parent_ad_unit == "parent_ad_unit_value" assert response.display_name == "display_name_value" assert response.ad_unit_code == "ad_unit_code_value" - assert response.status == ad_unit_service.AdUnit.Status.ACTIVE - assert response.target_window == ad_unit_service.TargetWindowEnum.TargetWindow.TOP + assert response.status == ad_unit_enums.AdUnitStatusEnum.AdUnitStatus.ACTIVE + assert ( + response.applied_target_window + == ad_unit_enums.TargetWindowEnum.TargetWindow.TOP + ) + assert ( + response.effective_target_window + == ad_unit_enums.TargetWindowEnum.TargetWindow.TOP + ) assert response.applied_teams == ["applied_teams_value"] assert response.teams == ["teams_value"] assert response.description == "description_value" @@ -1024,14 +1031,10 @@ def test_get_ad_unit_rest(request_type): response.external_set_top_box_channel_id == "external_set_top_box_channel_id_value" ) - assert response.ctv_application_id == 1900 - assert ( - response.smart_size_mode == ad_unit_service.SmartSizeModeEnum.SmartSizeMode.NONE - ) assert ( - response.applied_adsense_enabled - == ad_unit_enums.AppliedAdsenseEnabledEnum.AppliedAdsenseEnabled.TRUE + response.smart_size_mode == ad_unit_enums.SmartSizeModeEnum.SmartSizeMode.NONE ) + assert response.applied_adsense_enabled is True assert response.effective_adsense_enabled is True @@ -1111,7 +1114,7 @@ def test_get_ad_unit_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ad_unit_service.AdUnit() + return_value = ad_unit_messages.AdUnit() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1132,7 +1135,7 @@ def test_get_ad_unit_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_unit_service.AdUnit.pb(return_value) + return_value = ad_unit_messages.AdUnit.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1187,8 +1190,8 @@ def test_get_ad_unit_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ad_unit_service.AdUnit.to_json( - ad_unit_service.AdUnit() + req.return_value._content = ad_unit_messages.AdUnit.to_json( + ad_unit_messages.AdUnit() ) request = ad_unit_service.GetAdUnitRequest() @@ -1197,7 +1200,7 @@ def test_get_ad_unit_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ad_unit_service.AdUnit() + post.return_value = ad_unit_messages.AdUnit() client.get_ad_unit( request, @@ -1244,7 +1247,7 @@ def test_get_ad_unit_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ad_unit_service.AdUnit() + return_value = ad_unit_messages.AdUnit() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/adUnits/sample2"} @@ -1259,7 +1262,7 @@ def test_get_ad_unit_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_unit_service.AdUnit.pb(return_value) + return_value = ad_unit_messages.AdUnit.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1628,9 +1631,9 @@ def test_list_ad_units_rest_pager(transport: str = "rest"): response = ( ad_unit_service.ListAdUnitsResponse( ad_units=[ - ad_unit_service.AdUnit(), - ad_unit_service.AdUnit(), - ad_unit_service.AdUnit(), + ad_unit_messages.AdUnit(), + ad_unit_messages.AdUnit(), + ad_unit_messages.AdUnit(), ], next_page_token="abc", ), @@ -1640,14 +1643,14 @@ def test_list_ad_units_rest_pager(transport: str = "rest"): ), ad_unit_service.ListAdUnitsResponse( ad_units=[ - ad_unit_service.AdUnit(), + ad_unit_messages.AdUnit(), ], next_page_token="ghi", ), ad_unit_service.ListAdUnitsResponse( ad_units=[ - ad_unit_service.AdUnit(), - ad_unit_service.AdUnit(), + ad_unit_messages.AdUnit(), + ad_unit_messages.AdUnit(), ], ), ) @@ -1670,13 +1673,398 @@ def test_list_ad_units_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, ad_unit_service.AdUnit) for i in results) + assert all(isinstance(i, ad_unit_messages.AdUnit) for i in results) pages = list(client.list_ad_units(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + ad_unit_service.ListAdUnitSizesRequest, + dict, + ], +) +def test_list_ad_unit_sizes_rest(request_type): + client = AdUnitServiceClient( 
+ credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ad_unit_service.ListAdUnitSizesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ad_unit_service.ListAdUnitSizesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_ad_unit_sizes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAdUnitSizesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_ad_unit_sizes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AdUnitServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_ad_unit_sizes in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_ad_unit_sizes + ] = mock_rpc + + request = {} + client.list_ad_unit_sizes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_ad_unit_sizes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_ad_unit_sizes_rest_required_fields( + request_type=ad_unit_service.ListAdUnitSizesRequest, +): + transport_class = transports.AdUnitServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_ad_unit_sizes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_ad_unit_sizes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "skip", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AdUnitServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ad_unit_service.ListAdUnitSizesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ad_unit_service.ListAdUnitSizesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_ad_unit_sizes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_ad_unit_sizes_rest_unset_required_fields(): + transport = transports.AdUnitServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_ad_unit_sizes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "skip", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_ad_unit_sizes_rest_interceptors(null_interceptor): + transport = transports.AdUnitServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AdUnitServiceRestInterceptor(), + ) + client = AdUnitServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as 
req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AdUnitServiceRestInterceptor, "post_list_ad_unit_sizes" + ) as post, mock.patch.object( + transports.AdUnitServiceRestInterceptor, "pre_list_ad_unit_sizes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ad_unit_service.ListAdUnitSizesRequest.pb( + ad_unit_service.ListAdUnitSizesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ad_unit_service.ListAdUnitSizesResponse.to_json( + ad_unit_service.ListAdUnitSizesResponse() + ) + + request = ad_unit_service.ListAdUnitSizesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ad_unit_service.ListAdUnitSizesResponse() + + client.list_ad_unit_sizes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_ad_unit_sizes_rest_bad_request( + transport: str = "rest", request_type=ad_unit_service.ListAdUnitSizesRequest +): + client = AdUnitServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_ad_unit_sizes(request) + + +def test_list_ad_unit_sizes_rest_flattened(): + client = AdUnitServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ad_unit_service.ListAdUnitSizesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ad_unit_service.ListAdUnitSizesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_ad_unit_sizes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/adUnitSizes" % client.transport._host, args[1] + ) + + +def test_list_ad_unit_sizes_rest_flattened_error(transport: str = "rest"): + client = AdUnitServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_ad_unit_sizes( + ad_unit_service.ListAdUnitSizesRequest(), + parent="parent_value", + ) + + +def test_list_ad_unit_sizes_rest_pager(transport: str = "rest"): + client = AdUnitServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + ad_unit_service.ListAdUnitSizesResponse( + ad_unit_sizes=[ + ad_unit_messages.AdUnitSize(), + ad_unit_messages.AdUnitSize(), + ad_unit_messages.AdUnitSize(), + ], + next_page_token="abc", + ), + ad_unit_service.ListAdUnitSizesResponse( + ad_unit_sizes=[], + next_page_token="def", + ), + ad_unit_service.ListAdUnitSizesResponse( + ad_unit_sizes=[ + ad_unit_messages.AdUnitSize(), + ], + next_page_token="ghi", + ), + ad_unit_service.ListAdUnitSizesResponse( + ad_unit_sizes=[ + ad_unit_messages.AdUnitSize(), + ad_unit_messages.AdUnitSize(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + ad_unit_service.ListAdUnitSizesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "networks/sample1"} + + pager = client.list_ad_unit_sizes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, ad_unit_messages.AdUnitSize) for i in results) + + pages = list(client.list_ad_unit_sizes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", 
"def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.AdUnitServiceRestTransport( @@ -1789,6 +2177,7 @@ def test_ad_unit_service_base_transport(): methods = ( "get_ad_unit", "list_ad_units", + "list_ad_unit_sizes", "get_operation", ) for method in methods: @@ -1927,6 +2316,9 @@ def test_ad_unit_service_client_transport_session_collision(transport_name): session1 = client1.transport.list_ad_units._session session2 = client2.transport.list_ad_units._session assert session1 != session2 + session1 = client1.transport.list_ad_unit_sizes._session + session2 = client2.transport.list_ad_unit_sizes._session + assert session1 != session2 def test_ad_unit_path(): @@ -2154,7 +2546,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2181,7 +2573,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py index 817775eba7e3..c64524fa3284 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py @@ -36,6 +36,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -52,6 +53,7 @@ from google.ads.admanager_v1.types import ( applied_label, company_credit_status_enum, + company_messages, company_service, company_type_enum, ) @@ -977,7 +979,7 @@ def test_get_company_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = company_service.Company( + return_value = company_messages.Company( name="name_value", company_id=1059, display_name="display_name_value", @@ -991,13 +993,14 @@ def test_get_company_rest(request_type): credit_status=company_credit_status_enum.CompanyCreditStatusEnum.CompanyCreditStatus.ACTIVE, primary_contact="primary_contact_value", applied_teams=["applied_teams_value"], + third_party_company_id=2348, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = company_service.Company.pb(return_value) + return_value = company_messages.Company.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1005,7 +1008,7 @@ def test_get_company_rest(request_type): response = client.get_company(request) # Establish that the response is the type that we expect. - assert isinstance(response, company_service.Company) + assert isinstance(response, company_messages.Company) assert response.name == "name_value" assert response.company_id == 1059 assert response.display_name == "display_name_value" @@ -1022,6 +1025,7 @@ def test_get_company_rest(request_type): ) assert response.primary_contact == "primary_contact_value" assert response.applied_teams == ["applied_teams_value"] + assert response.third_party_company_id == 2348 def test_get_company_rest_use_cached_wrapped_rpc(): @@ -1100,7 +1104,7 @@ def test_get_company_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = company_service.Company() + return_value = company_messages.Company() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1121,7 +1125,7 @@ def test_get_company_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = company_service.Company.pb(return_value) + return_value = company_messages.Company.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1176,8 +1180,8 @@ def test_get_company_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = company_service.Company.to_json( - company_service.Company() + req.return_value._content = company_messages.Company.to_json( + company_messages.Company() ) request = company_service.GetCompanyRequest() @@ -1186,7 +1190,7 @@ def test_get_company_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = company_service.Company() + post.return_value = company_messages.Company() client.get_company( request, @@ -1233,7 +1237,7 @@ def test_get_company_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = company_service.Company() + return_value = company_messages.Company() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/companies/sample2"} @@ -1248,7 +1252,7 @@ def test_get_company_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = company_service.Company.pb(return_value) + return_value = company_messages.Company.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1617,9 +1621,9 @@ def test_list_companies_rest_pager(transport: str = "rest"): response = ( company_service.ListCompaniesResponse( companies=[ - company_service.Company(), - company_service.Company(), - company_service.Company(), + company_messages.Company(), + company_messages.Company(), + company_messages.Company(), ], next_page_token="abc", ), @@ -1629,14 +1633,14 @@ def test_list_companies_rest_pager(transport: str = "rest"): ), company_service.ListCompaniesResponse( companies=[ - company_service.Company(), + company_messages.Company(), ], next_page_token="ghi", ), company_service.ListCompaniesResponse( companies=[ - company_service.Company(), - company_service.Company(), + company_messages.Company(), + company_messages.Company(), ], ), ) @@ -1659,7 +1663,7 @@ def test_list_companies_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, company_service.Company) for i in results) + assert all(isinstance(i, company_messages.Company) for i in results) pages = list(client.list_companies(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -2166,7 +2170,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + 
{"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2193,7 +2197,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py index 422c0de35f80..68e7487bb7ca 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py @@ -49,7 +49,11 @@ pagers, transports, ) -from google.ads.admanager_v1.types import custom_field_enums, custom_field_service +from google.ads.admanager_v1.types import ( + custom_field_enums, + custom_field_messages, + custom_field_service, +) def client_cert_source_callback(): @@ -1004,7 +1008,7 @@ def test_get_custom_field_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = custom_field_service.CustomField( + return_value = custom_field_messages.CustomField( name="name_value", custom_field_id=1578, display_name="display_name_value", @@ -1019,7 +1023,7 @@ def test_get_custom_field_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_field_service.CustomField.pb(return_value) + return_value = custom_field_messages.CustomField.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1027,7 +1031,7 @@ def test_get_custom_field_rest(request_type): response = client.get_custom_field(request) # Establish that the response is the type that we expect. - assert isinstance(response, custom_field_service.CustomField) + assert isinstance(response, custom_field_messages.CustomField) assert response.name == "name_value" assert response.custom_field_id == 1578 assert response.display_name == "display_name_value" @@ -1128,7 +1132,7 @@ def test_get_custom_field_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = custom_field_service.CustomField() + return_value = custom_field_messages.CustomField() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1149,7 +1153,7 @@ def test_get_custom_field_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_field_service.CustomField.pb(return_value) + return_value = custom_field_messages.CustomField.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1204,8 +1208,8 @@ def test_get_custom_field_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = custom_field_service.CustomField.to_json( - custom_field_service.CustomField() + req.return_value._content = custom_field_messages.CustomField.to_json( + custom_field_messages.CustomField() ) request = custom_field_service.GetCustomFieldRequest() @@ -1214,7 +1218,7 @@ def test_get_custom_field_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = custom_field_service.CustomField() + post.return_value = custom_field_messages.CustomField() client.get_custom_field( request, @@ -1261,7 +1265,7 @@ def test_get_custom_field_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = custom_field_service.CustomField() + return_value = custom_field_messages.CustomField() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/customFields/sample2"} @@ -1276,7 +1280,7 @@ def test_get_custom_field_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_field_service.CustomField.pb(return_value) + return_value = custom_field_messages.CustomField.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1653,9 +1657,9 @@ def test_list_custom_fields_rest_pager(transport: str = "rest"): response = ( custom_field_service.ListCustomFieldsResponse( custom_fields=[ - custom_field_service.CustomField(), - custom_field_service.CustomField(), - custom_field_service.CustomField(), + custom_field_messages.CustomField(), + custom_field_messages.CustomField(), + custom_field_messages.CustomField(), ], next_page_token="abc", ), @@ -1665,14 +1669,14 @@ def test_list_custom_fields_rest_pager(transport: str = "rest"): ), custom_field_service.ListCustomFieldsResponse( custom_fields=[ - custom_field_service.CustomField(), + custom_field_messages.CustomField(), ], next_page_token="ghi", ), custom_field_service.ListCustomFieldsResponse( custom_fields=[ - custom_field_service.CustomField(), - custom_field_service.CustomField(), + custom_field_messages.CustomField(), + custom_field_messages.CustomField(), ], ), ) @@ -1695,7 +1699,7 @@ def test_list_custom_fields_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, custom_field_service.CustomField) for i in results) + assert all(isinstance(i, custom_field_messages.CustomField) for i in results) pages = list(client.list_custom_fields(request=sample_request).pages) for page_, token in zip(pages, ["abc", 
"def", "ghi", ""]): @@ -2133,7 +2137,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2160,7 +2164,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py index 552cd9447d1c..bf29a7231ef8 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py @@ -51,6 +51,7 @@ ) from google.ads.admanager_v1.types import ( custom_targeting_key_enums, + custom_targeting_key_messages, custom_targeting_key_service, ) @@ -1035,7 +1036,7 @@ def test_get_custom_targeting_key_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = custom_targeting_key_service.CustomTargetingKey( + return_value = custom_targeting_key_messages.CustomTargetingKey( name="name_value", custom_targeting_key_id=2451, ad_tag_name="ad_tag_name_value", @@ -1049,7 +1050,7 @@ def test_get_custom_targeting_key_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_targeting_key_service.CustomTargetingKey.pb(return_value) + return_value = custom_targeting_key_messages.CustomTargetingKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1057,7 +1058,7 @@ def test_get_custom_targeting_key_rest(request_type): response = client.get_custom_targeting_key(request) # Establish that the response is the type that we expect. - assert isinstance(response, custom_targeting_key_service.CustomTargetingKey) + assert isinstance(response, custom_targeting_key_messages.CustomTargetingKey) assert response.name == "name_value" assert response.custom_targeting_key_id == 2451 assert response.ad_tag_name == "ad_tag_name_value" @@ -1157,7 +1158,7 @@ def test_get_custom_targeting_key_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = custom_targeting_key_service.CustomTargetingKey() + return_value = custom_targeting_key_messages.CustomTargetingKey() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1178,7 +1179,7 @@ def test_get_custom_targeting_key_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_targeting_key_service.CustomTargetingKey.pb( + return_value = custom_targeting_key_messages.CustomTargetingKey.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -1238,8 +1239,8 @@ def test_get_custom_targeting_key_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - custom_targeting_key_service.CustomTargetingKey.to_json( - custom_targeting_key_service.CustomTargetingKey() + custom_targeting_key_messages.CustomTargetingKey.to_json( + custom_targeting_key_messages.CustomTargetingKey() ) ) @@ -1249,7 +1250,7 @@ def test_get_custom_targeting_key_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = custom_targeting_key_service.CustomTargetingKey() + post.return_value = custom_targeting_key_messages.CustomTargetingKey() client.get_custom_targeting_key( request, @@ -1297,7 +1298,7 @@ def test_get_custom_targeting_key_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = custom_targeting_key_service.CustomTargetingKey() + return_value = custom_targeting_key_messages.CustomTargetingKey() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/customTargetingKeys/sample2"} @@ -1312,7 +1313,7 @@ def test_get_custom_targeting_key_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_targeting_key_service.CustomTargetingKey.pb(return_value) + return_value = custom_targeting_key_messages.CustomTargetingKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1703,9 +1704,9 @@ def test_list_custom_targeting_keys_rest_pager(transport: str = "rest"): response = ( custom_targeting_key_service.ListCustomTargetingKeysResponse( custom_targeting_keys=[ - custom_targeting_key_service.CustomTargetingKey(), - custom_targeting_key_service.CustomTargetingKey(), - custom_targeting_key_service.CustomTargetingKey(), + custom_targeting_key_messages.CustomTargetingKey(), + custom_targeting_key_messages.CustomTargetingKey(), + custom_targeting_key_messages.CustomTargetingKey(), ], next_page_token="abc", ), @@ -1715,14 +1716,14 @@ def test_list_custom_targeting_keys_rest_pager(transport: str = "rest"): ), custom_targeting_key_service.ListCustomTargetingKeysResponse( custom_targeting_keys=[ - custom_targeting_key_service.CustomTargetingKey(), + custom_targeting_key_messages.CustomTargetingKey(), ], next_page_token="ghi", ), custom_targeting_key_service.ListCustomTargetingKeysResponse( custom_targeting_keys=[ - custom_targeting_key_service.CustomTargetingKey(), - custom_targeting_key_service.CustomTargetingKey(), + custom_targeting_key_messages.CustomTargetingKey(), + custom_targeting_key_messages.CustomTargetingKey(), ], ), ) @@ -1747,7 +1748,7 @@ def 
test_list_custom_targeting_keys_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 assert all( - isinstance(i, custom_targeting_key_service.CustomTargetingKey) + isinstance(i, custom_targeting_key_messages.CustomTargetingKey) for i in results ) @@ -2195,7 +2196,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2222,7 +2223,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py index f3836087467d..a6ac047ff983 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py @@ -51,6 +51,7 @@ ) from google.ads.admanager_v1.types import ( custom_targeting_value_enums, + custom_targeting_value_messages, custom_targeting_value_service, ) @@ -1041,7 +1042,7 @@ def test_get_custom_targeting_value_rest(request_type): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = custom_targeting_value_service.CustomTargetingValue( + return_value = custom_targeting_value_messages.CustomTargetingValue( name="name_value", ad_tag_name="ad_tag_name_value", display_name="display_name_value", @@ -1053,7 +1054,7 @@ def test_get_custom_targeting_value_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_targeting_value_service.CustomTargetingValue.pb( + return_value = custom_targeting_value_messages.CustomTargetingValue.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -1063,7 +1064,7 @@ def test_get_custom_targeting_value_rest(request_type): response = client.get_custom_targeting_value(request) # Establish that the response is the type that we expect. - assert isinstance(response, custom_targeting_value_service.CustomTargetingValue) + assert isinstance(response, custom_targeting_value_messages.CustomTargetingValue) assert response.name == "name_value" assert response.ad_tag_name == "ad_tag_name_value" assert response.display_name == "display_name_value" @@ -1158,7 +1159,7 @@ def test_get_custom_targeting_value_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = custom_targeting_value_service.CustomTargetingValue() + return_value = custom_targeting_value_messages.CustomTargetingValue() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1179,7 +1180,7 @@ def test_get_custom_targeting_value_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_targeting_value_service.CustomTargetingValue.pb( + return_value = custom_targeting_value_messages.CustomTargetingValue.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -1239,8 +1240,8 @@ def test_get_custom_targeting_value_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - custom_targeting_value_service.CustomTargetingValue.to_json( - custom_targeting_value_service.CustomTargetingValue() + custom_targeting_value_messages.CustomTargetingValue.to_json( + custom_targeting_value_messages.CustomTargetingValue() ) ) @@ -1250,7 +1251,7 @@ def test_get_custom_targeting_value_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = custom_targeting_value_service.CustomTargetingValue() + post.return_value = custom_targeting_value_messages.CustomTargetingValue() client.get_custom_targeting_value( request, @@ -1300,7 +1301,7 @@ def test_get_custom_targeting_value_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = custom_targeting_value_service.CustomTargetingValue() + return_value = custom_targeting_value_messages.CustomTargetingValue() # get arguments that satisfy an http rule for this method sample_request = { @@ -1317,7 +1318,7 @@ def test_get_custom_targeting_value_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_targeting_value_service.CustomTargetingValue.pb( + return_value = custom_targeting_value_messages.CustomTargetingValue.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -1718,9 +1719,9 @@ def test_list_custom_targeting_values_rest_pager(transport: str = "rest"): response = ( custom_targeting_value_service.ListCustomTargetingValuesResponse( custom_targeting_values=[ - custom_targeting_value_service.CustomTargetingValue(), - custom_targeting_value_service.CustomTargetingValue(), - custom_targeting_value_service.CustomTargetingValue(), + custom_targeting_value_messages.CustomTargetingValue(), + custom_targeting_value_messages.CustomTargetingValue(), + custom_targeting_value_messages.CustomTargetingValue(), ], next_page_token="abc", ), @@ -1730,14 +1731,14 @@ def test_list_custom_targeting_values_rest_pager(transport: str = "rest"): ), custom_targeting_value_service.ListCustomTargetingValuesResponse( custom_targeting_values=[ - custom_targeting_value_service.CustomTargetingValue(), + custom_targeting_value_messages.CustomTargetingValue(), ], next_page_token="ghi", ), custom_targeting_value_service.ListCustomTargetingValuesResponse( custom_targeting_values=[ - custom_targeting_value_service.CustomTargetingValue(), - custom_targeting_value_service.CustomTargetingValue(), + custom_targeting_value_messages.CustomTargetingValue(), + custom_targeting_value_messages.CustomTargetingValue(), ], ), ) @@ -1762,7 +1763,7 @@ def test_list_custom_targeting_values_rest_pager(transport: str = "rest"): results = list(pager) assert 
len(results) == 6 assert all( - isinstance(i, custom_targeting_value_service.CustomTargetingValue) + isinstance(i, custom_targeting_value_messages.CustomTargetingValue) for i in results ) @@ -2218,7 +2219,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2245,7 +2246,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py new file mode 100644 index 000000000000..1fb7a9de78ae --- /dev/null +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py @@ -0,0 +1,3898 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ads.admanager_v1.services.entity_signals_mapping_service import ( + EntitySignalsMappingServiceClient, + pagers, + transports, +) +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert EntitySignalsMappingServiceClient._get_default_mtls_endpoint(None) is None + assert ( + EntitySignalsMappingServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + EntitySignalsMappingServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + EntitySignalsMappingServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + EntitySignalsMappingServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + EntitySignalsMappingServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert EntitySignalsMappingServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert EntitySignalsMappingServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert 
EntitySignalsMappingServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + EntitySignalsMappingServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert EntitySignalsMappingServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert EntitySignalsMappingServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert EntitySignalsMappingServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + EntitySignalsMappingServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert EntitySignalsMappingServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + EntitySignalsMappingServiceClient._get_client_cert_source(None, False) is None + ) + assert ( + EntitySignalsMappingServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + EntitySignalsMappingServiceClient._get_client_cert_source( + mock_provided_cert_source, 
True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + EntitySignalsMappingServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + EntitySignalsMappingServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + EntitySignalsMappingServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EntitySignalsMappingServiceClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + EntitySignalsMappingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = EntitySignalsMappingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == EntitySignalsMappingServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == EntitySignalsMappingServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == 
EntitySignalsMappingServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + EntitySignalsMappingServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + EntitySignalsMappingServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + EntitySignalsMappingServiceClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + EntitySignalsMappingServiceClient._get_universe_domain(None, None) + == EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + EntitySignalsMappingServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. 
+ # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (EntitySignalsMappingServiceClient, "rest"), + ], +) +def test_entity_signals_mapping_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "admanager.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://admanager.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.EntitySignalsMappingServiceRestTransport, "rest"), + ], +) +def 
test_entity_signals_mapping_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (EntitySignalsMappingServiceClient, "rest"), + ], +) +def test_entity_signals_mapping_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "admanager.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://admanager.googleapis.com" + ) + + +def test_entity_signals_mapping_service_client_get_transport_class(): + transport = EntitySignalsMappingServiceClient.get_transport_class() + available_transports = [ + transports.EntitySignalsMappingServiceRestTransport, + ] + assert transport in available_transports + + transport = 
EntitySignalsMappingServiceClient.get_transport_class("rest") + assert transport == transports.EntitySignalsMappingServiceRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + EntitySignalsMappingServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EntitySignalsMappingServiceClient), +) +def test_entity_signals_mapping_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + EntitySignalsMappingServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + EntitySignalsMappingServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + "rest", + "true", + ), + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + 
"rest", + "false", + ), + ], +) +@mock.patch.object( + EntitySignalsMappingServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EntitySignalsMappingServiceClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_entity_signals_mapping_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [EntitySignalsMappingServiceClient]) +@mock.patch.object( + EntitySignalsMappingServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(EntitySignalsMappingServiceClient), +) +def test_entity_signals_mapping_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [EntitySignalsMappingServiceClient]) +@mock.patch.object( + EntitySignalsMappingServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EntitySignalsMappingServiceClient), +) +def test_entity_signals_mapping_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + EntitySignalsMappingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = EntitySignalsMappingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + "rest", + ), + ], +) +def test_entity_signals_mapping_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + "rest", + None, + ), + ], +) +def test_entity_signals_mapping_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + entity_signals_mapping_service.GetEntitySignalsMappingRequest, + dict, + ], +) +def test_get_entity_signals_mapping_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "networks/sample1/entitySignalsMappings/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = entity_signals_mapping_messages.EntitySignalsMapping( + name="name_value", + entity_signals_mapping_id=2660, + taxonomy_category_ids=[2267], + audience_segment_id=1980, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_entity_signals_mapping(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, entity_signals_mapping_messages.EntitySignalsMapping) + assert response.name == "name_value" + assert response.entity_signals_mapping_id == 2660 + assert response.taxonomy_category_ids == [2267] + + +def test_get_entity_signals_mapping_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_entity_signals_mapping + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_entity_signals_mapping + ] = mock_rpc + + request = {} + client.get_entity_signals_mapping(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_entity_signals_mapping(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_entity_signals_mapping_rest_required_fields( + request_type=entity_signals_mapping_service.GetEntitySignalsMappingRequest, +): + transport_class = transports.EntitySignalsMappingServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_entity_signals_mapping._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_entity_signals_mapping._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_entity_signals_mapping(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_entity_signals_mapping_rest_unset_required_fields(): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_entity_signals_mapping._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_entity_signals_mapping_rest_interceptors(null_interceptor): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EntitySignalsMappingServiceRestInterceptor(), + ) + client = EntitySignalsMappingServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_get_entity_signals_mapping", + ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "pre_get_entity_signals_mapping", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = entity_signals_mapping_service.GetEntitySignalsMappingRequest.pb( + entity_signals_mapping_service.GetEntitySignalsMappingRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + entity_signals_mapping_messages.EntitySignalsMapping.to_json( + entity_signals_mapping_messages.EntitySignalsMapping() + ) + ) + + request = entity_signals_mapping_service.GetEntitySignalsMappingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = entity_signals_mapping_messages.EntitySignalsMapping() + + client.get_entity_signals_mapping( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_entity_signals_mapping_rest_bad_request( + transport: str = "rest", + request_type=entity_signals_mapping_service.GetEntitySignalsMappingRequest, +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "networks/sample1/entitySignalsMappings/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_entity_signals_mapping(request) + + +def test_get_entity_signals_mapping_rest_flattened(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "networks/sample1/entitySignalsMappings/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_entity_signals_mapping(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=networks/*/entitySignalsMappings/*}" % client.transport._host, + args[1], + ) + + +def test_get_entity_signals_mapping_rest_flattened_error(transport: str = "rest"): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_entity_signals_mapping( + entity_signals_mapping_service.GetEntitySignalsMappingRequest(), + name="name_value", + ) + + +def test_get_entity_signals_mapping_rest_error(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + dict, + ], +) +def test_list_entity_signals_mappings_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = entity_signals_mapping_service.ListEntitySignalsMappingsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_entity_signals_mappings(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntitySignalsMappingsPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_entity_signals_mappings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_entity_signals_mappings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_entity_signals_mappings + ] = mock_rpc + + request = {} + client.list_entity_signals_mappings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_entity_signals_mappings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_entity_signals_mappings_rest_required_fields( + request_type=entity_signals_mapping_service.ListEntitySignalsMappingsRequest, +): + transport_class = transports.EntitySignalsMappingServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_entity_signals_mappings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_entity_signals_mappings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "skip", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_service.ListEntitySignalsMappingsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_entity_signals_mappings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_entity_signals_mappings_rest_unset_required_fields(): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_entity_signals_mappings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "skip", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_entity_signals_mappings_rest_interceptors(null_interceptor): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EntitySignalsMappingServiceRestInterceptor(), + ) + client 
= EntitySignalsMappingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_list_entity_signals_mappings", + ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "pre_list_entity_signals_mappings", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = entity_signals_mapping_service.ListEntitySignalsMappingsRequest.pb( + entity_signals_mapping_service.ListEntitySignalsMappingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.to_json( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse() + ) + ) + + request = entity_signals_mapping_service.ListEntitySignalsMappingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse() + ) + + client.list_entity_signals_mappings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_entity_signals_mappings_rest_bad_request( + transport: str = "rest", + request_type=entity_signals_mapping_service.ListEntitySignalsMappingsRequest, +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock 
the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_entity_signals_mappings(request) + + +def test_list_entity_signals_mappings_rest_flattened(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_entity_signals_mappings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/entitySignalsMappings" % client.transport._host, + args[1], + ) + + +def test_list_entity_signals_mappings_rest_flattened_error(transport: str = "rest"): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_entity_signals_mappings( + entity_signals_mapping_service.ListEntitySignalsMappingsRequest(), + parent="parent_value", + ) + + +def test_list_entity_signals_mappings_rest_pager(transport: str = "rest"): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse( + entity_signals_mappings=[ + entity_signals_mapping_messages.EntitySignalsMapping(), + entity_signals_mapping_messages.EntitySignalsMapping(), + entity_signals_mapping_messages.EntitySignalsMapping(), + ], + next_page_token="abc", + ), + entity_signals_mapping_service.ListEntitySignalsMappingsResponse( + entity_signals_mappings=[], + next_page_token="def", + ), + entity_signals_mapping_service.ListEntitySignalsMappingsResponse( + entity_signals_mappings=[ + entity_signals_mapping_messages.EntitySignalsMapping(), + ], + next_page_token="ghi", + ), + entity_signals_mapping_service.ListEntitySignalsMappingsResponse( + entity_signals_mappings=[ + entity_signals_mapping_messages.EntitySignalsMapping(), + entity_signals_mapping_messages.EntitySignalsMapping(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "networks/sample1"} + + pager = client.list_entity_signals_mappings(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, entity_signals_mapping_messages.EntitySignalsMapping) + for i in results + ) + + pages = list(client.list_entity_signals_mappings(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + 
entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + dict, + ], +) +def test_create_entity_signals_mapping_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request_init["entity_signals_mapping"] = { + "audience_segment_id": 1980, + "content_bundle_id": 1792, + "custom_targeting_value_id": 2663, + "name": "name_value", + "entity_signals_mapping_id": 2660, + "taxonomy_category_ids": [2268, 2269], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + entity_signals_mapping_service.CreateEntitySignalsMappingRequest.meta.fields[ + "entity_signals_mapping" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "entity_signals_mapping" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entity_signals_mapping"][field])): + del 
request_init["entity_signals_mapping"][field][i][subfield] + else: + del request_init["entity_signals_mapping"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping( + name="name_value", + entity_signals_mapping_id=2660, + taxonomy_category_ids=[2267], + audience_segment_id=1980, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_entity_signals_mapping(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, entity_signals_mapping_messages.EntitySignalsMapping) + assert response.name == "name_value" + assert response.entity_signals_mapping_id == 2660 + assert response.taxonomy_category_ids == [2267] + + +def test_create_entity_signals_mapping_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_entity_signals_mapping + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_entity_signals_mapping + ] = mock_rpc + + request = {} + client.create_entity_signals_mapping(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_entity_signals_mapping(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_entity_signals_mapping_rest_required_fields( + request_type=entity_signals_mapping_service.CreateEntitySignalsMappingRequest, +): + transport_class = transports.EntitySignalsMappingServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_entity_signals_mapping._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_entity_signals_mapping._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_entity_signals_mapping(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_entity_signals_mapping_rest_unset_required_fields(): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_entity_signals_mapping._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "entitySignalsMapping", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_entity_signals_mapping_rest_interceptors(null_interceptor): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EntitySignalsMappingServiceRestInterceptor(), + ) + client = 
EntitySignalsMappingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_create_entity_signals_mapping", + ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "pre_create_entity_signals_mapping", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + entity_signals_mapping_service.CreateEntitySignalsMappingRequest.pb( + entity_signals_mapping_service.CreateEntitySignalsMappingRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + entity_signals_mapping_messages.EntitySignalsMapping.to_json( + entity_signals_mapping_messages.EntitySignalsMapping() + ) + ) + + request = entity_signals_mapping_service.CreateEntitySignalsMappingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = entity_signals_mapping_messages.EntitySignalsMapping() + + client.create_entity_signals_mapping( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_entity_signals_mapping_rest_bad_request( + transport: str = "rest", + request_type=entity_signals_mapping_service.CreateEntitySignalsMappingRequest, +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within 
the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_entity_signals_mapping(request) + + +def test_create_entity_signals_mapping_rest_flattened(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + entity_signals_mapping=entity_signals_mapping_messages.EntitySignalsMapping( + audience_segment_id=1980 + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_entity_signals_mapping(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/entitySignalsMappings" % client.transport._host, + args[1], + ) + + +def test_create_entity_signals_mapping_rest_flattened_error(transport: str = "rest"): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_entity_signals_mapping( + entity_signals_mapping_service.CreateEntitySignalsMappingRequest(), + parent="parent_value", + entity_signals_mapping=entity_signals_mapping_messages.EntitySignalsMapping( + audience_segment_id=1980 + ), + ) + + +def test_create_entity_signals_mapping_rest_error(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + dict, + ], +) +def test_update_entity_signals_mapping_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "entity_signals_mapping": { + "name": "networks/sample1/entitySignalsMappings/sample2" + } + } + request_init["entity_signals_mapping"] = { + "audience_segment_id": 1980, + "content_bundle_id": 1792, + "custom_targeting_value_id": 2663, + "name": "networks/sample1/entitySignalsMappings/sample2", + "entity_signals_mapping_id": 2660, + "taxonomy_category_ids": [2268, 2269], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest.meta.fields[ + "entity_signals_mapping" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "entity_signals_mapping" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the 
sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entity_signals_mapping"][field])): + del request_init["entity_signals_mapping"][field][i][subfield] + else: + del request_init["entity_signals_mapping"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping( + name="name_value", + entity_signals_mapping_id=2660, + taxonomy_category_ids=[2267], + audience_segment_id=1980, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_entity_signals_mapping(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, entity_signals_mapping_messages.EntitySignalsMapping) + assert response.name == "name_value" + assert response.entity_signals_mapping_id == 2660 + assert response.taxonomy_category_ids == [2267] + + +def test_update_entity_signals_mapping_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_entity_signals_mapping + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_entity_signals_mapping + ] = mock_rpc + + request = {} + client.update_entity_signals_mapping(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_entity_signals_mapping(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_entity_signals_mapping_rest_required_fields( + request_type=entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, +): + transport_class = transports.EntitySignalsMappingServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_entity_signals_mapping._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_entity_signals_mapping._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_entity_signals_mapping(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_entity_signals_mapping_rest_unset_required_fields(): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_entity_signals_mapping._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "entitySignalsMapping", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_entity_signals_mapping_rest_interceptors(null_interceptor): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EntitySignalsMappingServiceRestInterceptor(), + ) + client = EntitySignalsMappingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_update_entity_signals_mapping", + ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "pre_update_entity_signals_mapping", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest.pb( + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + entity_signals_mapping_messages.EntitySignalsMapping.to_json( + entity_signals_mapping_messages.EntitySignalsMapping() + ) + ) + + request = entity_signals_mapping_service.UpdateEntitySignalsMappingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = entity_signals_mapping_messages.EntitySignalsMapping() + + client.update_entity_signals_mapping( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_entity_signals_mapping_rest_bad_request( + transport: str = "rest", + request_type=entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "entity_signals_mapping": { + "name": "networks/sample1/entitySignalsMappings/sample2" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_entity_signals_mapping(request) + + +def test_update_entity_signals_mapping_rest_flattened(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping() + + # get arguments that satisfy an http rule for this method + sample_request = { + "entity_signals_mapping": { + "name": "networks/sample1/entitySignalsMappings/sample2" + } + } + + # get truthy value for each flattened field + mock_args = dict( + entity_signals_mapping=entity_signals_mapping_messages.EntitySignalsMapping( + audience_segment_id=1980 + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_entity_signals_mapping(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{entity_signals_mapping.name=networks/*/entitySignalsMappings/*}" + % client.transport._host, + args[1], + ) + + +def test_update_entity_signals_mapping_rest_flattened_error(transport: str = "rest"): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_entity_signals_mapping( + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest(), + entity_signals_mapping=entity_signals_mapping_messages.EntitySignalsMapping( + audience_segment_id=1980 + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_entity_signals_mapping_rest_error(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + dict, + ], +) +def test_batch_create_entity_signals_mappings_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_create_entity_signals_mappings(request) + + # Establish that the response is the type that we expect. + assert isinstance( + response, + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + ) + + +def test_batch_create_entity_signals_mappings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_create_entity_signals_mappings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_create_entity_signals_mappings + ] = mock_rpc + + request = {} + client.batch_create_entity_signals_mappings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_create_entity_signals_mappings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_create_entity_signals_mappings_rest_required_fields( + request_type=entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, +): + transport_class = transports.EntitySignalsMappingServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_entity_signals_mappings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_entity_signals_mappings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_create_entity_signals_mappings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_create_entity_signals_mappings_rest_unset_required_fields(): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.batch_create_entity_signals_mappings._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_create_entity_signals_mappings_rest_interceptors(null_interceptor): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else 
transports.EntitySignalsMappingServiceRestInterceptor(), + ) + client = EntitySignalsMappingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_batch_create_entity_signals_mappings", + ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "pre_batch_create_entity_signals_mappings", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest.pb( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse.to_json( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + + request = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + + client.batch_create_entity_signals_mappings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_create_entity_signals_mappings_rest_bad_request( + transport: str = "rest", + request_type=entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_entity_signals_mappings(request) + + +def test_batch_create_entity_signals_mappings_rest_flattened(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + requests=[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest( + parent="parent_value" + ) + ], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.batch_create_entity_signals_mappings(**mock_args) + + # Establish that the underlying call was made with 
the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/entitySignalsMappings:batchCreate" + % client.transport._host, + args[1], + ) + + +def test_batch_create_entity_signals_mappings_rest_flattened_error( + transport: str = "rest", +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_create_entity_signals_mappings( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest(), + parent="parent_value", + requests=[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest( + parent="parent_value" + ) + ], + ) + + +def test_batch_create_entity_signals_mappings_rest_error(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + dict, + ], +) +def test_batch_update_entity_signals_mappings_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_update_entity_signals_mappings(request) + + # Establish that the response is the type that we expect. + assert isinstance( + response, + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + ) + + +def test_batch_update_entity_signals_mappings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_update_entity_signals_mappings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_update_entity_signals_mappings + ] = mock_rpc + + request = {} + client.batch_update_entity_signals_mappings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_update_entity_signals_mappings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_update_entity_signals_mappings_rest_required_fields( + request_type=entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, +): + transport_class = transports.EntitySignalsMappingServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_update_entity_signals_mappings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_update_entity_signals_mappings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_update_entity_signals_mappings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_update_entity_signals_mappings_rest_unset_required_fields(): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.batch_update_entity_signals_mappings._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_update_entity_signals_mappings_rest_interceptors(null_interceptor): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else 
transports.EntitySignalsMappingServiceRestInterceptor(), + ) + client = EntitySignalsMappingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_batch_update_entity_signals_mappings", + ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "pre_batch_update_entity_signals_mappings", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest.pb( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse.to_json( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + + request = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + + client.batch_update_entity_signals_mappings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_update_entity_signals_mappings_rest_bad_request( + transport: str = "rest", + request_type=entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_update_entity_signals_mappings(request) + + +def test_batch_update_entity_signals_mappings_rest_flattened(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + requests=[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest( + entity_signals_mapping=entity_signals_mapping_messages.EntitySignalsMapping( + audience_segment_id=1980 + ) + ) + ], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + 
client.batch_update_entity_signals_mappings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/entitySignalsMappings:batchUpdate" + % client.transport._host, + args[1], + ) + + +def test_batch_update_entity_signals_mappings_rest_flattened_error( + transport: str = "rest", +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_update_entity_signals_mappings( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest(), + parent="parent_value", + requests=[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest( + entity_signals_mapping=entity_signals_mapping_messages.EntitySignalsMapping( + audience_segment_id=1980 + ) + ) + ], + ) + + +def test_batch_update_entity_signals_mappings_rest_error(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EntitySignalsMappingServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EntitySignalsMappingServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EntitySignalsMappingServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EntitySignalsMappingServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = EntitySignalsMappingServiceClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EntitySignalsMappingServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = EntitySignalsMappingServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_entity_signals_mapping_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.EntitySignalsMappingServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_entity_signals_mapping_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ads.admanager_v1.services.entity_signals_mapping_service.transports.EntitySignalsMappingServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.EntitySignalsMappingServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_entity_signals_mapping", + "list_entity_signals_mappings", + "create_entity_signals_mapping", + "update_entity_signals_mapping", + "batch_create_entity_signals_mappings", + "batch_update_entity_signals_mappings", + "get_operation", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_entity_signals_mapping_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ads.admanager_v1.services.entity_signals_mapping_service.transports.EntitySignalsMappingServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EntitySignalsMappingServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_entity_signals_mapping_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ads.admanager_v1.services.entity_signals_mapping_service.transports.EntitySignalsMappingServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EntitySignalsMappingServiceTransport() + adc.assert_called_once() + + +def test_entity_signals_mapping_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + EntitySignalsMappingServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +def test_entity_signals_mapping_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.EntitySignalsMappingServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_entity_signals_mapping_service_host_no_port(transport_name): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="admanager.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "admanager.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://admanager.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_entity_signals_mapping_service_host_with_port(transport_name): + 
client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="admanager.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "admanager.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://admanager.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_entity_signals_mapping_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = EntitySignalsMappingServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = EntitySignalsMappingServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_entity_signals_mapping._session + session2 = client2.transport.get_entity_signals_mapping._session + assert session1 != session2 + session1 = client1.transport.list_entity_signals_mappings._session + session2 = client2.transport.list_entity_signals_mappings._session + assert session1 != session2 + session1 = client1.transport.create_entity_signals_mapping._session + session2 = client2.transport.create_entity_signals_mapping._session + assert session1 != session2 + session1 = client1.transport.update_entity_signals_mapping._session + session2 = client2.transport.update_entity_signals_mapping._session + assert session1 != session2 + session1 = client1.transport.batch_create_entity_signals_mappings._session + session2 = client2.transport.batch_create_entity_signals_mappings._session + assert session1 != session2 + session1 = client1.transport.batch_update_entity_signals_mappings._session + session2 = client2.transport.batch_update_entity_signals_mappings._session + assert session1 != session2 + + +def test_entity_signals_mapping_path(): + network_code = "squid" + 
entity_signals_mapping = "clam" + expected = ( + "networks/{network_code}/entitySignalsMappings/{entity_signals_mapping}".format( + network_code=network_code, + entity_signals_mapping=entity_signals_mapping, + ) + ) + actual = EntitySignalsMappingServiceClient.entity_signals_mapping_path( + network_code, entity_signals_mapping + ) + assert expected == actual + + +def test_parse_entity_signals_mapping_path(): + expected = { + "network_code": "whelk", + "entity_signals_mapping": "octopus", + } + path = EntitySignalsMappingServiceClient.entity_signals_mapping_path(**expected) + + # Check that the path construction is reversible. + actual = EntitySignalsMappingServiceClient.parse_entity_signals_mapping_path(path) + assert expected == actual + + +def test_network_path(): + network_code = "oyster" + expected = "networks/{network_code}".format( + network_code=network_code, + ) + actual = EntitySignalsMappingServiceClient.network_path(network_code) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "network_code": "nudibranch", + } + path = EntitySignalsMappingServiceClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = EntitySignalsMappingServiceClient.parse_network_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = EntitySignalsMappingServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = EntitySignalsMappingServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EntitySignalsMappingServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = EntitySignalsMappingServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = EntitySignalsMappingServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = EntitySignalsMappingServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = EntitySignalsMappingServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = EntitySignalsMappingServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = EntitySignalsMappingServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = EntitySignalsMappingServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = EntitySignalsMappingServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EntitySignalsMappingServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = EntitySignalsMappingServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = EntitySignalsMappingServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = EntitySignalsMappingServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.EntitySignalsMappingServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.EntitySignalsMappingServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = EntitySignalsMappingServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "networks/sample1/operations/reports/runs/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py 
b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py index 3f5e4ad96587..11622319f40d 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py @@ -48,7 +48,7 @@ NetworkServiceClient, transports, ) -from google.ads.admanager_v1.types import network_service +from google.ads.admanager_v1.types import network_messages, network_service def client_cert_source_callback(): @@ -971,7 +971,7 @@ def test_get_network_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = network_service.Network( + return_value = network_messages.Network( name="name_value", display_name="display_name_value", network_code="network_code_value", @@ -988,7 +988,7 @@ def test_get_network_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = network_service.Network.pb(return_value) + return_value = network_messages.Network.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -996,7 +996,7 @@ def test_get_network_rest(request_type): response = client.get_network(request) # Establish that the response is the type that we expect. - assert isinstance(response, network_service.Network) + assert isinstance(response, network_messages.Network) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.network_code == "network_code_value" @@ -1085,7 +1085,7 @@ def test_get_network_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = network_service.Network() + return_value = network_messages.Network() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1106,7 +1106,7 @@ def test_get_network_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = network_service.Network.pb(return_value) + return_value = network_messages.Network.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1161,8 +1161,8 @@ def test_get_network_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = network_service.Network.to_json( - network_service.Network() + req.return_value._content = network_messages.Network.to_json( + network_messages.Network() ) request = network_service.GetNetworkRequest() @@ -1171,7 +1171,7 @@ def test_get_network_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = network_service.Network() + post.return_value = network_messages.Network() client.get_network( request, @@ -1218,7 +1218,7 @@ def test_get_network_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = network_service.Network() + return_value = network_messages.Network() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1"} @@ -1233,7 +1233,7 @@ def test_get_network_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = network_service.Network.pb(return_value) + return_value = network_messages.Network.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1270,6 +1270,166 @@ def test_get_network_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + network_service.ListNetworksRequest, + dict, + ], +) +def test_list_networks_rest(request_type): + client = NetworkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = network_service.ListNetworksResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = network_service.ListNetworksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_networks(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, network_service.ListNetworksResponse) + + +def test_list_networks_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_networks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_networks] = mock_rpc + + request = {} + client.list_networks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_networks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_networks_rest_interceptors(null_interceptor): + transport = transports.NetworkServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkServiceRestInterceptor(), + ) + client = NetworkServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkServiceRestInterceptor, "post_list_networks" + ) as post, mock.patch.object( + transports.NetworkServiceRestInterceptor, "pre_list_networks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = network_service.ListNetworksRequest.pb( + network_service.ListNetworksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = network_service.ListNetworksResponse.to_json( + network_service.ListNetworksResponse() + ) + + request = network_service.ListNetworksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = network_service.ListNetworksResponse() + + client.list_networks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_networks_rest_bad_request( + transport: str = "rest", request_type=network_service.ListNetworksRequest +): + client = NetworkServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_networks(request) + + +def test_list_networks_rest_error(): + client = NetworkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.NetworkServiceRestTransport( @@ -1381,6 +1541,7 @@ def test_network_service_base_transport(): # raise NotImplementedError. methods = ( "get_network", + "list_networks", "get_operation", ) for method in methods: @@ -1516,6 +1677,9 @@ def test_network_service_client_transport_session_collision(transport_name): session1 = client1.transport.get_network._session session2 = client2.transport.get_network._session assert session1 != session2 + session1 = client1.transport.list_networks._session + session2 = client2.transport.list_networks._session + assert session1 != session2 def test_ad_unit_path(): @@ -1697,7 +1861,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. 
@@ -1724,7 +1888,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py index e9bf7e0b618d..f3de0981207f 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py @@ -50,7 +50,13 @@ pagers, transports, ) -from google.ads.admanager_v1.types import applied_label, order_service +from google.ads.admanager_v1.types import ( + applied_label, + custom_field_value, + order_enums, + order_messages, + order_service, +) def client_cert_source_callback(): @@ -956,7 +962,7 @@ def test_get_order_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = order_service.Order( + return_value = order_messages.Order( name="name_value", order_id=840, display_name="display_name_value", @@ -970,12 +976,13 @@ def test_get_order_rest(request_type): effective_teams=["effective_teams_value"], creator="creator_value", currency_code="currency_code_value", + unlimited_end_time=True, external_order_id=1802, archived=True, last_modified_by_app="last_modified_by_app_value", notes="notes_value", po_number="po_number_value", - status=order_service.Order.Status.DRAFT, + status=order_enums.OrderStatusEnum.OrderStatus.DRAFT, salesperson="salesperson_value", secondary_salespeople=["secondary_salespeople_value"], secondary_traffickers=["secondary_traffickers_value"], @@ -985,7 +992,7 @@ def test_get_order_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = order_service.Order.pb(return_value) + return_value = order_messages.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -993,7 +1000,7 @@ def test_get_order_rest(request_type): response = client.get_order(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, order_service.Order) + assert isinstance(response, order_messages.Order) assert response.name == "name_value" assert response.order_id == 840 assert response.display_name == "display_name_value" @@ -1007,12 +1014,13 @@ def test_get_order_rest(request_type): assert response.effective_teams == ["effective_teams_value"] assert response.creator == "creator_value" assert response.currency_code == "currency_code_value" + assert response.unlimited_end_time is True assert response.external_order_id == 1802 assert response.archived is True assert response.last_modified_by_app == "last_modified_by_app_value" assert response.notes == "notes_value" assert response.po_number == "po_number_value" - assert response.status == order_service.Order.Status.DRAFT + assert response.status == order_enums.OrderStatusEnum.OrderStatus.DRAFT assert response.salesperson == "salesperson_value" assert response.secondary_salespeople == ["secondary_salespeople_value"] assert response.secondary_traffickers == ["secondary_traffickers_value"] @@ -1092,7 +1100,7 @@ def test_get_order_rest_required_fields(request_type=order_service.GetOrderReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = order_service.Order() + return_value = order_messages.Order() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1113,7 +1121,7 @@ def test_get_order_rest_required_fields(request_type=order_service.GetOrderReque response_value.status_code = 200 # Convert return value to protobuf type - return_value = order_service.Order.pb(return_value) + return_value = order_messages.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1166,7 +1174,7 @@ def test_get_order_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = order_service.Order.to_json(order_service.Order()) + req.return_value._content = order_messages.Order.to_json(order_messages.Order()) request = order_service.GetOrderRequest() metadata = [ @@ -1174,7 +1182,7 @@ def test_get_order_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = order_service.Order() + post.return_value = order_messages.Order() client.get_order( request, @@ -1221,7 +1229,7 @@ def test_get_order_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = order_service.Order() + return_value = order_messages.Order() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/orders/sample2"} @@ -1236,7 +1244,7 @@ def test_get_order_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = order_service.Order.pb(return_value) + return_value = order_messages.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1603,9 +1611,9 @@ def test_list_orders_rest_pager(transport: str = "rest"): response = ( order_service.ListOrdersResponse( orders=[ - order_service.Order(), - order_service.Order(), - order_service.Order(), + order_messages.Order(), + order_messages.Order(), + order_messages.Order(), ], next_page_token="abc", ), @@ -1615,14 +1623,14 @@ def test_list_orders_rest_pager(transport: str = "rest"): ), order_service.ListOrdersResponse( orders=[ - order_service.Order(), + order_messages.Order(), ], next_page_token="ghi", ), order_service.ListOrdersResponse( orders=[ - order_service.Order(), - order_service.Order(), + order_messages.Order(), + order_messages.Order(), ], ), ) @@ -1643,7 +1651,7 @@ def test_list_orders_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, order_service.Order) for i in results) + assert all(isinstance(i, order_messages.Order) for i in results) pages = list(client.list_orders(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -1948,9 +1956,32 @@ def test_parse_contact_path(): assert expected == actual -def test_label_path(): +def test_custom_field_path(): network_code = "winkle" - label = "nautilus" + custom_field = "nautilus" + expected = "networks/{network_code}/customFields/{custom_field}".format( + network_code=network_code, + 
custom_field=custom_field, + ) + actual = OrderServiceClient.custom_field_path(network_code, custom_field) + assert expected == actual + + +def test_parse_custom_field_path(): + expected = { + "network_code": "scallop", + "custom_field": "abalone", + } + path = OrderServiceClient.custom_field_path(**expected) + + # Check that the path construction is reversible. + actual = OrderServiceClient.parse_custom_field_path(path) + assert expected == actual + + +def test_label_path(): + network_code = "squid" + label = "clam" expected = "networks/{network_code}/labels/{label}".format( network_code=network_code, label=label, @@ -1961,8 +1992,8 @@ def test_label_path(): def test_parse_label_path(): expected = { - "network_code": "scallop", - "label": "abalone", + "network_code": "whelk", + "label": "octopus", } path = OrderServiceClient.label_path(**expected) @@ -1972,7 +2003,7 @@ def test_parse_label_path(): def test_network_path(): - network_code = "squid" + network_code = "oyster" expected = "networks/{network_code}".format( network_code=network_code, ) @@ -1982,7 +2013,7 @@ def test_network_path(): def test_parse_network_path(): expected = { - "network_code": "clam", + "network_code": "nudibranch", } path = OrderServiceClient.network_path(**expected) @@ -1992,8 +2023,8 @@ def test_parse_network_path(): def test_order_path(): - network_code = "whelk" - order = "octopus" + network_code = "cuttlefish" + order = "mussel" expected = "networks/{network_code}/orders/{order}".format( network_code=network_code, order=order, @@ -2004,8 +2035,8 @@ def test_order_path(): def test_parse_order_path(): expected = { - "network_code": "oyster", - "order": "nudibranch", + "network_code": "winkle", + "order": "nautilus", } path = OrderServiceClient.order_path(**expected) @@ -2015,8 +2046,8 @@ def test_parse_order_path(): def test_team_path(): - network_code = "cuttlefish" - team = "mussel" + network_code = "scallop" + team = "abalone" expected = 
"networks/{network_code}/teams/{team}".format( network_code=network_code, team=team, @@ -2027,8 +2058,8 @@ def test_team_path(): def test_parse_team_path(): expected = { - "network_code": "winkle", - "team": "nautilus", + "network_code": "squid", + "team": "clam", } path = OrderServiceClient.team_path(**expected) @@ -2038,8 +2069,8 @@ def test_parse_team_path(): def test_user_path(): - network_code = "scallop" - user = "abalone" + network_code = "whelk" + user = "octopus" expected = "networks/{network_code}/users/{user}".format( network_code=network_code, user=user, @@ -2050,8 +2081,8 @@ def test_user_path(): def test_parse_user_path(): expected = { - "network_code": "squid", - "user": "clam", + "network_code": "oyster", + "user": "nudibranch", } path = OrderServiceClient.user_path(**expected) @@ -2061,7 +2092,7 @@ def test_parse_user_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2071,7 +2102,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "mussel", } path = OrderServiceClient.common_billing_account_path(**expected) @@ -2081,7 +2112,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -2091,7 +2122,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "nautilus", } path = OrderServiceClient.common_folder_path(**expected) @@ -2101,7 +2132,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -2111,7 +2142,7 @@ def test_common_organization_path(): def 
test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "abalone", } path = OrderServiceClient.common_organization_path(**expected) @@ -2121,7 +2152,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -2131,7 +2162,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "clam", } path = OrderServiceClient.common_project_path(**expected) @@ -2141,8 +2172,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -2153,8 +2184,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "oyster", + "location": "nudibranch", } path = OrderServiceClient.common_location_path(**expected) @@ -2196,7 +2227,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2223,7 +2254,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py index bcfdc6a89ea0..96d00d2422b4 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py @@ -50,7 +50,11 @@ pagers, transports, ) -from google.ads.admanager_v1.types import placement_enums, placement_service +from google.ads.admanager_v1.types import ( + placement_enums, + placement_messages, + placement_service, +) def client_cert_source_callback(): @@ -993,7 +997,7 @@ def test_get_placement_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = placement_service.Placement( + return_value = placement_messages.Placement( name="name_value", placement_id=1253, display_name="display_name_value", @@ -1007,7 +1011,7 @@ def test_get_placement_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = placement_service.Placement.pb(return_value) + return_value = placement_messages.Placement.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1015,7 +1019,7 @@ def test_get_placement_rest(request_type): response = client.get_placement(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, placement_service.Placement) + assert isinstance(response, placement_messages.Placement) assert response.name == "name_value" assert response.placement_id == 1253 assert response.display_name == "display_name_value" @@ -1101,7 +1105,7 @@ def test_get_placement_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = placement_service.Placement() + return_value = placement_messages.Placement() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1122,7 +1126,7 @@ def test_get_placement_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = placement_service.Placement.pb(return_value) + return_value = placement_messages.Placement.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1177,8 +1181,8 @@ def test_get_placement_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = placement_service.Placement.to_json( - placement_service.Placement() + req.return_value._content = placement_messages.Placement.to_json( + placement_messages.Placement() ) request = placement_service.GetPlacementRequest() @@ -1187,7 +1191,7 @@ def test_get_placement_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = placement_service.Placement() + post.return_value = placement_messages.Placement() client.get_placement( request, @@ -1234,7 +1238,7 @@ def test_get_placement_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = placement_service.Placement() + return_value = placement_messages.Placement() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/placements/sample2"} @@ -1249,7 +1253,7 @@ def test_get_placement_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = placement_service.Placement.pb(return_value) + return_value = placement_messages.Placement.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1618,9 +1622,9 @@ def test_list_placements_rest_pager(transport: str = "rest"): response = ( placement_service.ListPlacementsResponse( placements=[ - placement_service.Placement(), - placement_service.Placement(), - placement_service.Placement(), + placement_messages.Placement(), + placement_messages.Placement(), + placement_messages.Placement(), ], next_page_token="abc", ), @@ -1630,14 +1634,14 @@ def test_list_placements_rest_pager(transport: str = "rest"): ), placement_service.ListPlacementsResponse( placements=[ - placement_service.Placement(), + placement_messages.Placement(), ], next_page_token="ghi", ), placement_service.ListPlacementsResponse( placements=[ - placement_service.Placement(), - placement_service.Placement(), + placement_messages.Placement(), + placement_messages.Placement(), ], ), ) @@ -1660,7 +1664,7 @@ def test_list_placements_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, placement_service.Placement) for i in results) + assert all(isinstance(i, placement_messages.Placement) for i in results) pages = list(client.list_placements(request=sample_request).pages) for page_, token in zip(pages, 
["abc", "def", "ghi", ""]): @@ -2121,7 +2125,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2148,7 +2152,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py index 9429af4d5338..687f2f1961b5 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py @@ -44,7 +44,12 @@ from google.auth.exceptions import MutualTLSChannelError from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -55,6 +60,7 @@ from google.ads.admanager_v1.services.report_service import ( ReportServiceClient, + pagers, transports, ) from google.ads.admanager_v1.types import 
report_service @@ -959,18 +965,1643 @@ def test_report_service_client_client_options_credentials_file( @pytest.mark.parametrize( "request_type", [ - report_service.ExportSavedReportRequest, + report_service.GetReportRequest, dict, ], ) -def test_export_saved_report_rest(request_type): +def test_get_report_rest(request_type): client = ReportServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"report": "networks/sample1/reports/sample2"} + request_init = {"name": "networks/sample1/reports/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.Report( + name="name_value", + report_id=968, + visibility=report_service.Report.Visibility.DRAFT, + display_name="display_name_value", + locale="locale_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_report(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, report_service.Report) + assert response.name == "name_value" + assert response.report_id == 968 + assert response.visibility == report_service.Report.Visibility.DRAFT + assert response.display_name == "display_name_value" + assert response.locale == "locale_value" + + +def test_get_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_report] = mock_rpc + + request = {} + client.get_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_report_rest_required_fields(request_type=report_service.GetReportRequest): + transport_class = transports.ReportServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = report_service.Report() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_report(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_report_rest_unset_required_fields(): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_report._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_report_rest_interceptors(null_interceptor): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReportServiceRestInterceptor(), + ) + client = ReportServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_get_report" + ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "pre_get_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = report_service.GetReportRequest.pb( + 
report_service.GetReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = report_service.Report.to_json( + report_service.Report() + ) + + request = report_service.GetReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = report_service.Report() + + client.get_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_report_rest_bad_request( + transport: str = "rest", request_type=report_service.GetReportRequest +): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "networks/sample1/reports/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_report(request) + + +def test_get_report_rest_flattened(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = report_service.Report() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "networks/sample1/reports/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=networks/*/reports/*}" % client.transport._host, args[1] + ) + + +def test_get_report_rest_flattened_error(transport: str = "rest"): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_report( + report_service.GetReportRequest(), + name="name_value", + ) + + +def test_get_report_rest_error(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + report_service.ListReportsRequest, + dict, + ], +) +def test_list_reports_rest(request_type): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.ListReportsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.ListReportsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_reports(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportsPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_reports_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_reports in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_reports] = mock_rpc + + request = {} + client.list_reports(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_reports(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_reports_rest_required_fields( + request_type=report_service.ListReportsRequest, +): + transport_class = transports.ReportServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_reports._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_reports._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "skip", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = report_service.ListReportsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = report_service.ListReportsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_reports(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_reports_rest_unset_required_fields(): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_reports._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "skip", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_reports_rest_interceptors(null_interceptor): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReportServiceRestInterceptor(), + ) + client = ReportServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_list_reports" + ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "pre_list_reports" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = report_service.ListReportsRequest.pb( + report_service.ListReportsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = report_service.ListReportsResponse.to_json( + report_service.ListReportsResponse() + ) + + request = report_service.ListReportsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = report_service.ListReportsResponse() + + client.list_reports( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_reports_rest_bad_request( + transport: str = "rest", request_type=report_service.ListReportsRequest +): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_reports(request) + + +def test_list_reports_rest_flattened(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.ListReportsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.ListReportsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_reports(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/reports" % client.transport._host, args[1] + ) + + +def test_list_reports_rest_flattened_error(transport: str = "rest"): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_reports( + report_service.ListReportsRequest(), + parent="parent_value", + ) + + +def test_list_reports_rest_pager(transport: str = "rest"): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + report_service.ListReportsResponse( + reports=[ + report_service.Report(), + report_service.Report(), + report_service.Report(), + ], + next_page_token="abc", + ), + report_service.ListReportsResponse( + reports=[], + next_page_token="def", + ), + report_service.ListReportsResponse( + reports=[ + report_service.Report(), + ], + next_page_token="ghi", + ), + report_service.ListReportsResponse( + reports=[ + report_service.Report(), + report_service.Report(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + report_service.ListReportsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "networks/sample1"} + + pager = client.list_reports(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, report_service.Report) for i in results) + + pages = list(client.list_reports(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + 
[ + report_service.CreateReportRequest, + dict, + ], +) +def test_create_report_rest(request_type): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request_init["report"] = { + "name": "name_value", + "report_id": 968, + "visibility": 1, + "report_definition": { + "dimensions": [242], + "metrics": [61], + "filters": [ + { + "field_filter": { + "field": {"dimension": 242, "metric": 61}, + "operation": 1, + "values": [ + { + "int_value": 967, + "double_value": 0.12710000000000002, + "string_value": "string_value_value", + "bool_value": True, + "int_list_value": {"values": [657, 658]}, + "string_list_value": { + "values": ["values_value1", "values_value2"] + }, + "bytes_value": b"bytes_value_blob", + } + ], + "slice_": {"dimension": 242, "value": {}}, + "time_period_index": 1800, + "metric_value_type": 1, + }, + "not_filter": {}, + "and_filter": {"filters": {}}, + "or_filter": {}, + } + ], + "time_zone": "time_zone_value", + "currency_code": "currency_code_value", + "date_range": { + "fixed": { + "start_date": {"year": 433, "month": 550, "day": 318}, + "end_date": {}, + }, + "relative": 1, + }, + "comparison_date_range": {}, + "custom_dimension_key_ids": [2568, 2569], + "line_item_custom_field_ids": [2739, 2740], + "order_custom_field_ids": [2329, 2330], + "creative_custom_field_ids": [2640, 2641], + "report_type": 1, + "time_period_column": 1, + "flags": [{"filters": {}, "name": "name_value"}], + "sorts": [ + { + "field": {}, + "descending": True, + "slice_": {}, + "time_period_index": 1800, + "metric_value_type": 1, + } + ], + }, + "display_name": "display_name_value", + "update_time": {"seconds": 751, "nanos": 543}, + "create_time": {}, + "locale": "locale_value", + "schedule_options": { + "schedule": { + "weekly_schedule": {"weekly_scheduled_days": [1]}, + "monthly_schedule": {"monthly_scheduled_days": 
[2348, 2349]}, + "start_date": {}, + "end_date": {}, + "frequency": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + }, + "delivery_condition": 1, + "flags": {}, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = report_service.CreateReportRequest.meta.fields["report"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["report"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, 
"keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["report"][field])): + del request_init["report"][field][i][subfield] + else: + del request_init["report"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.Report( + name="name_value", + report_id=968, + visibility=report_service.Report.Visibility.DRAFT, + display_name="display_name_value", + locale="locale_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_report(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, report_service.Report) + assert response.name == "name_value" + assert response.report_id == 968 + assert response.visibility == report_service.Report.Visibility.DRAFT + assert response.display_name == "display_name_value" + assert response.locale == "locale_value" + + +def test_create_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_report] = mock_rpc + + request = {} + client.create_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_report_rest_required_fields( + request_type=report_service.CreateReportRequest, +): + transport_class = transports.ReportServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = report_service.Report() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_report(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_report_rest_unset_required_fields(): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_report._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "report", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_report_rest_interceptors(null_interceptor): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReportServiceRestInterceptor(), + ) + client = ReportServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_create_report" + ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "pre_create_report" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = report_service.CreateReportRequest.pb( + report_service.CreateReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = report_service.Report.to_json( + report_service.Report() + ) + + request = report_service.CreateReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = report_service.Report() + + client.create_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_report_rest_bad_request( + transport: str = "rest", request_type=report_service.CreateReportRequest +): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_report(request) + + +def test_create_report_rest_flattened(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = report_service.Report() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + report=report_service.Report(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/reports" % client.transport._host, args[1] + ) + + +def test_create_report_rest_flattened_error(transport: str = "rest"): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_report( + report_service.CreateReportRequest(), + parent="parent_value", + report=report_service.Report(name="name_value"), + ) + + +def test_create_report_rest_error(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + report_service.UpdateReportRequest, + dict, + ], +) +def test_update_report_rest(request_type): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"report": {"name": "networks/sample1/reports/sample2"}} + request_init["report"] = { + "name": "networks/sample1/reports/sample2", + "report_id": 968, + "visibility": 1, + "report_definition": { + "dimensions": [242], + "metrics": [61], + "filters": [ + { + "field_filter": { + "field": {"dimension": 242, "metric": 61}, + "operation": 1, + "values": [ + { + "int_value": 967, + "double_value": 0.12710000000000002, + "string_value": "string_value_value", + "bool_value": True, + "int_list_value": {"values": [657, 658]}, + "string_list_value": { + "values": ["values_value1", "values_value2"] + }, + "bytes_value": b"bytes_value_blob", + } + ], + "slice_": {"dimension": 242, "value": {}}, + "time_period_index": 1800, + "metric_value_type": 1, + }, + "not_filter": {}, + "and_filter": {"filters": {}}, + "or_filter": {}, + } + ], + "time_zone": "time_zone_value", + "currency_code": "currency_code_value", + "date_range": { + "fixed": { + "start_date": {"year": 433, "month": 550, "day": 318}, + "end_date": {}, + }, + "relative": 1, + }, + "comparison_date_range": {}, + "custom_dimension_key_ids": [2568, 2569], + "line_item_custom_field_ids": [2739, 2740], + "order_custom_field_ids": [2329, 2330], + "creative_custom_field_ids": [2640, 2641], + "report_type": 1, + "time_period_column": 1, + "flags": [{"filters": {}, "name": 
"name_value"}], + "sorts": [ + { + "field": {}, + "descending": True, + "slice_": {}, + "time_period_index": 1800, + "metric_value_type": 1, + } + ], + }, + "display_name": "display_name_value", + "update_time": {"seconds": 751, "nanos": 543}, + "create_time": {}, + "locale": "locale_value", + "schedule_options": { + "schedule": { + "weekly_schedule": {"weekly_scheduled_days": [1]}, + "monthly_schedule": {"monthly_scheduled_days": [2348, 2349]}, + "start_date": {}, + "end_date": {}, + "frequency": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + }, + "delivery_condition": 1, + "flags": {}, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = report_service.UpdateReportRequest.meta.fields["report"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["report"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["report"][field])): + del request_init["report"][field][i][subfield] + else: + del 
request_init["report"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.Report( + name="name_value", + report_id=968, + visibility=report_service.Report.Visibility.DRAFT, + display_name="display_name_value", + locale="locale_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_report(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, report_service.Report) + assert response.name == "name_value" + assert response.report_id == 968 + assert response.visibility == report_service.Report.Visibility.DRAFT + assert response.display_name == "display_name_value" + assert response.locale == "locale_value" + + +def test_update_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # 
operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_report] = mock_rpc + + request = {} + client.update_report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_report_rest_required_fields( + request_type=report_service.UpdateReportRequest, +): + transport_class = transports.ReportServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_report._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = report_service.Report() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_report(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_report_rest_unset_required_fields(): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_report._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "report", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_report_rest_interceptors(null_interceptor): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReportServiceRestInterceptor(), + ) + client = ReportServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_update_report" + ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "pre_update_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = report_service.UpdateReportRequest.pb( + report_service.UpdateReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = report_service.Report.to_json( + report_service.Report() + ) + + request = report_service.UpdateReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = report_service.Report() + + client.update_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_report_rest_bad_request( + transport: str = "rest", request_type=report_service.UpdateReportRequest +): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"report": {"name": "networks/sample1/reports/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_report(request) + + +def test_update_report_rest_flattened(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.Report() + + # get arguments that satisfy an http rule for this method + sample_request = {"report": {"name": "networks/sample1/reports/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + report=report_service.Report(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{report.name=networks/*/reports/*}" % client.transport._host, args[1] + ) + + +def test_update_report_rest_flattened_error(transport: str = "rest"): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_report( + report_service.UpdateReportRequest(), + report=report_service.Report(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_report_rest_error(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + report_service.RunReportRequest, + dict, + ], +) +def test_run_report_rest(request_type): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "networks/sample1/reports/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -985,13 +2616,13 @@ def test_export_saved_report_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_saved_report(request) + response = client.run_report(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_export_saved_report_rest_use_cached_wrapped_rpc(): +def test_run_report_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -1005,21 +2636,17 @@ def test_export_saved_report_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.export_saved_report in client._transport._wrapped_methods - ) + assert client._transport.run_report in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.export_saved_report - ] = mock_rpc + client._transport._wrapped_methods[client._transport.run_report] = mock_rpc request = {} - client.export_saved_report(request) + client.run_report(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -1028,19 +2655,18 @@ def test_export_saved_report_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.export_saved_report(request) + client.run_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_export_saved_report_rest_required_fields( - request_type=report_service.ExportSavedReportRequest, -): +def test_run_report_rest_required_fields(request_type=report_service.RunReportRequest): transport_class = transports.ReportServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -1051,17 +2677,21 @@ def test_export_saved_report_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_saved_report._get_unset_required_fields(jsonified_request) + ).run_report._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_saved_report._get_unset_required_fields(jsonified_request) + ).run_report._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ReportServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1095,24 +2725,24 @@ def test_export_saved_report_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_saved_report(request) + response = client.run_report(request) expected_params = [("$alt", 
"json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_saved_report_rest_unset_required_fields(): +def test_run_report_rest_unset_required_fields(): transport = transports.ReportServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_saved_report._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("format",))) + unset_fields = transport.run_report._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_saved_report_rest_interceptors(null_interceptor): +def test_run_report_rest_interceptors(null_interceptor): transport = transports.ReportServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1127,14 +2757,14 @@ def test_export_saved_report_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ReportServiceRestInterceptor, "post_export_saved_report" + transports.ReportServiceRestInterceptor, "post_run_report" ) as post, mock.patch.object( - transports.ReportServiceRestInterceptor, "pre_export_saved_report" + transports.ReportServiceRestInterceptor, "pre_run_report" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = report_service.ExportSavedReportRequest.pb( - report_service.ExportSavedReportRequest() + pb_message = report_service.RunReportRequest.pb( + report_service.RunReportRequest() ) transcode.return_value = { "method": "post", @@ -1150,7 +2780,7 @@ def test_export_saved_report_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = report_service.ExportSavedReportRequest() + request = report_service.RunReportRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -1158,7 +2788,7 @@ def 
test_export_saved_report_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.export_saved_report( + client.run_report( request, metadata=[ ("key", "val"), @@ -1170,8 +2800,8 @@ def test_export_saved_report_rest_interceptors(null_interceptor): post.assert_called_once() -def test_export_saved_report_rest_bad_request( - transport: str = "rest", request_type=report_service.ExportSavedReportRequest +def test_run_report_rest_bad_request( + transport: str = "rest", request_type=report_service.RunReportRequest ): client = ReportServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1179,7 +2809,7 @@ def test_export_saved_report_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"report": "networks/sample1/reports/sample2"} + request_init = {"name": "networks/sample1/reports/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -1191,10 +2821,10 @@ def test_export_saved_report_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.export_saved_report(request) + client.run_report(request) -def test_export_saved_report_rest_flattened(): +def test_run_report_rest_flattened(): client = ReportServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1206,35 +2836,262 @@ def test_export_saved_report_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"report": "networks/sample1/reports/sample2"} + sample_request = {"name": "networks/sample1/reports/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.run_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=networks/*/reports/*}:run" % client.transport._host, args[1] + ) + + +def test_run_report_rest_flattened_error(transport: str = "rest"): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.run_report( + report_service.RunReportRequest(), + name="name_value", + ) + + +def test_run_report_rest_error(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + report_service.FetchReportResultRowsRequest, + dict, + ], +) +def test_fetch_report_result_rows_rest(request_type): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "networks/sample1/reports/sample2/results/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.FetchReportResultRowsResponse( + total_row_count=1635, + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.FetchReportResultRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_report_result_rows(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchReportResultRowsPager) + assert response.total_row_count == 1635 + assert response.next_page_token == "next_page_token_value" + + +def test_fetch_report_result_rows_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_report_result_rows + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_report_result_rows + ] = mock_rpc + + request = {} + client.fetch_report_result_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.fetch_report_result_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_report_result_rows_rest_interceptors(null_interceptor): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReportServiceRestInterceptor(), + ) + client = ReportServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_fetch_report_result_rows" + ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "pre_fetch_report_result_rows" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = report_service.FetchReportResultRowsRequest.pb( + report_service.FetchReportResultRowsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + report_service.FetchReportResultRowsResponse.to_json( + report_service.FetchReportResultRowsResponse() + ) + ) + + request = report_service.FetchReportResultRowsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = report_service.FetchReportResultRowsResponse() + + client.fetch_report_result_rows( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_report_result_rows_rest_bad_request( + transport: str = 
"rest", request_type=report_service.FetchReportResultRowsRequest +): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "networks/sample1/reports/sample2/results/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_report_result_rows(request) + + +def test_fetch_report_result_rows_rest_flattened(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = report_service.FetchReportResultRowsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "networks/sample1/reports/sample2/results/sample3"} # get truthy value for each flattened field mock_args = dict( - report="report_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.FetchReportResultRowsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.export_saved_report(**mock_args) + client.fetch_report_result_rows(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{report=networks/*/reports/*}:exportSavedReport" + "%s/v1/{name=networks/*/reports/*/results/*}:fetchRows" % client.transport._host, args[1], ) -def test_export_saved_report_rest_flattened_error(transport: str = "rest"): +def test_fetch_report_result_rows_rest_flattened_error(transport: str = "rest"): client = ReportServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1243,17 +3100,74 @@ def test_export_saved_report_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.export_saved_report( - report_service.ExportSavedReportRequest(), - report="report_value", + client.fetch_report_result_rows( + report_service.FetchReportResultRowsRequest(), + name="name_value", ) -def test_export_saved_report_rest_error(): +def test_fetch_report_result_rows_rest_pager(transport: str = "rest"): client = ReportServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + report_service.FetchReportResultRowsResponse( + rows=[ + report_service.Report.DataTable.Row(), + report_service.Report.DataTable.Row(), + report_service.Report.DataTable.Row(), + ], + next_page_token="abc", + ), + report_service.FetchReportResultRowsResponse( + rows=[], + next_page_token="def", + ), + report_service.FetchReportResultRowsResponse( + rows=[ + report_service.Report.DataTable.Row(), + ], + next_page_token="ghi", + ), + report_service.FetchReportResultRowsResponse( + rows=[ + report_service.Report.DataTable.Row(), + report_service.Report.DataTable.Row(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + report_service.FetchReportResultRowsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"name": "networks/sample1/reports/sample2/results/sample3"} + + pager = 
client.fetch_report_result_rows(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, report_service.Report.DataTable.Row) for i in results) + + pages = list(client.fetch_report_result_rows(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. @@ -1365,7 +3279,12 @@ def test_report_service_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. methods = ( - "export_saved_report", + "get_report", + "list_reports", + "create_report", + "update_report", + "run_report", + "fetch_report_result_rows", "get_operation", ) for method in methods: @@ -1520,14 +3439,49 @@ def test_report_service_client_transport_session_collision(transport_name): credentials=creds2, transport=transport_name, ) - session1 = client1.transport.export_saved_report._session - session2 = client2.transport.export_saved_report._session + session1 = client1.transport.get_report._session + session2 = client2.transport.get_report._session + assert session1 != session2 + session1 = client1.transport.list_reports._session + session2 = client2.transport.list_reports._session + assert session1 != session2 + session1 = client1.transport.create_report._session + session2 = client2.transport.create_report._session + assert session1 != session2 + session1 = client1.transport.update_report._session + session2 = client2.transport.update_report._session + assert session1 != session2 + session1 = client1.transport.run_report._session + session2 = client2.transport.run_report._session + assert session1 != session2 + session1 = client1.transport.fetch_report_result_rows._session + session2 = client2.transport.fetch_report_result_rows._session assert session1 != session2 -def test_report_path(): +def test_network_path(): network_code = 
"squid" - report = "clam" + expected = "networks/{network_code}".format( + network_code=network_code, + ) + actual = ReportServiceClient.network_path(network_code) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "network_code": "clam", + } + path = ReportServiceClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = ReportServiceClient.parse_network_path(path) + assert expected == actual + + +def test_report_path(): + network_code = "whelk" + report = "octopus" expected = "networks/{network_code}/reports/{report}".format( network_code=network_code, report=report, @@ -1538,8 +3492,8 @@ def test_report_path(): def test_parse_report_path(): expected = { - "network_code": "whelk", - "report": "octopus", + "network_code": "oyster", + "report": "nudibranch", } path = ReportServiceClient.report_path(**expected) @@ -1549,7 +3503,7 @@ def test_parse_report_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -1559,7 +3513,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "mussel", } path = ReportServiceClient.common_billing_account_path(**expected) @@ -1569,7 +3523,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -1579,7 +3533,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "nautilus", } path = ReportServiceClient.common_folder_path(**expected) @@ -1589,7 +3543,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "scallop" expected = "organizations/{organization}".format( 
organization=organization, ) @@ -1599,7 +3553,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "abalone", } path = ReportServiceClient.common_organization_path(**expected) @@ -1609,7 +3563,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -1619,7 +3573,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "clam", } path = ReportServiceClient.common_project_path(**expected) @@ -1629,8 +3583,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -1641,8 +3595,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "oyster", + "location": "nudibranch", } path = ReportServiceClient.common_location_path(**expected) @@ -1684,7 +3638,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -1711,7 +3665,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py index 5e255d3ce5b0..6f32f53c1607 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py @@ -49,7 +49,7 @@ pagers, transports, ) -from google.ads.admanager_v1.types import role_service +from google.ads.admanager_v1.types import role_enums, role_messages, role_service def client_cert_source_callback(): @@ -951,15 +951,20 @@ def test_get_role_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = role_service.Role( + return_value = role_messages.Role( name="name_value", + role_id=734, + display_name="display_name_value", + description="description_value", + built_in=True, + status=role_enums.RoleStatusEnum.RoleStatus.ACTIVE, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = role_service.Role.pb(return_value) + return_value = role_messages.Role.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -967,8 +972,13 @@ def test_get_role_rest(request_type): response = client.get_role(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, role_service.Role) + assert isinstance(response, role_messages.Role) assert response.name == "name_value" + assert response.role_id == 734 + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.built_in is True + assert response.status == role_enums.RoleStatusEnum.RoleStatus.ACTIVE def test_get_role_rest_use_cached_wrapped_rpc(): @@ -1045,7 +1055,7 @@ def test_get_role_rest_required_fields(request_type=role_service.GetRoleRequest) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = role_service.Role() + return_value = role_messages.Role() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1066,7 +1076,7 @@ def test_get_role_rest_required_fields(request_type=role_service.GetRoleRequest) response_value.status_code = 200 # Convert return value to protobuf type - return_value = role_service.Role.pb(return_value) + return_value = role_messages.Role.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1119,7 +1129,7 @@ def test_get_role_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = role_service.Role.to_json(role_service.Role()) + req.return_value._content = role_messages.Role.to_json(role_messages.Role()) request = role_service.GetRoleRequest() metadata = [ @@ -1127,7 +1137,7 @@ def test_get_role_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = role_service.Role() + post.return_value = role_messages.Role() client.get_role( request, @@ -1174,7 +1184,7 @@ def test_get_role_rest_flattened(): 
# Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = role_service.Role() + return_value = role_messages.Role() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/roles/sample2"} @@ -1189,7 +1199,7 @@ def test_get_role_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = role_service.Role.pb(return_value) + return_value = role_messages.Role.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1554,9 +1564,9 @@ def test_list_roles_rest_pager(transport: str = "rest"): response = ( role_service.ListRolesResponse( roles=[ - role_service.Role(), - role_service.Role(), - role_service.Role(), + role_messages.Role(), + role_messages.Role(), + role_messages.Role(), ], next_page_token="abc", ), @@ -1566,14 +1576,14 @@ def test_list_roles_rest_pager(transport: str = "rest"): ), role_service.ListRolesResponse( roles=[ - role_service.Role(), + role_messages.Role(), ], next_page_token="ghi", ), role_service.ListRolesResponse( roles=[ - role_service.Role(), - role_service.Role(), + role_messages.Role(), + role_messages.Role(), ], ), ) @@ -1594,7 +1604,7 @@ def test_list_roles_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, role_service.Role) for i in results) + assert all(isinstance(i, role_messages.Role) for i in results) pages = list(client.list_roles(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -2032,7 +2042,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": 
"networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2059,7 +2069,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_partner_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py similarity index 75% rename from packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_partner_service.py rename to packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py index 395841658712..c6f333dc2ed3 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_partner_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py @@ -44,12 +44,16 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.ads.admanager_v1.services.ad_partner_service import ( - AdPartnerServiceClient, +from google.ads.admanager_v1.services.taxonomy_category_service import ( + TaxonomyCategoryServiceClient, pagers, transports, ) -from google.ads.admanager_v1.types import ad_partner_service +from google.ads.admanager_v1.types import ( + taxonomy_category_messages, + taxonomy_category_service, + taxonomy_type_enum, +) def client_cert_source_callback(): @@ -85,41 +89,45 @@ def test__get_default_mtls_endpoint(): sandbox_mtls_endpoint = 
"example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" - assert AdPartnerServiceClient._get_default_mtls_endpoint(None) is None + assert TaxonomyCategoryServiceClient._get_default_mtls_endpoint(None) is None assert ( - AdPartnerServiceClient._get_default_mtls_endpoint(api_endpoint) + TaxonomyCategoryServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint ) assert ( - AdPartnerServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + TaxonomyCategoryServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint ) assert ( - AdPartnerServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + TaxonomyCategoryServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint ) assert ( - AdPartnerServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + TaxonomyCategoryServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint ) assert ( - AdPartnerServiceClient._get_default_mtls_endpoint(non_googleapi) + TaxonomyCategoryServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi ) def test__read_environment_variables(): - assert AdPartnerServiceClient._read_environment_variables() == (False, "auto", None) + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AdPartnerServiceClient._read_environment_variables() == ( + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( True, "auto", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AdPartnerServiceClient._read_environment_variables() == ( + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( False, "auto", None, @@ -129,28 +137,28 @@ def test__read_environment_variables(): os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with 
pytest.raises(ValueError) as excinfo: - AdPartnerServiceClient._read_environment_variables() + TaxonomyCategoryServiceClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AdPartnerServiceClient._read_environment_variables() == ( + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( False, "never", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AdPartnerServiceClient._read_environment_variables() == ( + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( False, "always", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AdPartnerServiceClient._read_environment_variables() == ( + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( False, "auto", None, @@ -158,14 +166,14 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: - AdPartnerServiceClient._read_environment_variables() + TaxonomyCategoryServiceClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AdPartnerServiceClient._read_environment_variables() == ( + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( False, "auto", "foo.com", @@ -176,13 +184,17 @@ def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() - assert AdPartnerServiceClient._get_client_cert_source(None, False) is None + assert TaxonomyCategoryServiceClient._get_client_cert_source(None, False) is None assert ( - 
AdPartnerServiceClient._get_client_cert_source(mock_provided_cert_source, False) + TaxonomyCategoryServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) is None ) assert ( - AdPartnerServiceClient._get_client_cert_source(mock_provided_cert_source, True) + TaxonomyCategoryServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) == mock_provided_cert_source ) @@ -194,11 +206,11 @@ def test__get_client_cert_source(): return_value=mock_default_cert_source, ): assert ( - AdPartnerServiceClient._get_client_cert_source(None, True) + TaxonomyCategoryServiceClient._get_client_cert_source(None, True) is mock_default_cert_source ) assert ( - AdPartnerServiceClient._get_client_cert_source( + TaxonomyCategoryServiceClient._get_client_cert_source( mock_provided_cert_source, "true" ) is mock_provided_cert_source @@ -206,59 +218,67 @@ def test__get_client_cert_source(): @mock.patch.object( - AdPartnerServiceClient, + TaxonomyCategoryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(AdPartnerServiceClient), + modify_default_endpoint_template(TaxonomyCategoryServiceClient), ) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() - default_universe = AdPartnerServiceClient._DEFAULT_UNIVERSE - default_endpoint = AdPartnerServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = TaxonomyCategoryServiceClient._DEFAULT_UNIVERSE + default_endpoint = TaxonomyCategoryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = AdPartnerServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = TaxonomyCategoryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) assert ( - AdPartnerServiceClient._get_api_endpoint( + TaxonomyCategoryServiceClient._get_api_endpoint( api_override, mock_client_cert_source, default_universe, "always" ) == api_override ) assert ( - 
AdPartnerServiceClient._get_api_endpoint( + TaxonomyCategoryServiceClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "auto" ) - == AdPartnerServiceClient.DEFAULT_MTLS_ENDPOINT + == TaxonomyCategoryServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - AdPartnerServiceClient._get_api_endpoint(None, None, default_universe, "auto") + TaxonomyCategoryServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) == default_endpoint ) assert ( - AdPartnerServiceClient._get_api_endpoint(None, None, default_universe, "always") - == AdPartnerServiceClient.DEFAULT_MTLS_ENDPOINT + TaxonomyCategoryServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == TaxonomyCategoryServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - AdPartnerServiceClient._get_api_endpoint( + TaxonomyCategoryServiceClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "always" ) - == AdPartnerServiceClient.DEFAULT_MTLS_ENDPOINT + == TaxonomyCategoryServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - AdPartnerServiceClient._get_api_endpoint(None, None, mock_universe, "never") + TaxonomyCategoryServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) == mock_endpoint ) assert ( - AdPartnerServiceClient._get_api_endpoint(None, None, default_universe, "never") + TaxonomyCategoryServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) == default_endpoint ) with pytest.raises(MutualTLSChannelError) as excinfo: - AdPartnerServiceClient._get_api_endpoint( + TaxonomyCategoryServiceClient._get_api_endpoint( None, mock_client_cert_source, mock_universe, "auto" ) assert ( @@ -272,29 +292,33 @@ def test__get_universe_domain(): universe_domain_env = "bar.com" assert ( - AdPartnerServiceClient._get_universe_domain( + TaxonomyCategoryServiceClient._get_universe_domain( client_universe_domain, universe_domain_env ) == client_universe_domain ) assert ( - AdPartnerServiceClient._get_universe_domain(None, 
universe_domain_env) + TaxonomyCategoryServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env ) assert ( - AdPartnerServiceClient._get_universe_domain(None, None) - == AdPartnerServiceClient._DEFAULT_UNIVERSE + TaxonomyCategoryServiceClient._get_universe_domain(None, None) + == TaxonomyCategoryServiceClient._DEFAULT_UNIVERSE ) with pytest.raises(ValueError) as excinfo: - AdPartnerServiceClient._get_universe_domain("", None) + TaxonomyCategoryServiceClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (AdPartnerServiceClient, transports.AdPartnerServiceRestTransport, "rest"), + ( + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, + "rest", + ), ], ) def test__validate_universe_domain(client_class, transport_class, transport_name): @@ -373,10 +397,10 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (AdPartnerServiceClient, "rest"), + (TaxonomyCategoryServiceClient, "rest"), ], ) -def test_ad_partner_service_client_from_service_account_info( +def test_taxonomy_category_service_client_from_service_account_info( client_class, transport_name ): creds = ga_credentials.AnonymousCredentials() @@ -399,10 +423,10 @@ def test_ad_partner_service_client_from_service_account_info( @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.AdPartnerServiceRestTransport, "rest"), + (transports.TaxonomyCategoryServiceRestTransport, "rest"), ], ) -def test_ad_partner_service_client_service_account_always_use_jwt( +def test_taxonomy_category_service_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -423,10 +447,10 @@ def test_ad_partner_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( 
"client_class,transport_name", [ - (AdPartnerServiceClient, "rest"), + (TaxonomyCategoryServiceClient, "rest"), ], ) -def test_ad_partner_service_client_from_service_account_file( +def test_taxonomy_category_service_client_from_service_account_file( client_class, transport_name ): creds = ga_credentials.AnonymousCredentials() @@ -453,39 +477,43 @@ def test_ad_partner_service_client_from_service_account_file( ) -def test_ad_partner_service_client_get_transport_class(): - transport = AdPartnerServiceClient.get_transport_class() +def test_taxonomy_category_service_client_get_transport_class(): + transport = TaxonomyCategoryServiceClient.get_transport_class() available_transports = [ - transports.AdPartnerServiceRestTransport, + transports.TaxonomyCategoryServiceRestTransport, ] assert transport in available_transports - transport = AdPartnerServiceClient.get_transport_class("rest") - assert transport == transports.AdPartnerServiceRestTransport + transport = TaxonomyCategoryServiceClient.get_transport_class("rest") + assert transport == transports.TaxonomyCategoryServiceRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (AdPartnerServiceClient, transports.AdPartnerServiceRestTransport, "rest"), + ( + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, + "rest", + ), ], ) @mock.patch.object( - AdPartnerServiceClient, + TaxonomyCategoryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(AdPartnerServiceClient), + modify_default_endpoint_template(TaxonomyCategoryServiceClient), ) -def test_ad_partner_service_client_client_options( +def test_taxonomy_category_service_client_client_options( client_class, transport_class, transport_name ): # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(AdPartnerServiceClient, "get_transport_class") as gtc: + with mock.patch.object(TaxonomyCategoryServiceClient, "get_transport_class") as gtc: transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AdPartnerServiceClient, "get_transport_class") as gtc: + with mock.patch.object(TaxonomyCategoryServiceClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() @@ -609,26 +637,26 @@ def test_ad_partner_service_client_client_options( "client_class,transport_class,transport_name,use_client_cert_env", [ ( - AdPartnerServiceClient, - transports.AdPartnerServiceRestTransport, + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, "rest", "true", ), ( - AdPartnerServiceClient, - transports.AdPartnerServiceRestTransport, + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, "rest", "false", ), ], ) @mock.patch.object( - AdPartnerServiceClient, + TaxonomyCategoryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(AdPartnerServiceClient), + modify_default_endpoint_template(TaxonomyCategoryServiceClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_ad_partner_service_client_mtls_env_auto( +def test_taxonomy_category_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env ): # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default @@ -730,13 +758,15 @@ def test_ad_partner_service_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [AdPartnerServiceClient]) +@pytest.mark.parametrize("client_class", [TaxonomyCategoryServiceClient]) @mock.patch.object( - AdPartnerServiceClient, + TaxonomyCategoryServiceClient, "DEFAULT_ENDPOINT", - modify_default_endpoint(AdPartnerServiceClient), + modify_default_endpoint(TaxonomyCategoryServiceClient), ) -def test_ad_partner_service_client_get_mtls_endpoint_and_cert_source(client_class): +def test_taxonomy_category_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". @@ -827,21 +857,21 @@ def test_ad_partner_service_client_get_mtls_endpoint_and_cert_source(client_clas ) -@pytest.mark.parametrize("client_class", [AdPartnerServiceClient]) +@pytest.mark.parametrize("client_class", [TaxonomyCategoryServiceClient]) @mock.patch.object( - AdPartnerServiceClient, + TaxonomyCategoryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(AdPartnerServiceClient), + modify_default_endpoint_template(TaxonomyCategoryServiceClient), ) -def test_ad_partner_service_client_client_api_endpoint(client_class): +def test_taxonomy_category_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" - default_universe = AdPartnerServiceClient._DEFAULT_UNIVERSE - default_endpoint = AdPartnerServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = TaxonomyCategoryServiceClient._DEFAULT_UNIVERSE + default_endpoint = TaxonomyCategoryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = AdPartnerServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = TaxonomyCategoryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( 
UNIVERSE_DOMAIN=mock_universe ) @@ -909,10 +939,14 @@ def test_ad_partner_service_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (AdPartnerServiceClient, transports.AdPartnerServiceRestTransport, "rest"), + ( + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, + "rest", + ), ], ) -def test_ad_partner_service_client_client_options_scopes( +def test_taxonomy_category_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. @@ -941,14 +975,14 @@ def test_ad_partner_service_client_client_options_scopes( "client_class,transport_class,transport_name,grpc_helpers", [ ( - AdPartnerServiceClient, - transports.AdPartnerServiceRestTransport, + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, "rest", None, ), ], ) -def test_ad_partner_service_client_client_options_credentials_file( +def test_taxonomy_category_service_client_client_options_credentials_file( client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. @@ -975,48 +1009,65 @@ def test_ad_partner_service_client_client_options_credentials_file( @pytest.mark.parametrize( "request_type", [ - ad_partner_service.GetAdPartnerRequest, + taxonomy_category_service.GetTaxonomyCategoryRequest, dict, ], ) -def test_get_ad_partner_rest(request_type): - client = AdPartnerServiceClient( +def test_get_taxonomy_category_rest(request_type): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/adPartners/sample2"} + request_init = {"name": "networks/sample1/taxonomyCategories/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = ad_partner_service.AdPartner( + return_value = taxonomy_category_messages.TaxonomyCategory( name="name_value", + taxonomy_category_id=2152, + display_name="display_name_value", + grouping_only=True, + parent_taxonomy_category_id=2897, + taxonomy_type=taxonomy_type_enum.TaxonomyTypeEnum.TaxonomyType.TAXONOMY_IAB_AUDIENCE_1_1, + ancestor_names=["ancestor_names_value"], + ancestor_taxonomy_category_ids=[3225], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_partner_service.AdPartner.pb(return_value) + return_value = taxonomy_category_messages.TaxonomyCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_ad_partner(request) + response = client.get_taxonomy_category(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, ad_partner_service.AdPartner) + assert isinstance(response, taxonomy_category_messages.TaxonomyCategory) assert response.name == "name_value" + assert response.taxonomy_category_id == 2152 + assert response.display_name == "display_name_value" + assert response.grouping_only is True + assert response.parent_taxonomy_category_id == 2897 + assert ( + response.taxonomy_type + == taxonomy_type_enum.TaxonomyTypeEnum.TaxonomyType.TAXONOMY_IAB_AUDIENCE_1_1 + ) + assert response.ancestor_names == ["ancestor_names_value"] + assert response.ancestor_taxonomy_category_ids == [3225] -def test_get_ad_partner_rest_use_cached_wrapped_rpc(): +def test_get_taxonomy_category_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1026,32 +1077,37 @@ def test_get_ad_partner_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_ad_partner in client._transport._wrapped_methods + assert ( + client._transport.get_taxonomy_category + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_ad_partner] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_taxonomy_category + ] = mock_rpc request = {} - client.get_ad_partner(request) + client.get_taxonomy_category(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_ad_partner(request) + client.get_taxonomy_category(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_ad_partner_rest_required_fields( - request_type=ad_partner_service.GetAdPartnerRequest, +def test_get_taxonomy_category_rest_required_fields( + request_type=taxonomy_category_service.GetTaxonomyCategoryRequest, ): - transport_class = transports.AdPartnerServiceRestTransport + transport_class = transports.TaxonomyCategoryServiceRestTransport request_init = {} request_init["name"] = "" @@ -1065,7 +1121,7 @@ def test_get_ad_partner_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_ad_partner._get_unset_required_fields(jsonified_request) + ).get_taxonomy_category._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1074,21 +1130,21 @@ def test_get_ad_partner_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_ad_partner._get_unset_required_fields(jsonified_request) + ).get_taxonomy_category._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ad_partner_service.AdPartner() + return_value = taxonomy_category_messages.TaxonomyCategory() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1109,50 +1165,50 @@ def test_get_ad_partner_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_partner_service.AdPartner.pb(return_value) + return_value = taxonomy_category_messages.TaxonomyCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_ad_partner(request) + response = client.get_taxonomy_category(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_ad_partner_rest_unset_required_fields(): - transport = transports.AdPartnerServiceRestTransport( +def test_get_taxonomy_category_rest_unset_required_fields(): + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_ad_partner._get_unset_required_fields({}) + unset_fields = transport.get_taxonomy_category._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_ad_partner_rest_interceptors(null_interceptor): - transport = transports.AdPartnerServiceRestTransport( +def test_get_taxonomy_category_rest_interceptors(null_interceptor): + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.AdPartnerServiceRestInterceptor(), + else transports.TaxonomyCategoryServiceRestInterceptor(), ) - client = AdPartnerServiceClient(transport=transport) + client = TaxonomyCategoryServiceClient(transport=transport) with mock.patch.object( type(client.transport._session), "request" ) as req, 
mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AdPartnerServiceRestInterceptor, "post_get_ad_partner" + transports.TaxonomyCategoryServiceRestInterceptor, "post_get_taxonomy_category" ) as post, mock.patch.object( - transports.AdPartnerServiceRestInterceptor, "pre_get_ad_partner" + transports.TaxonomyCategoryServiceRestInterceptor, "pre_get_taxonomy_category" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = ad_partner_service.GetAdPartnerRequest.pb( - ad_partner_service.GetAdPartnerRequest() + pb_message = taxonomy_category_service.GetTaxonomyCategoryRequest.pb( + taxonomy_category_service.GetTaxonomyCategoryRequest() ) transcode.return_value = { "method": "post", @@ -1164,19 +1220,19 @@ def test_get_ad_partner_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ad_partner_service.AdPartner.to_json( - ad_partner_service.AdPartner() + req.return_value._content = taxonomy_category_messages.TaxonomyCategory.to_json( + taxonomy_category_messages.TaxonomyCategory() ) - request = ad_partner_service.GetAdPartnerRequest() + request = taxonomy_category_service.GetTaxonomyCategoryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ad_partner_service.AdPartner() + post.return_value = taxonomy_category_messages.TaxonomyCategory() - client.get_ad_partner( + client.get_taxonomy_category( request, metadata=[ ("key", "val"), @@ -1188,16 +1244,17 @@ def test_get_ad_partner_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_ad_partner_rest_bad_request( - transport: str = "rest", request_type=ad_partner_service.GetAdPartnerRequest +def test_get_taxonomy_category_rest_bad_request( + transport: str = "rest", + request_type=taxonomy_category_service.GetTaxonomyCategoryRequest, ): - client = 
AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/adPartners/sample2"} + request_init = {"name": "networks/sample1/taxonomyCategories/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1209,11 +1266,11 @@ def test_get_ad_partner_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_ad_partner(request) + client.get_taxonomy_category(request) -def test_get_ad_partner_rest_flattened(): - client = AdPartnerServiceClient( +def test_get_taxonomy_category_rest_flattened(): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1221,10 +1278,10 @@ def test_get_ad_partner_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ad_partner_service.AdPartner() + return_value = taxonomy_category_messages.TaxonomyCategory() # get arguments that satisfy an http rule for this method - sample_request = {"name": "networks/sample1/adPartners/sample2"} + sample_request = {"name": "networks/sample1/taxonomyCategories/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -1236,24 +1293,25 @@ def test_get_ad_partner_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_partner_service.AdPartner.pb(return_value) + return_value = taxonomy_category_messages.TaxonomyCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_ad_partner(**mock_args) + client.get_taxonomy_category(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=networks/*/adPartners/*}" % client.transport._host, args[1] + "%s/v1/{name=networks/*/taxonomyCategories/*}" % client.transport._host, + args[1], ) -def test_get_ad_partner_rest_flattened_error(transport: str = "rest"): - client = AdPartnerServiceClient( +def test_get_taxonomy_category_rest_flattened_error(transport: str = "rest"): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1261,14 +1319,14 @@ def test_get_ad_partner_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_ad_partner( - ad_partner_service.GetAdPartnerRequest(), + client.get_taxonomy_category( + taxonomy_category_service.GetTaxonomyCategoryRequest(), name="name_value", ) -def test_get_ad_partner_rest_error(): - client = AdPartnerServiceClient( +def test_get_taxonomy_category_rest_error(): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1276,12 +1334,12 @@ def test_get_ad_partner_rest_error(): @pytest.mark.parametrize( "request_type", [ - ad_partner_service.ListAdPartnersRequest, + taxonomy_category_service.ListTaxonomyCategoriesRequest, dict, ], ) -def test_list_ad_partners_rest(request_type): - client = AdPartnerServiceClient( +def test_list_taxonomy_categories_rest(request_type): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1293,7 +1351,7 @@ def test_list_ad_partners_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ad_partner_service.ListAdPartnersResponse( + return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse( next_page_token="next_page_token_value", total_size=1086, ) @@ -1302,24 +1360,26 @@ def test_list_ad_partners_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_partner_service.ListAdPartnersResponse.pb(return_value) + return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_ad_partners(request) + response = client.list_taxonomy_categories(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAdPartnersPager) + assert isinstance(response, pagers.ListTaxonomyCategoriesPager) assert response.next_page_token == "next_page_token_value" assert response.total_size == 1086 -def test_list_ad_partners_rest_use_cached_wrapped_rpc(): +def test_list_taxonomy_categories_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1329,7 +1389,10 @@ def test_list_ad_partners_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_ad_partners in client._transport._wrapped_methods + assert ( + client._transport.list_taxonomy_categories + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -1337,26 +1400,26 @@ def 
test_list_ad_partners_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_ad_partners + client._transport.list_taxonomy_categories ] = mock_rpc request = {} - client.list_ad_partners(request) + client.list_taxonomy_categories(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_ad_partners(request) + client.list_taxonomy_categories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_ad_partners_rest_required_fields( - request_type=ad_partner_service.ListAdPartnersRequest, +def test_list_taxonomy_categories_rest_required_fields( + request_type=taxonomy_category_service.ListTaxonomyCategoriesRequest, ): - transport_class = transports.AdPartnerServiceRestTransport + transport_class = transports.TaxonomyCategoryServiceRestTransport request_init = {} request_init["parent"] = "" @@ -1370,7 +1433,7 @@ def test_list_ad_partners_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_ad_partners._get_unset_required_fields(jsonified_request) + ).list_taxonomy_categories._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1379,7 +1442,7 @@ def test_list_ad_partners_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_ad_partners._get_unset_required_fields(jsonified_request) + ).list_taxonomy_categories._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( @@ -1396,14 +1459,14 @@ def test_list_ad_partners_rest_required_fields( assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ad_partner_service.ListAdPartnersResponse() + return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1424,25 +1487,27 @@ def test_list_ad_partners_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_partner_service.ListAdPartnersResponse.pb(return_value) + return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_ad_partners(request) + response = client.list_taxonomy_categories(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_ad_partners_rest_unset_required_fields(): - transport = transports.AdPartnerServiceRestTransport( +def test_list_taxonomy_categories_rest_unset_required_fields(): + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_ad_partners._get_unset_required_fields({}) + unset_fields = transport.list_taxonomy_categories._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -1458,27 +1523,29 @@ def 
test_list_ad_partners_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_ad_partners_rest_interceptors(null_interceptor): - transport = transports.AdPartnerServiceRestTransport( +def test_list_taxonomy_categories_rest_interceptors(null_interceptor): + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.AdPartnerServiceRestInterceptor(), + else transports.TaxonomyCategoryServiceRestInterceptor(), ) - client = AdPartnerServiceClient(transport=transport) + client = TaxonomyCategoryServiceClient(transport=transport) with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AdPartnerServiceRestInterceptor, "post_list_ad_partners" + transports.TaxonomyCategoryServiceRestInterceptor, + "post_list_taxonomy_categories", ) as post, mock.patch.object( - transports.AdPartnerServiceRestInterceptor, "pre_list_ad_partners" + transports.TaxonomyCategoryServiceRestInterceptor, + "pre_list_taxonomy_categories", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = ad_partner_service.ListAdPartnersRequest.pb( - ad_partner_service.ListAdPartnersRequest() + pb_message = taxonomy_category_service.ListTaxonomyCategoriesRequest.pb( + taxonomy_category_service.ListTaxonomyCategoriesRequest() ) transcode.return_value = { "method": "post", @@ -1490,19 +1557,21 @@ def test_list_ad_partners_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ad_partner_service.ListAdPartnersResponse.to_json( - ad_partner_service.ListAdPartnersResponse() + req.return_value._content = ( + taxonomy_category_service.ListTaxonomyCategoriesResponse.to_json( + 
taxonomy_category_service.ListTaxonomyCategoriesResponse() + ) ) - request = ad_partner_service.ListAdPartnersRequest() + request = taxonomy_category_service.ListTaxonomyCategoriesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ad_partner_service.ListAdPartnersResponse() + post.return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse() - client.list_ad_partners( + client.list_taxonomy_categories( request, metadata=[ ("key", "val"), @@ -1514,10 +1583,11 @@ def test_list_ad_partners_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_ad_partners_rest_bad_request( - transport: str = "rest", request_type=ad_partner_service.ListAdPartnersRequest +def test_list_taxonomy_categories_rest_bad_request( + transport: str = "rest", + request_type=taxonomy_category_service.ListTaxonomyCategoriesRequest, ): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1535,11 +1605,11 @@ def test_list_ad_partners_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_ad_partners(request) + client.list_taxonomy_categories(request) -def test_list_ad_partners_rest_flattened(): - client = AdPartnerServiceClient( +def test_list_taxonomy_categories_rest_flattened(): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1547,7 +1617,7 @@ def test_list_ad_partners_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ad_partner_service.ListAdPartnersResponse() + return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "networks/sample1"} @@ -1562,24 +1632,27 @@ def test_list_ad_partners_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_partner_service.ListAdPartnersResponse.pb(return_value) + return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_ad_partners(**mock_args) + client.list_taxonomy_categories(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=networks/*}/adPartners" % client.transport._host, args[1] + "%s/v1/{parent=networks/*}/taxonomyCategories" % client.transport._host, + args[1], ) -def test_list_ad_partners_rest_flattened_error(transport: str = "rest"): - client = AdPartnerServiceClient( +def test_list_taxonomy_categories_rest_flattened_error(transport: str = "rest"): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1587,14 +1660,14 @@ def test_list_ad_partners_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_ad_partners( - ad_partner_service.ListAdPartnersRequest(), + client.list_taxonomy_categories( + taxonomy_category_service.ListTaxonomyCategoriesRequest(), parent="parent_value", ) -def test_list_ad_partners_rest_pager(transport: str = "rest"): - client = AdPartnerServiceClient( +def test_list_taxonomy_categories_rest_pager(transport: str = "rest"): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1605,28 +1678,28 @@ def test_list_ad_partners_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - ad_partner_service.ListAdPartnersResponse( - ad_partners=[ - ad_partner_service.AdPartner(), - ad_partner_service.AdPartner(), - ad_partner_service.AdPartner(), + taxonomy_category_service.ListTaxonomyCategoriesResponse( + taxonomy_categories=[ + taxonomy_category_messages.TaxonomyCategory(), + taxonomy_category_messages.TaxonomyCategory(), + taxonomy_category_messages.TaxonomyCategory(), ], next_page_token="abc", ), - ad_partner_service.ListAdPartnersResponse( - ad_partners=[], + taxonomy_category_service.ListTaxonomyCategoriesResponse( + taxonomy_categories=[], next_page_token="def", ), - ad_partner_service.ListAdPartnersResponse( - ad_partners=[ - ad_partner_service.AdPartner(), + taxonomy_category_service.ListTaxonomyCategoriesResponse( + taxonomy_categories=[ + taxonomy_category_messages.TaxonomyCategory(), ], next_page_token="ghi", ), - ad_partner_service.ListAdPartnersResponse( - ad_partners=[ - ad_partner_service.AdPartner(), - ad_partner_service.AdPartner(), + taxonomy_category_service.ListTaxonomyCategoriesResponse( + taxonomy_categories=[ + taxonomy_category_messages.TaxonomyCategory(), + taxonomy_category_messages.TaxonomyCategory(), ], ), ) @@ -1635,7 +1708,8 @@ def test_list_ad_partners_rest_pager(transport: str = "rest"): # Wrap the values 
into proper Response objs response = tuple( - ad_partner_service.ListAdPartnersResponse.to_json(x) for x in response + taxonomy_category_service.ListTaxonomyCategoriesResponse.to_json(x) + for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -1645,46 +1719,48 @@ def test_list_ad_partners_rest_pager(transport: str = "rest"): sample_request = {"parent": "networks/sample1"} - pager = client.list_ad_partners(request=sample_request) + pager = client.list_taxonomy_categories(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, ad_partner_service.AdPartner) for i in results) + assert all( + isinstance(i, taxonomy_category_messages.TaxonomyCategory) for i in results + ) - pages = list(client.list_ad_partners(request=sample_request).pages) + pages = list(client.list_taxonomy_categories(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. - transport = transports.AdPartnerServiceRestTransport( + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
- transport = transports.AdPartnerServiceRestTransport( + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( client_options={"credentials_file": "credentials.json"}, transport=transport, ) # It is an error to provide an api_key and a transport instance. - transport = transports.AdPartnerServiceRestTransport( + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( client_options=options, transport=transport, ) @@ -1693,16 +1769,16 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. - transport = transports.AdPartnerServiceRestTransport( + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -1710,17 +1786,17 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. 
- transport = transports.AdPartnerServiceRestTransport( + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - client = AdPartnerServiceClient(transport=transport) + client = TaxonomyCategoryServiceClient(transport=transport) assert client.transport is transport @pytest.mark.parametrize( "transport_class", [ - transports.AdPartnerServiceRestTransport, + transports.TaxonomyCategoryServiceRestTransport, ], ) def test_transport_adc(transport_class): @@ -1738,36 +1814,36 @@ def test_transport_adc(transport_class): ], ) def test_transport_kind(transport_name): - transport = AdPartnerServiceClient.get_transport_class(transport_name)( + transport = TaxonomyCategoryServiceClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name -def test_ad_partner_service_base_transport_error(): +def test_taxonomy_category_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AdPartnerServiceTransport( + transport = transports.TaxonomyCategoryServiceTransport( credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) -def test_ad_partner_service_base_transport(): +def test_taxonomy_category_service_base_transport(): # Instantiate the base transport. with mock.patch( - "google.ads.admanager_v1.services.ad_partner_service.transports.AdPartnerServiceTransport.__init__" + "google.ads.admanager_v1.services.taxonomy_category_service.transports.TaxonomyCategoryServiceTransport.__init__" ) as Transport: Transport.return_value = None - transport = transports.AdPartnerServiceTransport( + transport = transports.TaxonomyCategoryServiceTransport( credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - "get_ad_partner", - "list_ad_partners", + "get_taxonomy_category", + "list_taxonomy_categories", "get_operation", ) for method in methods: @@ -1786,16 +1862,16 @@ def test_ad_partner_service_base_transport(): getattr(transport, r)() -def test_ad_partner_service_base_transport_with_credentials_file(): +def test_taxonomy_category_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.ads.admanager_v1.services.ad_partner_service.transports.AdPartnerServiceTransport._prep_wrapped_messages" + "google.ads.admanager_v1.services.taxonomy_category_service.transports.TaxonomyCategoryServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AdPartnerServiceTransport( + transport = transports.TaxonomyCategoryServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1807,22 +1883,22 @@ def test_ad_partner_service_base_transport_with_credentials_file(): ) -def test_ad_partner_service_base_transport_with_adc(): +def test_taxonomy_category_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.ads.admanager_v1.services.ad_partner_service.transports.AdPartnerServiceTransport._prep_wrapped_messages" + "google.ads.admanager_v1.services.taxonomy_category_service.transports.TaxonomyCategoryServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AdPartnerServiceTransport() + transport = transports.TaxonomyCategoryServiceTransport() adc.assert_called_once() -def test_ad_partner_service_auth_adc(): +def test_taxonomy_category_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AdPartnerServiceClient() + TaxonomyCategoryServiceClient() adc.assert_called_once_with( scopes=None, default_scopes=(), @@ -1830,12 +1906,12 @@ def test_ad_partner_service_auth_adc(): ) -def test_ad_partner_service_http_transport_client_cert_source_for_mtls(): +def test_taxonomy_category_service_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" ) as mock_configure_mtls_channel: - transports.AdPartnerServiceRestTransport( + transports.TaxonomyCategoryServiceRestTransport( credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) @@ -1847,8 +1923,8 @@ def test_ad_partner_service_http_transport_client_cert_source_for_mtls(): "rest", ], ) -def test_ad_partner_service_host_no_port(transport_name): - client = AdPartnerServiceClient( +def test_taxonomy_category_service_host_no_port(transport_name): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), 
client_options=client_options.ClientOptions( api_endpoint="admanager.googleapis.com" @@ -1868,8 +1944,8 @@ def test_ad_partner_service_host_no_port(transport_name): "rest", ], ) -def test_ad_partner_service_host_with_port(transport_name): - client = AdPartnerServiceClient( +def test_taxonomy_category_service_host_with_port(transport_name): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="admanager.googleapis.com:8000" @@ -1889,65 +1965,67 @@ def test_ad_partner_service_host_with_port(transport_name): "rest", ], ) -def test_ad_partner_service_client_transport_session_collision(transport_name): +def test_taxonomy_category_service_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() - client1 = AdPartnerServiceClient( + client1 = TaxonomyCategoryServiceClient( credentials=creds1, transport=transport_name, ) - client2 = AdPartnerServiceClient( + client2 = TaxonomyCategoryServiceClient( credentials=creds2, transport=transport_name, ) - session1 = client1.transport.get_ad_partner._session - session2 = client2.transport.get_ad_partner._session + session1 = client1.transport.get_taxonomy_category._session + session2 = client2.transport.get_taxonomy_category._session assert session1 != session2 - session1 = client1.transport.list_ad_partners._session - session2 = client2.transport.list_ad_partners._session + session1 = client1.transport.list_taxonomy_categories._session + session2 = client2.transport.list_taxonomy_categories._session assert session1 != session2 -def test_ad_partner_path(): +def test_network_path(): network_code = "squid" - ad_partner = "clam" - expected = "networks/{network_code}/adPartners/{ad_partner}".format( + expected = "networks/{network_code}".format( network_code=network_code, - ad_partner=ad_partner, ) - actual = 
AdPartnerServiceClient.ad_partner_path(network_code, ad_partner) + actual = TaxonomyCategoryServiceClient.network_path(network_code) assert expected == actual -def test_parse_ad_partner_path(): +def test_parse_network_path(): expected = { - "network_code": "whelk", - "ad_partner": "octopus", + "network_code": "clam", } - path = AdPartnerServiceClient.ad_partner_path(**expected) + path = TaxonomyCategoryServiceClient.network_path(**expected) # Check that the path construction is reversible. - actual = AdPartnerServiceClient.parse_ad_partner_path(path) + actual = TaxonomyCategoryServiceClient.parse_network_path(path) assert expected == actual -def test_network_path(): - network_code = "oyster" - expected = "networks/{network_code}".format( +def test_taxonomy_category_path(): + network_code = "whelk" + taxonomy_category = "octopus" + expected = "networks/{network_code}/taxonomyCategories/{taxonomy_category}".format( network_code=network_code, + taxonomy_category=taxonomy_category, + ) + actual = TaxonomyCategoryServiceClient.taxonomy_category_path( + network_code, taxonomy_category ) - actual = AdPartnerServiceClient.network_path(network_code) assert expected == actual -def test_parse_network_path(): +def test_parse_taxonomy_category_path(): expected = { - "network_code": "nudibranch", + "network_code": "oyster", + "taxonomy_category": "nudibranch", } - path = AdPartnerServiceClient.network_path(**expected) + path = TaxonomyCategoryServiceClient.taxonomy_category_path(**expected) # Check that the path construction is reversible. 
- actual = AdPartnerServiceClient.parse_network_path(path) + actual = TaxonomyCategoryServiceClient.parse_taxonomy_category_path(path) assert expected == actual @@ -1956,7 +2034,7 @@ def test_common_billing_account_path(): expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) - actual = AdPartnerServiceClient.common_billing_account_path(billing_account) + actual = TaxonomyCategoryServiceClient.common_billing_account_path(billing_account) assert expected == actual @@ -1964,10 +2042,10 @@ def test_parse_common_billing_account_path(): expected = { "billing_account": "mussel", } - path = AdPartnerServiceClient.common_billing_account_path(**expected) + path = TaxonomyCategoryServiceClient.common_billing_account_path(**expected) # Check that the path construction is reversible. - actual = AdPartnerServiceClient.parse_common_billing_account_path(path) + actual = TaxonomyCategoryServiceClient.parse_common_billing_account_path(path) assert expected == actual @@ -1976,7 +2054,7 @@ def test_common_folder_path(): expected = "folders/{folder}".format( folder=folder, ) - actual = AdPartnerServiceClient.common_folder_path(folder) + actual = TaxonomyCategoryServiceClient.common_folder_path(folder) assert expected == actual @@ -1984,10 +2062,10 @@ def test_parse_common_folder_path(): expected = { "folder": "nautilus", } - path = AdPartnerServiceClient.common_folder_path(**expected) + path = TaxonomyCategoryServiceClient.common_folder_path(**expected) # Check that the path construction is reversible. 
- actual = AdPartnerServiceClient.parse_common_folder_path(path) + actual = TaxonomyCategoryServiceClient.parse_common_folder_path(path) assert expected == actual @@ -1996,7 +2074,7 @@ def test_common_organization_path(): expected = "organizations/{organization}".format( organization=organization, ) - actual = AdPartnerServiceClient.common_organization_path(organization) + actual = TaxonomyCategoryServiceClient.common_organization_path(organization) assert expected == actual @@ -2004,10 +2082,10 @@ def test_parse_common_organization_path(): expected = { "organization": "abalone", } - path = AdPartnerServiceClient.common_organization_path(**expected) + path = TaxonomyCategoryServiceClient.common_organization_path(**expected) # Check that the path construction is reversible. - actual = AdPartnerServiceClient.parse_common_organization_path(path) + actual = TaxonomyCategoryServiceClient.parse_common_organization_path(path) assert expected == actual @@ -2016,7 +2094,7 @@ def test_common_project_path(): expected = "projects/{project}".format( project=project, ) - actual = AdPartnerServiceClient.common_project_path(project) + actual = TaxonomyCategoryServiceClient.common_project_path(project) assert expected == actual @@ -2024,10 +2102,10 @@ def test_parse_common_project_path(): expected = { "project": "clam", } - path = AdPartnerServiceClient.common_project_path(**expected) + path = TaxonomyCategoryServiceClient.common_project_path(**expected) # Check that the path construction is reversible. 
- actual = AdPartnerServiceClient.parse_common_project_path(path) + actual = TaxonomyCategoryServiceClient.parse_common_project_path(path) assert expected == actual @@ -2038,7 +2116,7 @@ def test_common_location_path(): project=project, location=location, ) - actual = AdPartnerServiceClient.common_location_path(project, location) + actual = TaxonomyCategoryServiceClient.common_location_path(project, location) assert expected == actual @@ -2047,10 +2125,10 @@ def test_parse_common_location_path(): "project": "oyster", "location": "nudibranch", } - path = AdPartnerServiceClient.common_location_path(**expected) + path = TaxonomyCategoryServiceClient.common_location_path(**expected) # Check that the path construction is reversible. - actual = AdPartnerServiceClient.parse_common_location_path(path) + actual = TaxonomyCategoryServiceClient.parse_common_location_path(path) assert expected == actual @@ -2058,18 +2136,18 @@ def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( - transports.AdPartnerServiceTransport, "_prep_wrapped_messages" + transports.TaxonomyCategoryServiceTransport, "_prep_wrapped_messages" ) as prep: - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) with mock.patch.object( - transports.AdPartnerServiceTransport, "_prep_wrapped_messages" + transports.TaxonomyCategoryServiceTransport, "_prep_wrapped_messages" ) as prep: - transport_class = AdPartnerServiceClient.get_transport_class() + transport_class = TaxonomyCategoryServiceClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, @@ -2080,14 +2158,14 @@ def test_client_with_default_client_info(): def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): - client = 
AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2110,11 +2188,11 @@ def test_get_operation_rest_bad_request( ], ) def test_get_operation_rest(request_type): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: @@ -2141,7 +2219,7 @@ def test_transport_close(): } for transport, close_name in transports.items(): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object( @@ -2157,7 +2235,7 @@ def test_client_ctx(): "rest", ] for transport in transports: - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
@@ -2171,7 +2249,10 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (AdPartnerServiceClient, transports.AdPartnerServiceRestTransport), + ( + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, + ), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py index a51726932d23..80cbbb9f0809 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py @@ -44,12 +44,8 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.ads.admanager_v1.services.user_service import ( - UserServiceClient, - pagers, - transports, -) -from google.ads.admanager_v1.types import user_service +from google.ads.admanager_v1.services.user_service import UserServiceClient, transports +from google.ads.admanager_v1.types import user_messages, user_service def client_cert_source_callback(): @@ -951,7 +947,7 @@ def test_get_user_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = user_service.User( + return_value = user_messages.User( name="name_value", user_id=747, display_name="display_name_value", @@ -967,7 +963,7 @@ def test_get_user_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = user_service.User.pb(return_value) + return_value = user_messages.User.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -975,7 +971,7 @@ def test_get_user_rest(request_type): response = client.get_user(request) # Establish that the response is the type that we expect. - assert isinstance(response, user_service.User) + assert isinstance(response, user_messages.User) assert response.name == "name_value" assert response.user_id == 747 assert response.display_name == "display_name_value" @@ -1061,7 +1057,7 @@ def test_get_user_rest_required_fields(request_type=user_service.GetUserRequest) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = user_service.User() + return_value = user_messages.User() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1082,7 +1078,7 @@ def test_get_user_rest_required_fields(request_type=user_service.GetUserRequest) response_value.status_code = 200 # Convert return value to protobuf type - return_value = user_service.User.pb(return_value) + return_value = user_messages.User.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1135,7 +1131,7 @@ def test_get_user_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = user_service.User.to_json(user_service.User()) + req.return_value._content = user_messages.User.to_json(user_messages.User()) request = user_service.GetUserRequest() metadata = [ @@ -1143,7 +1139,7 @@ def test_get_user_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = user_service.User() + post.return_value = user_messages.User() client.get_user( request, @@ -1190,7 +1186,7 @@ def test_get_user_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = user_service.User() + return_value = user_messages.User() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/users/sample2"} @@ -1205,7 +1201,7 @@ def test_get_user_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = user_service.User.pb(return_value) + return_value = user_messages.User.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1242,381 +1238,6 @@ def test_get_user_rest_error(): ) -@pytest.mark.parametrize( - "request_type", - [ - user_service.ListUsersRequest, - dict, - ], -) -def test_list_users_rest(request_type): - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = user_service.ListUsersResponse( - next_page_token="next_page_token_value", - total_size=1086, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = user_service.ListUsersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_users(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListUsersPager) - assert response.next_page_token == "next_page_token_value" - assert response.total_size == 1086 - - -def test_list_users_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_users in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_users] = mock_rpc - - request = {} - client.list_users(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_users(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_users_rest_required_fields(request_type=user_service.ListUsersRequest): - transport_class = transports.UserServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_users._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_users._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "skip", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = user_service.ListUsersResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = user_service.ListUsersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_users(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_users_rest_unset_required_fields(): - transport = transports.UserServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_users._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "skip", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_users_rest_interceptors(null_interceptor): - transport = transports.UserServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.UserServiceRestInterceptor(), - ) - client = UserServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, 
"transcode" - ) as transcode, mock.patch.object( - transports.UserServiceRestInterceptor, "post_list_users" - ) as post, mock.patch.object( - transports.UserServiceRestInterceptor, "pre_list_users" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = user_service.ListUsersRequest.pb(user_service.ListUsersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = user_service.ListUsersResponse.to_json( - user_service.ListUsersResponse() - ) - - request = user_service.ListUsersRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = user_service.ListUsersResponse() - - client.list_users( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_users_rest_bad_request( - transport: str = "rest", request_type=user_service.ListUsersRequest -): - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_users(request) - - -def test_list_users_rest_flattened(): - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = user_service.ListUsersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "networks/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = user_service.ListUsersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_users(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=networks/*}/users" % client.transport._host, args[1] - ) - - -def test_list_users_rest_flattened_error(transport: str = "rest"): - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_users( - user_service.ListUsersRequest(), - parent="parent_value", - ) - - -def test_list_users_rest_pager(transport: str = "rest"): - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - user_service.ListUsersResponse( - users=[ - user_service.User(), - user_service.User(), - user_service.User(), - ], - next_page_token="abc", - ), - user_service.ListUsersResponse( - users=[], - next_page_token="def", - ), - user_service.ListUsersResponse( - users=[ - user_service.User(), - ], - next_page_token="ghi", - ), - user_service.ListUsersResponse( - users=[ - user_service.User(), - user_service.User(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(user_service.ListUsersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "networks/sample1"} - - pager = client.list_users(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, user_service.User) for i in results) - - pages = list(client.list_users(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.UserServiceRestTransport( @@ -1728,7 +1349,6 @@ def test_user_service_base_transport(): # raise NotImplementedError. methods = ( "get_user", - "list_users", "get_operation", ) for method in methods: @@ -1864,34 +1484,11 @@ def test_user_service_client_transport_session_collision(transport_name): session1 = client1.transport.get_user._session session2 = client2.transport.get_user._session assert session1 != session2 - session1 = client1.transport.list_users._session - session2 = client2.transport.list_users._session - assert session1 != session2 - - -def test_network_path(): - network_code = "squid" - expected = "networks/{network_code}".format( - network_code=network_code, - ) - actual = UserServiceClient.network_path(network_code) - assert expected == actual - - -def test_parse_network_path(): - expected = { - "network_code": "clam", - } - path = UserServiceClient.network_path(**expected) - - # Check that the path construction is reversible. - actual = UserServiceClient.parse_network_path(path) - assert expected == actual def test_role_path(): - network_code = "whelk" - role = "octopus" + network_code = "squid" + role = "clam" expected = "networks/{network_code}/roles/{role}".format( network_code=network_code, role=role, @@ -1902,8 +1499,8 @@ def test_role_path(): def test_parse_role_path(): expected = { - "network_code": "oyster", - "role": "nudibranch", + "network_code": "whelk", + "role": "octopus", } path = UserServiceClient.role_path(**expected) @@ -1913,8 +1510,8 @@ def test_parse_role_path(): def test_user_path(): - network_code = "cuttlefish" - user = "mussel" + network_code = "oyster" + user = "nudibranch" expected = "networks/{network_code}/users/{user}".format( network_code=network_code, user=user, @@ -1925,8 +1522,8 @@ def test_user_path(): def test_parse_user_path(): expected = { - "network_code": "winkle", - "user": "nautilus", + "network_code": "cuttlefish", + "user": "mussel", } path = UserServiceClient.user_path(**expected) 
@@ -1936,7 +1533,7 @@ def test_parse_user_path(): def test_common_billing_account_path(): - billing_account = "scallop" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -1946,7 +1543,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "abalone", + "billing_account": "nautilus", } path = UserServiceClient.common_billing_account_path(**expected) @@ -1956,7 +1553,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "squid" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -1966,7 +1563,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "clam", + "folder": "abalone", } path = UserServiceClient.common_folder_path(**expected) @@ -1976,7 +1573,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "whelk" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -1986,7 +1583,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "octopus", + "organization": "clam", } path = UserServiceClient.common_organization_path(**expected) @@ -1996,7 +1593,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "oyster" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -2006,7 +1603,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nudibranch", + "project": "octopus", } path = UserServiceClient.common_project_path(**expected) @@ -2016,8 +1613,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "cuttlefish" - location = "mussel" + project = "oyster" + location = "nudibranch" expected = 
"projects/{project}/locations/{location}".format( project=project, location=location, @@ -2028,8 +1625,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "winkle", - "location": "nautilus", + "project": "cuttlefish", + "location": "mussel", } path = UserServiceClient.common_location_path(**expected) @@ -2071,7 +1668,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2098,7 +1695,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-marketingplatform-admin/.OwlBot.yaml b/packages/google-ads-marketingplatform-admin/.OwlBot.yaml new file mode 100644 index 000000000000..d397bf3a63bd --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/marketingplatform/admin/(v.*)/.*-py + dest: /owl-bot-staging/google-ads-marketingplatform-admin/$1 +api-name: google-ads-marketingplatform-admin diff --git a/packages/google-ads-marketingplatform-admin/.coveragerc b/packages/google-ads-marketingplatform-admin/.coveragerc new file mode 100644 index 000000000000..f2b0df425e8e --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/ads/marketingplatform_admin/__init__.py + google/ads/marketingplatform_admin/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-ads-marketingplatform-admin/.flake8 b/packages/google-ads-marketingplatform-admin/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-ads-marketingplatform-admin/.gitignore b/packages/google-ads-marketingplatform-admin/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-ads-marketingplatform-admin/.repo-metadata.json b/packages/google-ads-marketingplatform-admin/.repo-metadata.json new file mode 100644 index 000000000000..094cd0a04207 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-ads-marketingplatform-admin", + "name_pretty": "Google Marketing Platform Admin API", + "api_description": "The Google Marketing Platform Admin API allows for programmatic access to the Google Marketing Platform configuration data. 
You can use the Google Marketing Platform Admin API to manage links between your Google Marketing Platform organization and Google Analytics accounts, and to set the service level of your GA4 properties.", + "product_documentation": "https://developers.google.com/analytics/devguides/config/gmp/v1", + "client_documentation": "https://googleapis.dev/python/google-ads-marketingplatform-admin/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-ads-marketingplatform-admin", + "api_id": "marketingplatformadmin.googleapis.com", + "default_version": "v1alpha", + "codeowner_team": "", + "api_shortname": "marketingplatformadmin" +} diff --git a/packages/google-ads-marketingplatform-admin/CHANGELOG.md b/packages/google-ads-marketingplatform-admin/CHANGELOG.md new file mode 100644 index 000000000000..f8676c0292af --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2024-09-05) + + +### Features + +* add initial files for google.marketingplatform.admin.v1alpha ([#13060](https://github.com/googleapis/google-cloud-python/issues/13060)) ([2bbab3b](https://github.com/googleapis/google-cloud-python/commit/2bbab3bea1548fdb6200856ffe4a6fe7d6bf8487)) + +## Changelog diff --git a/packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md b/packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body 
+size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. 
Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. 
In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst b/packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst new file mode 100644 index 000000000000..a7223a5e0b19 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. 
+ +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. 
Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. 
Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. 
``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-ads-marketingplatform-admin + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-ads-marketingplatform-admin/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. 
diff --git a/packages/google-ads-marketingplatform-admin/LICENSE b/packages/google-ads-marketingplatform-admin/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-ads-marketingplatform-admin/MANIFEST.in b/packages/google-ads-marketingplatform-admin/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-ads-marketingplatform-admin/README.rst b/packages/google-ads-marketingplatform-admin/README.rst new file mode 100644 index 000000000000..0c99d23ee7c1 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/README.rst @@ -0,0 +1,108 @@ +Python Client for Google Marketing Platform Admin API +===================================================== + +|preview| |pypi| |versions| + +`Google Marketing Platform Admin API`_: The Google Marketing Platform Admin API allows for programmatic access to the Google Marketing Platform configuration data. You can use the Google Marketing Platform Admin API to manage links between your Google Marketing Platform organization and Google Analytics accounts, and to set the service level of your GA4 properties. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-ads-marketingplatform-admin.svg + :target: https://pypi.org/project/google-ads-marketingplatform-admin/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-ads-marketingplatform-admin.svg + :target: https://pypi.org/project/google-ads-marketingplatform-admin/ +.. _Google Marketing Platform Admin API: https://developers.google.com/analytics/devguides/config/gmp/v1 +.. _Client Library Documentation: https://googleapis.dev/python/google-ads-marketingplatform-admin/latest +.. 
_Product Documentation: https://developers.google.com/analytics/devguides/config/gmp/v1 + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Marketing Platform Admin API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Marketing Platform Admin API.: https://developers.google.com/analytics/devguides/config/gmp/v1 +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. 
_maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + pip install google-ads-marketingplatform-admin + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv <your-env> + .\<your-env>\Scripts\activate + pip install google-ads-marketingplatform-admin + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Google Marketing Platform Admin API + to see other available methods on the client. +- Read the `Google Marketing Platform Admin API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Google Marketing Platform Admin API Product documentation: https://developers.google.com/analytics/devguides/config/gmp/v1 +.. 
_README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md b/packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/docs/README.rst b/packages/google-ads-marketingplatform-admin/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/docs/_static/custom.css b/packages/google-ads-marketingplatform-admin/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-ads-marketingplatform-admin/docs/_templates/layout.html b/packages/google-ads-marketingplatform-admin/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-ads-marketingplatform-admin/docs/conf.py b/packages/google-ads-marketingplatform-admin/docs/conf.py new file mode 100644 index 000000000000..5c68a3e1a72f --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-ads-marketingplatform-admin documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. 
+needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-ads-marketingplatform-admin" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for google-ads-marketingplatform-admin", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-ads-marketingplatform-admin-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-ads-marketingplatform-admin.tex", + "google-ads-marketingplatform-admin Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-ads-marketingplatform-admin", + "google-ads-marketingplatform-admin Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-ads-marketingplatform-admin", + "google-ads-marketingplatform-admin Documentation", + author, + "google-ads-marketingplatform-admin", + "google-ads-marketingplatform-admin Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-ads-marketingplatform-admin/docs/index.rst b/packages/google-ads-marketingplatform-admin/docs/index.rst new file mode 100644 index 000000000000..b217f5c4b15d --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. 
include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + marketingplatform_admin_v1alpha/services_ + marketingplatform_admin_v1alpha/types_ + + +Changelog +--------- + +For a list of all ``google-ads-marketingplatform-admin`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst new file mode 100644 index 000000000000..938e180ef989 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst @@ -0,0 +1,10 @@ +MarketingplatformAdminService +----------------------------------------------- + +.. automodule:: google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service + :members: + :inherited-members: + +.. automodule:: google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst new file mode 100644 index 000000000000..427be3b19a50 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst @@ -0,0 +1,6 @@ +Services for Google Ads Marketingplatform Admin v1alpha API +=========================================================== +.. 
toctree:: + :maxdepth: 2 + + marketingplatform_admin_service diff --git a/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst new file mode 100644 index 000000000000..829ca4ea07f4 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst @@ -0,0 +1,6 @@ +Types for Google Ads Marketingplatform Admin v1alpha API +======================================================== + +.. automodule:: google.ads.marketingplatform_admin_v1alpha.types + :members: + :show-inheritance: diff --git a/packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst b/packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py new file mode 100644 index 000000000000..56669ac018e6 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.ads.marketingplatform_admin import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.async_client import ( + MarketingplatformAdminServiceAsyncClient, +) +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.client import ( + MarketingplatformAdminServiceClient, +) +from google.ads.marketingplatform_admin_v1alpha.types.marketingplatform_admin import ( + AnalyticsServiceLevel, + CreateAnalyticsAccountLinkRequest, + DeleteAnalyticsAccountLinkRequest, + GetOrganizationRequest, + ListAnalyticsAccountLinksRequest, + ListAnalyticsAccountLinksResponse, + SetPropertyServiceLevelRequest, + SetPropertyServiceLevelResponse, +) +from google.ads.marketingplatform_admin_v1alpha.types.resources import ( + AnalyticsAccountLink, + LinkVerificationState, + Organization, +) + +__all__ = ( + "MarketingplatformAdminServiceClient", + "MarketingplatformAdminServiceAsyncClient", + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "GetOrganizationRequest", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", + "AnalyticsServiceLevel", + "AnalyticsAccountLink", + "Organization", + "LinkVerificationState", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py 
b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py new file mode 100644 index 000000000000..33d37a7b677b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed new file mode 100644 index 000000000000..4f4b168c56da --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-ads-marketingplatform-admin package uses inline types. 
diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py new file mode 100644 index 000000000000..c04bf34623a8 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.marketingplatform_admin_service import ( + MarketingplatformAdminServiceAsyncClient, + MarketingplatformAdminServiceClient, +) +from .types.marketingplatform_admin import ( + AnalyticsServiceLevel, + CreateAnalyticsAccountLinkRequest, + DeleteAnalyticsAccountLinkRequest, + GetOrganizationRequest, + ListAnalyticsAccountLinksRequest, + ListAnalyticsAccountLinksResponse, + SetPropertyServiceLevelRequest, + SetPropertyServiceLevelResponse, +) +from .types.resources import AnalyticsAccountLink, LinkVerificationState, Organization + +__all__ = ( + "MarketingplatformAdminServiceAsyncClient", + "AnalyticsAccountLink", + "AnalyticsServiceLevel", + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "GetOrganizationRequest", + "LinkVerificationState", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "MarketingplatformAdminServiceClient", + "Organization", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json new file mode 100644 index 000000000000..8d346e91ed67 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json @@ -0,0 +1,103 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.ads.marketingplatform_admin_v1alpha", + "protoPackage": "google.marketingplatform.admin.v1alpha", + "schema": "1.0", + "services": { + "MarketingplatformAdminService": { + "clients": { + "grpc": { + "libraryClient": "MarketingplatformAdminServiceClient", + "rpcs": { + 
"CreateAnalyticsAccountLink": { + "methods": [ + "create_analytics_account_link" + ] + }, + "DeleteAnalyticsAccountLink": { + "methods": [ + "delete_analytics_account_link" + ] + }, + "GetOrganization": { + "methods": [ + "get_organization" + ] + }, + "ListAnalyticsAccountLinks": { + "methods": [ + "list_analytics_account_links" + ] + }, + "SetPropertyServiceLevel": { + "methods": [ + "set_property_service_level" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MarketingplatformAdminServiceAsyncClient", + "rpcs": { + "CreateAnalyticsAccountLink": { + "methods": [ + "create_analytics_account_link" + ] + }, + "DeleteAnalyticsAccountLink": { + "methods": [ + "delete_analytics_account_link" + ] + }, + "GetOrganization": { + "methods": [ + "get_organization" + ] + }, + "ListAnalyticsAccountLinks": { + "methods": [ + "list_analytics_account_links" + ] + }, + "SetPropertyServiceLevel": { + "methods": [ + "set_property_service_level" + ] + } + } + }, + "rest": { + "libraryClient": "MarketingplatformAdminServiceClient", + "rpcs": { + "CreateAnalyticsAccountLink": { + "methods": [ + "create_analytics_account_link" + ] + }, + "DeleteAnalyticsAccountLink": { + "methods": [ + "delete_analytics_account_link" + ] + }, + "GetOrganization": { + "methods": [ + "get_organization" + ] + }, + "ListAnalyticsAccountLinks": { + "methods": [ + "list_analytics_account_links" + ] + }, + "SetPropertyServiceLevel": { + "methods": [ + "set_property_service_level" + ] + } + } + } + } + } + } +} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py new file mode 100644 index 000000000000..33d37a7b677b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed new file mode 100644 index 000000000000..4f4b168c56da --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-ads-marketingplatform-admin package uses inline types. diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py new file mode 100644 index 000000000000..e634b30fd6a2 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import MarketingplatformAdminServiceAsyncClient +from .client import MarketingplatformAdminServiceClient + +__all__ = ( + "MarketingplatformAdminServiceClient", + "MarketingplatformAdminServiceAsyncClient", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py new file mode 100644 index 000000000000..cc9647487d98 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py @@ -0,0 +1,894 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service import ( + pagers, +) +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .client import MarketingplatformAdminServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport +from .transports.grpc_asyncio import MarketingplatformAdminServiceGrpcAsyncIOTransport + + +class MarketingplatformAdminServiceAsyncClient: + """Service Interface for the Google Marketing Platform Admin + API. + """ + + _client: MarketingplatformAdminServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = MarketingplatformAdminServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + + account_path = staticmethod(MarketingplatformAdminServiceClient.account_path) + parse_account_path = staticmethod( + MarketingplatformAdminServiceClient.parse_account_path + ) + analytics_account_link_path = staticmethod( + MarketingplatformAdminServiceClient.analytics_account_link_path + ) + parse_analytics_account_link_path = staticmethod( + MarketingplatformAdminServiceClient.parse_analytics_account_link_path + ) + organization_path = staticmethod( + MarketingplatformAdminServiceClient.organization_path + ) + parse_organization_path = staticmethod( + MarketingplatformAdminServiceClient.parse_organization_path + ) + property_path = staticmethod(MarketingplatformAdminServiceClient.property_path) + parse_property_path = staticmethod( + MarketingplatformAdminServiceClient.parse_property_path + ) + common_billing_account_path = staticmethod( + MarketingplatformAdminServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + MarketingplatformAdminServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + MarketingplatformAdminServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + MarketingplatformAdminServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + 
MarketingplatformAdminServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + MarketingplatformAdminServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceAsyncClient: The constructed client. + """ + return MarketingplatformAdminServiceClient.from_service_account_info.__func__(MarketingplatformAdminServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceAsyncClient: The constructed client. + """ + return MarketingplatformAdminServiceClient.from_service_account_file.__func__(MarketingplatformAdminServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return MarketingplatformAdminServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> MarketingplatformAdminServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MarketingplatformAdminServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = MarketingplatformAdminServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + MarketingplatformAdminServiceTransport, + Callable[..., MarketingplatformAdminServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the marketingplatform admin service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MarketingplatformAdminServiceTransport,Callable[..., MarketingplatformAdminServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MarketingplatformAdminServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MarketingplatformAdminServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_organization( + self, + request: Optional[ + Union[marketingplatform_admin.GetOrganizationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Organization: + r"""Lookup for a single organization. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_organization(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest, dict]]): + The request object. Request message for GetOrganization + RPC. + name (:class:`str`): + Required. The name of the Organization to retrieve. + Format: organizations/{org_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.Organization: + A resource message representing a + Google Marketing Platform organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, marketingplatform_admin.GetOrganizationRequest): + request = marketingplatform_admin.GetOrganizationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_organization + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_analytics_account_links( + self, + request: Optional[ + Union[marketingplatform_admin.ListAnalyticsAccountLinksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAnalyticsAccountLinksAsyncPager: + r"""Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest, dict]]): + The request object. Request message for + ListAnalyticsAccountLinks RPC. + parent (:class:`str`): + Required. The parent organization, which owns this + collection of Analytics account links. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksAsyncPager: + Response message for + ListAnalyticsAccountLinks RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.ListAnalyticsAccountLinksRequest + ): + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_analytics_account_links + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAnalyticsAccountLinksAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.CreateAnalyticsAccountLinkRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + analytics_account_link: Optional[resources.AnalyticsAccountLink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.AnalyticsAccountLink: + r"""Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = await client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest, dict]]): + The request object. Request message for + CreateAnalyticsAccountLink RPC. + parent (:class:`str`): + Required. The parent resource where this Analytics + account link will be created. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + analytics_account_link (:class:`google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink`): + Required. The Analytics account link + to create. + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink: + A resource message representing the + link between a Google Analytics account + and a Google Marketing Platform + organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.CreateAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + await client.delete_analytics_account_link(request=request) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest, dict]]): + The request object. Request message for + DeleteAnalyticsAccountLink RPC. + name (:class:`str`): + Required. The name of the Analytics account link to + delete. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.DeleteAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_property_service_level( + self, + request: Optional[ + Union[marketingplatform_admin.SetPropertyServiceLevelRequest, dict] + ] = None, + *, + analytics_account_link: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + r"""Updates the service level for an Analytics property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = await client.set_property_service_level(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest, dict]]): + The request object. Request message for + SetPropertyServiceLevel RPC. + analytics_account_link (:class:`str`): + Required. The parent AnalyticsAccountLink scope where + this property is in. 
Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse: + Response message for + SetPropertyServiceLevel RPC. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.SetPropertyServiceLevelRequest + ): + request = marketingplatform_admin.SetPropertyServiceLevelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.set_property_service_level + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("analytics_account_link", request.analytics_account_link),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "MarketingplatformAdminServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MarketingplatformAdminServiceAsyncClient",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py new file mode 100644 index 000000000000..f31761153ab6 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py @@ -0,0 +1,1349 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service import ( + pagers, +) +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .transports.base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport +from .transports.grpc import MarketingplatformAdminServiceGrpcTransport +from .transports.grpc_asyncio import MarketingplatformAdminServiceGrpcAsyncIOTransport +from .transports.rest import MarketingplatformAdminServiceRestTransport + + +class MarketingplatformAdminServiceClientMeta(type): + """Metaclass for the MarketingplatformAdminService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[MarketingplatformAdminServiceTransport]] + _transport_registry["grpc"] = MarketingplatformAdminServiceGrpcTransport + _transport_registry[ + "grpc_asyncio" + ] = MarketingplatformAdminServiceGrpcAsyncIOTransport + _transport_registry["rest"] = MarketingplatformAdminServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MarketingplatformAdminServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MarketingplatformAdminServiceClient( + metaclass=MarketingplatformAdminServiceClientMeta +): + """Service Interface for the Google Marketing Platform Admin + API. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "marketingplatformadmin.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "marketingplatformadmin.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MarketingplatformAdminServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MarketingplatformAdminServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def account_path( + account: str, + ) -> str: + """Returns a fully-qualified account string.""" + return "accounts/{account}".format( + account=account, + ) + + @staticmethod + def parse_account_path(path: str) -> Dict[str, str]: + """Parses a account path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def analytics_account_link_path( + organization: str, + analytics_account_link: str, + ) -> str: + """Returns a fully-qualified analytics_account_link string.""" + return "organizations/{organization}/analyticsAccountLinks/{analytics_account_link}".format( + organization=organization, + analytics_account_link=analytics_account_link, + ) + + @staticmethod + def parse_analytics_account_link_path(path: str) -> Dict[str, str]: + """Parses a analytics_account_link path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/analyticsAccountLinks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_organization_path(path: str) -> Dict[str, str]: + """Parses a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def property_path( + 
property: str, + ) -> str: + """Returns a fully-qualified property string.""" + return "properties/{property}".format( + property=property, + ) + + @staticmethod + def parse_property_path(path: str) -> Dict[str, str]: + """Parses a property path into its component segments.""" + m = re.match(r"^properties/(?P<property>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component
segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. 
+ credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or MarketingplatformAdminServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+ """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + MarketingplatformAdminServiceTransport, + Callable[..., MarketingplatformAdminServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the marketingplatform admin service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MarketingplatformAdminServiceTransport,Callable[..., MarketingplatformAdminServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MarketingplatformAdminServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = MarketingplatformAdminServiceClient._read_environment_variables() + self._client_cert_source = ( + MarketingplatformAdminServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = ( + MarketingplatformAdminServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe 
domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance( + transport, MarketingplatformAdminServiceTransport + ) + if transport_provided: + # transport is a MarketingplatformAdminServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(MarketingplatformAdminServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or MarketingplatformAdminServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[MarketingplatformAdminServiceTransport], + Callable[..., MarketingplatformAdminServiceTransport], + ] = ( + MarketingplatformAdminServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., MarketingplatformAdminServiceTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + 
credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_organization( + self, + request: Optional[ + Union[marketingplatform_admin.GetOrganizationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Organization: + r"""Lookup for a single organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = client.get_organization(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest, dict]): + The request object. Request message for GetOrganization + RPC. + name (str): + Required. The name of the Organization to retrieve. 
+ Format: organizations/{org_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.Organization: + A resource message representing a + Google Marketing Platform organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, marketingplatform_admin.GetOrganizationRequest): + request = marketingplatform_admin.GetOrganizationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_organization] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_analytics_account_links( + self, + request: Optional[ + Union[marketingplatform_admin.ListAnalyticsAccountLinksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAnalyticsAccountLinksPager: + r"""Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest, dict]): + The request object. Request message for + ListAnalyticsAccountLinks RPC. + parent (str): + Required. The parent organization, which owns this + collection of Analytics account links. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksPager: + Response message for + ListAnalyticsAccountLinks RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.ListAnalyticsAccountLinksRequest + ): + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_analytics_account_links + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAnalyticsAccountLinksPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.CreateAnalyticsAccountLinkRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + analytics_account_link: Optional[resources.AnalyticsAccountLink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.AnalyticsAccountLink: + r"""Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest, dict]): + The request object. Request message for + CreateAnalyticsAccountLink RPC. + parent (str): + Required. The parent resource where this Analytics + account link will be created. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + analytics_account_link (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink): + Required. The Analytics account link + to create. + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink: + A resource message representing the + link between a Google Analytics account + and a Google Marketing Platform + organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.CreateAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + client.delete_analytics_account_link(request=request) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest, dict]): + The request object. Request message for + DeleteAnalyticsAccountLink RPC. + name (str): + Required. The name of the Analytics account link to + delete. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.DeleteAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_property_service_level( + self, + request: Optional[ + Union[marketingplatform_admin.SetPropertyServiceLevelRequest, dict] + ] = None, + *, + analytics_account_link: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + r"""Updates the service level for an Analytics property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = client.set_property_service_level(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest, dict]): + The request object. Request message for + SetPropertyServiceLevel RPC. + analytics_account_link (str): + Required. The parent AnalyticsAccountLink scope where + this property is in. 
Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse: + Response message for + SetPropertyServiceLevel RPC. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.SetPropertyServiceLevelRequest + ): + request = marketingplatform_admin.SetPropertyServiceLevelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.set_property_service_level + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("analytics_account_link", request.analytics_account_link),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MarketingplatformAdminServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MarketingplatformAdminServiceClient",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py new file mode 100644 index 000000000000..bed8bd431770 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + + +class ListAnalyticsAccountLinksPager: + """A pager for iterating through ``list_analytics_account_links`` requests. + + This class thinly wraps an initial + :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``analytics_account_links`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAnalyticsAccountLinks`` requests and continue to iterate + through the ``analytics_account_links`` field on the + corresponding responses. + + All the usual :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., marketingplatform_admin.ListAnalyticsAccountLinksResponse + ], + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + response: marketingplatform_admin.ListAnalyticsAccountLinksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest): + The initial request object. + response (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[marketingplatform_admin.ListAnalyticsAccountLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[resources.AnalyticsAccountLink]: + for page in self.pages: + yield from page.analytics_account_links + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAnalyticsAccountLinksAsyncPager: + """A pager for iterating through ``list_analytics_account_links`` requests. + + This class thinly wraps an initial + :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``analytics_account_links`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAnalyticsAccountLinks`` requests and continue to iterate + through the ``analytics_account_links`` field on the + corresponding responses. + + All the usual :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[marketingplatform_admin.ListAnalyticsAccountLinksResponse] + ], + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + response: marketingplatform_admin.ListAnalyticsAccountLinksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest): + The initial request object. + response (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[marketingplatform_admin.ListAnalyticsAccountLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.AnalyticsAccountLink]: + async def async_generator(): + async for page in self.pages: + for response in page.analytics_account_links: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py new file mode 100644 index 000000000000..205d647ea99a --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MarketingplatformAdminServiceTransport +from .grpc import MarketingplatformAdminServiceGrpcTransport +from .grpc_asyncio import MarketingplatformAdminServiceGrpcAsyncIOTransport +from .rest import ( + MarketingplatformAdminServiceRestInterceptor, + MarketingplatformAdminServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[MarketingplatformAdminServiceTransport]] +_transport_registry["grpc"] = MarketingplatformAdminServiceGrpcTransport +_transport_registry["grpc_asyncio"] = MarketingplatformAdminServiceGrpcAsyncIOTransport +_transport_registry["rest"] = MarketingplatformAdminServiceRestTransport + +__all__ = ( + "MarketingplatformAdminServiceTransport", + "MarketingplatformAdminServiceGrpcTransport", + "MarketingplatformAdminServiceGrpcAsyncIOTransport", + "MarketingplatformAdminServiceRestTransport", + "MarketingplatformAdminServiceRestInterceptor", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py new file mode 100644 index 000000000000..6f70b5c211e1 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py @@ -0,0 +1,232 @@ 
+# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class MarketingplatformAdminServiceTransport(abc.ABC): + """Abstract transport class for MarketingplatformAdminService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ) + + DEFAULT_HOST: str = "marketingplatformadmin.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + 
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.get_organization: gapic_v1.method.wrap_method( + self.get_organization, + default_timeout=None, + client_info=client_info, + ), + self.list_analytics_account_links: gapic_v1.method.wrap_method( + self.list_analytics_account_links, + default_timeout=None, + client_info=client_info, + ), + self.create_analytics_account_link: gapic_v1.method.wrap_method( + self.create_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_analytics_account_link: gapic_v1.method.wrap_method( + self.delete_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.set_property_service_level: gapic_v1.method.wrap_method( + self.set_property_service_level, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], + Union[resources.Organization, Awaitable[resources.Organization]], + ]: + raise NotImplementedError() + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + Union[ + marketingplatform_admin.ListAnalyticsAccountLinksResponse, + Awaitable[marketingplatform_admin.ListAnalyticsAccountLinksResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + Union[ + resources.AnalyticsAccountLink, Awaitable[resources.AnalyticsAccountLink] + ], + ]: + raise NotImplementedError() + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + Union[ + marketingplatform_admin.SetPropertyServiceLevelResponse, + Awaitable[marketingplatform_admin.SetPropertyServiceLevelResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("MarketingplatformAdminServiceTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py new file mode 100644 index 000000000000..e8f3656c2d06 --- /dev/null +++ 
b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py @@ -0,0 +1,412 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport + + +class MarketingplatformAdminServiceGrpcTransport( + MarketingplatformAdminServiceTransport +): + """gRPC backend transport for MarketingplatformAdminService. + + Service Interface for the Google Marketing Platform Admin + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], resources.Organization + ]: + r"""Return a callable for the get organization method over gRPC. + + Lookup for a single organization. + + Returns: + Callable[[~.GetOrganizationRequest], + ~.Organization]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_organization" not in self._stubs: + self._stubs["get_organization"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/GetOrganization", + request_serializer=marketingplatform_admin.GetOrganizationRequest.serialize, + response_deserializer=resources.Organization.deserialize, + ) + return self._stubs["get_organization"] + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + marketingplatform_admin.ListAnalyticsAccountLinksResponse, + ]: + r"""Return a callable for the list analytics account links method over gRPC. + + Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + Returns: + Callable[[~.ListAnalyticsAccountLinksRequest], + ~.ListAnalyticsAccountLinksResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_analytics_account_links" not in self._stubs: + self._stubs["list_analytics_account_links"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/ListAnalyticsAccountLinks", + request_serializer=marketingplatform_admin.ListAnalyticsAccountLinksRequest.serialize, + response_deserializer=marketingplatform_admin.ListAnalyticsAccountLinksResponse.deserialize, + ) + return self._stubs["list_analytics_account_links"] + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + resources.AnalyticsAccountLink, + ]: + r"""Return a callable for the create analytics account link method over gRPC. + + Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + Returns: + Callable[[~.CreateAnalyticsAccountLinkRequest], + ~.AnalyticsAccountLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_analytics_account_link" not in self._stubs: + self._stubs[ + "create_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/CreateAnalyticsAccountLink", + request_serializer=marketingplatform_admin.CreateAnalyticsAccountLinkRequest.serialize, + response_deserializer=resources.AnalyticsAccountLink.deserialize, + ) + return self._stubs["create_analytics_account_link"] + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], empty_pb2.Empty + ]: + r"""Return a callable for the delete analytics account link method over gRPC. + + Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + Returns: + Callable[[~.DeleteAnalyticsAccountLinkRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_analytics_account_link" not in self._stubs: + self._stubs[ + "delete_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/DeleteAnalyticsAccountLink", + request_serializer=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_analytics_account_link"] + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + marketingplatform_admin.SetPropertyServiceLevelResponse, + ]: + r"""Return a callable for the set property service level method over gRPC. 
+ + Updates the service level for an Analytics property. + + Returns: + Callable[[~.SetPropertyServiceLevelRequest], + ~.SetPropertyServiceLevelResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_property_service_level" not in self._stubs: + self._stubs["set_property_service_level"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/SetPropertyServiceLevel", + request_serializer=marketingplatform_admin.SetPropertyServiceLevelRequest.serialize, + response_deserializer=marketingplatform_admin.SetPropertyServiceLevelResponse.deserialize, + ) + return self._stubs["set_property_service_level"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("MarketingplatformAdminServiceGrpcTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..caef725e70be --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py @@ -0,0 +1,444 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport +from .grpc import MarketingplatformAdminServiceGrpcTransport + + +class MarketingplatformAdminServiceGrpcAsyncIOTransport( + MarketingplatformAdminServiceTransport +): + """gRPC AsyncIO backend transport for MarketingplatformAdminService. + + Service Interface for the Google Marketing Platform Admin + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], + Awaitable[resources.Organization], + ]: + r"""Return a callable for the get organization method over gRPC. + + Lookup for a single organization. + + Returns: + Callable[[~.GetOrganizationRequest], + Awaitable[~.Organization]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_organization" not in self._stubs: + self._stubs["get_organization"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/GetOrganization", + request_serializer=marketingplatform_admin.GetOrganizationRequest.serialize, + response_deserializer=resources.Organization.deserialize, + ) + return self._stubs["get_organization"] + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + Awaitable[marketingplatform_admin.ListAnalyticsAccountLinksResponse], + ]: + r"""Return a callable for the list analytics account links method over gRPC. 
+ + Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + Returns: + Callable[[~.ListAnalyticsAccountLinksRequest], + Awaitable[~.ListAnalyticsAccountLinksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_analytics_account_links" not in self._stubs: + self._stubs["list_analytics_account_links"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/ListAnalyticsAccountLinks", + request_serializer=marketingplatform_admin.ListAnalyticsAccountLinksRequest.serialize, + response_deserializer=marketingplatform_admin.ListAnalyticsAccountLinksResponse.deserialize, + ) + return self._stubs["list_analytics_account_links"] + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + Awaitable[resources.AnalyticsAccountLink], + ]: + r"""Return a callable for the create analytics account link method over gRPC. + + Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + Returns: + Callable[[~.CreateAnalyticsAccountLinkRequest], + Awaitable[~.AnalyticsAccountLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_analytics_account_link" not in self._stubs: + self._stubs[ + "create_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/CreateAnalyticsAccountLink", + request_serializer=marketingplatform_admin.CreateAnalyticsAccountLinkRequest.serialize, + response_deserializer=resources.AnalyticsAccountLink.deserialize, + ) + return self._stubs["create_analytics_account_link"] + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], + Awaitable[empty_pb2.Empty], + ]: + r"""Return a callable for the delete analytics account link method over gRPC. + + Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + Returns: + Callable[[~.DeleteAnalyticsAccountLinkRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_analytics_account_link" not in self._stubs: + self._stubs[ + "delete_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/DeleteAnalyticsAccountLink", + request_serializer=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_analytics_account_link"] + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + Awaitable[marketingplatform_admin.SetPropertyServiceLevelResponse], + ]: + r"""Return a callable for the set property service level method over gRPC. + + Updates the service level for an Analytics property. + + Returns: + Callable[[~.SetPropertyServiceLevelRequest], + Awaitable[~.SetPropertyServiceLevelResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_property_service_level" not in self._stubs: + self._stubs["set_property_service_level"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/SetPropertyServiceLevel", + request_serializer=marketingplatform_admin.SetPropertyServiceLevelRequest.serialize, + response_deserializer=marketingplatform_admin.SetPropertyServiceLevelResponse.deserialize, + ) + return self._stubs["set_property_service_level"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_organization: gapic_v1.method_async.wrap_method( + self.get_organization, + default_timeout=None, + client_info=client_info, + ), + self.list_analytics_account_links: gapic_v1.method_async.wrap_method( + self.list_analytics_account_links, + default_timeout=None, + client_info=client_info, + ), + self.create_analytics_account_link: gapic_v1.method_async.wrap_method( + self.create_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_analytics_account_link: gapic_v1.method_async.wrap_method( + self.delete_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.set_property_service_level: gapic_v1.method_async.wrap_method( + self.set_property_service_level, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("MarketingplatformAdminServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py new file mode 100644 index 000000000000..b3894ca05fc4 --- /dev/null +++ 
b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py @@ -0,0 +1,858 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import MarketingplatformAdminServiceTransport + +DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MarketingplatformAdminServiceRestInterceptor: + """Interceptor for MarketingplatformAdminService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MarketingplatformAdminServiceRestTransport. + + .. code-block:: python + class MyCustomMarketingplatformAdminServiceInterceptor(MarketingplatformAdminServiceRestInterceptor): + def pre_create_analytics_account_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_analytics_account_link(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_analytics_account_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_organization(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_organization(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_analytics_account_links(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_analytics_account_links(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_property_service_level(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_property_service_level(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
MarketingplatformAdminServiceRestTransport(interceptor=MyCustomMarketingplatformAdminServiceInterceptor()) + client = MarketingplatformAdminServiceClient(transport=transport) + + + """ + + def pre_create_analytics_account_link( + self, + request: marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_analytics_account_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def post_create_analytics_account_link( + self, response: resources.AnalyticsAccountLink + ) -> resources.AnalyticsAccountLink: + """Post-rpc interceptor for create_analytics_account_link + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + def pre_delete_analytics_account_link( + self, + request: marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_analytics_account_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def pre_get_organization( + self, + request: marketingplatform_admin.GetOrganizationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.GetOrganizationRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_organization + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. 
+ """ + return request, metadata + + def post_get_organization( + self, response: resources.Organization + ) -> resources.Organization: + """Post-rpc interceptor for get_organization + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + def pre_list_analytics_account_links( + self, + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.ListAnalyticsAccountLinksRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_analytics_account_links + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def post_list_analytics_account_links( + self, response: marketingplatform_admin.ListAnalyticsAccountLinksResponse + ) -> marketingplatform_admin.ListAnalyticsAccountLinksResponse: + """Post-rpc interceptor for list_analytics_account_links + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + def pre_set_property_service_level( + self, + request: marketingplatform_admin.SetPropertyServiceLevelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.SetPropertyServiceLevelRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for set_property_service_level + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. 
+ """ + return request, metadata + + def post_set_property_service_level( + self, response: marketingplatform_admin.SetPropertyServiceLevelResponse + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + """Post-rpc interceptor for set_property_service_level + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MarketingplatformAdminServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MarketingplatformAdminServiceRestInterceptor + + +class MarketingplatformAdminServiceRestTransport( + MarketingplatformAdminServiceTransport +): + """REST backend transport for MarketingplatformAdminService. + + Service Interface for the Google Marketing Platform Admin + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MarketingplatformAdminServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = (
+            interceptor or MarketingplatformAdminServiceRestInterceptor()
+        )
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateAnalyticsAccountLink(MarketingplatformAdminServiceRestStub):
+        def __hash__(self):
+            return hash("CreateAnalyticsAccountLink")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: marketingplatform_admin.CreateAnalyticsAccountLinkRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> resources.AnalyticsAccountLink:
+            r"""Call the create analytics account
+        link method over HTTP.
+
+            Args:
+                request (~.marketingplatform_admin.CreateAnalyticsAccountLinkRequest):
+                    The request object. Request message for
+                CreateAnalyticsAccountLink RPC.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.AnalyticsAccountLink: + A resource message representing the + link between a Google Analytics account + and a Google Marketing Platform + organization. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=organizations/*}/analyticsAccountLinks", + "body": "analytics_account_link", + }, + ] + request, metadata = self._interceptor.pre_create_analytics_account_link( + request, metadata + ) + pb_request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.AnalyticsAccountLink() + pb_resp = resources.AnalyticsAccountLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_analytics_account_link(resp) + return resp + + class _DeleteAnalyticsAccountLink(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("DeleteAnalyticsAccountLink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete analytics account + link method over HTTP. + + Args: + request (~.marketingplatform_admin.DeleteAnalyticsAccountLinkRequest): + The request object. Request message for + DeleteAnalyticsAccountLink RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=organizations/*/analyticsAccountLinks/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_analytics_account_link( + request, metadata + ) + pb_request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetOrganization(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("GetOrganization") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.GetOrganizationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Organization: + r"""Call the get organization method over HTTP. 
+ + Args: + request (~.marketingplatform_admin.GetOrganizationRequest): + The request object. Request message for GetOrganization + RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Organization: + A resource message representing a + Google Marketing Platform organization. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=organizations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_organization( + request, metadata + ) + pb_request = marketingplatform_admin.GetOrganizationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Organization() + pb_resp = resources.Organization.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_organization(resp) + return resp + + class _ListAnalyticsAccountLinks(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("ListAnalyticsAccountLinks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.ListAnalyticsAccountLinksResponse: + r"""Call the list analytics account + links method over HTTP. + + Args: + request (~.marketingplatform_admin.ListAnalyticsAccountLinksRequest): + The request object. Request message for + ListAnalyticsAccountLinks RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.marketingplatform_admin.ListAnalyticsAccountLinksResponse: + Response message for + ListAnalyticsAccountLinks RPC. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=organizations/*}/analyticsAccountLinks", + }, + ] + request, metadata = self._interceptor.pre_list_analytics_account_links( + request, metadata + ) + pb_request = marketingplatform_admin.ListAnalyticsAccountLinksRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + pb_resp = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_analytics_account_links(resp) + return resp + + class _SetPropertyServiceLevel(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("SetPropertyServiceLevel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.SetPropertyServiceLevelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + r"""Call the set property service + level method over HTTP. + + Args: + request (~.marketingplatform_admin.SetPropertyServiceLevelRequest): + The request object. Request message for + SetPropertyServiceLevel RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.marketingplatform_admin.SetPropertyServiceLevelResponse: + Response message for + SetPropertyServiceLevel RPC. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{analytics_account_link=organizations/*/analyticsAccountLinks/*}:setPropertyServiceLevel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_property_service_level( + request, metadata + ) + pb_request = marketingplatform_admin.SetPropertyServiceLevelRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = marketingplatform_admin.SetPropertyServiceLevelResponse() + pb_resp = marketingplatform_admin.SetPropertyServiceLevelResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_property_service_level(resp) + return resp + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + resources.AnalyticsAccountLink, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAnalyticsAccountLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], empty_pb2.Empty + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAnalyticsAccountLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], resources.Organization + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetOrganization(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + marketingplatform_admin.ListAnalyticsAccountLinksResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAnalyticsAccountLinks(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + marketingplatform_admin.SetPropertyServiceLevelResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetPropertyServiceLevel(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MarketingplatformAdminServiceRestTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py new file mode 100644 index 000000000000..617c3bec15b6 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .marketingplatform_admin import ( + AnalyticsServiceLevel, + CreateAnalyticsAccountLinkRequest, + DeleteAnalyticsAccountLinkRequest, + GetOrganizationRequest, + ListAnalyticsAccountLinksRequest, + ListAnalyticsAccountLinksResponse, + SetPropertyServiceLevelRequest, + SetPropertyServiceLevelResponse, +) +from .resources import AnalyticsAccountLink, LinkVerificationState, Organization + +__all__ = ( + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "GetOrganizationRequest", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", + "AnalyticsServiceLevel", + "AnalyticsAccountLink", + "Organization", + "LinkVerificationState", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py new file mode 100644 index 000000000000..a446e0c57b69 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py @@ -0,0 +1,217 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import resources + +__protobuf__ = proto.module( + package="google.marketingplatform.admin.v1alpha", + manifest={ + "AnalyticsServiceLevel", + "GetOrganizationRequest", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", + }, +) + + +class AnalyticsServiceLevel(proto.Enum): + r"""Various levels of service for Google Analytics. + + Values: + ANALYTICS_SERVICE_LEVEL_UNSPECIFIED (0): + Service level unspecified. + ANALYTICS_SERVICE_LEVEL_STANDARD (1): + The standard version of Google Analytics. + ANALYTICS_SERVICE_LEVEL_360 (2): + The premium version of Google Analytics. + """ + ANALYTICS_SERVICE_LEVEL_UNSPECIFIED = 0 + ANALYTICS_SERVICE_LEVEL_STANDARD = 1 + ANALYTICS_SERVICE_LEVEL_360 = 2 + + +class GetOrganizationRequest(proto.Message): + r"""Request message for GetOrganization RPC. + + Attributes: + name (str): + Required. The name of the Organization to retrieve. Format: + organizations/{org_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListAnalyticsAccountLinksRequest(proto.Message): + r"""Request message for ListAnalyticsAccountLinks RPC. + + Attributes: + parent (str): + Required. The parent organization, which owns this + collection of Analytics account links. Format: + organizations/{org_id} + page_size (int): + Optional. The maximum number of Analytics + account links to return in one call. The service + may return fewer than this value. + + If unspecified, at most 50 Analytics account + links will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. 
A page token, received from a previous + ListAnalyticsAccountLinks call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAnalyticsAccountLinks`` must match the call that + provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAnalyticsAccountLinksResponse(proto.Message): + r"""Response message for ListAnalyticsAccountLinks RPC. + + Attributes: + analytics_account_links (MutableSequence[google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink]): + Analytics account links in this organization. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + analytics_account_links: MutableSequence[ + resources.AnalyticsAccountLink + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.AnalyticsAccountLink, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateAnalyticsAccountLinkRequest(proto.Message): + r"""Request message for CreateAnalyticsAccountLink RPC. + + Attributes: + parent (str): + Required. The parent resource where this Analytics account + link will be created. Format: organizations/{org_id} + analytics_account_link (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink): + Required. The Analytics account link to + create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + analytics_account_link: resources.AnalyticsAccountLink = proto.Field( + proto.MESSAGE, + number=2, + message=resources.AnalyticsAccountLink, + ) + + +class DeleteAnalyticsAccountLinkRequest(proto.Message): + r"""Request message for DeleteAnalyticsAccountLink RPC. 
+ + Attributes: + name (str): + Required. The name of the Analytics account link to delete. + Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SetPropertyServiceLevelRequest(proto.Message): + r"""Request message for SetPropertyServiceLevel RPC. + + Attributes: + analytics_account_link (str): + Required. The parent AnalyticsAccountLink scope where this + property is in. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + analytics_property (str): + Required. The Analytics property to change the ServiceLevel + setting. This field is the name of the Google Analytics + Admin API property resource. + + Format: + analyticsadmin.googleapis.com/properties/{property_id} + service_level (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsServiceLevel): + Required. The service level to set for this + property. + """ + + analytics_account_link: str = proto.Field( + proto.STRING, + number=1, + ) + analytics_property: str = proto.Field( + proto.STRING, + number=2, + ) + service_level: "AnalyticsServiceLevel" = proto.Field( + proto.ENUM, + number=3, + enum="AnalyticsServiceLevel", + ) + + +class SetPropertyServiceLevelResponse(proto.Message): + r"""Response message for SetPropertyServiceLevel RPC.""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py new file mode 100644 index 000000000000..420e17747cf7 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.marketingplatform.admin.v1alpha", + manifest={ + "LinkVerificationState", + "Organization", + "AnalyticsAccountLink", + }, +) + + +class LinkVerificationState(proto.Enum): + r"""The verification state of the link between a product account + and a GMP organization. + + Values: + LINK_VERIFICATION_STATE_UNSPECIFIED (0): + The link state is unknown. + LINK_VERIFICATION_STATE_VERIFIED (1): + The link is established. + LINK_VERIFICATION_STATE_NOT_VERIFIED (2): + The link is requested, but hasn't been + approved by the product account admin. + """ + LINK_VERIFICATION_STATE_UNSPECIFIED = 0 + LINK_VERIFICATION_STATE_VERIFIED = 1 + LINK_VERIFICATION_STATE_NOT_VERIFIED = 2 + + +class Organization(proto.Message): + r"""A resource message representing a Google Marketing Platform + organization. + + Attributes: + name (str): + Identifier. The resource name of the GMP organization. + Format: organizations/{org_id} + display_name (str): + The human-readable name for the organization. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AnalyticsAccountLink(proto.Message): + r"""A resource message representing the link between a Google + Analytics account and a Google Marketing Platform organization. + + Attributes: + name (str): + Identifier. 
Resource name of this AnalyticsAccountLink. Note
+            the resource ID is the same as the ID of the Analytics
+            account.
+
+            Format:
+            organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id}
+            Example: "organizations/xyz/analyticsAccountLinks/1234".
+        analytics_account (str):
+            Required. Immutable. The resource name of the AnalyticsAdmin
+            API account. The account ID will be used as the ID of this
+            AnalyticsAccountLink resource, which will become the final
+            component of the resource name.
+
+            Format: analyticsadmin.googleapis.com/accounts/{account_id}
+        display_name (str):
+            Output only. The human-readable name for the
+            Analytics account.
+        link_verification_state (google.ads.marketingplatform_admin_v1alpha.types.LinkVerificationState):
+            Output only. The verification state of the
+            link between the Analytics account and the
+            parent organization.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    analytics_account: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    link_verification_state: "LinkVerificationState" = proto.Field(
+        proto.ENUM,
+        number=4,
+        enum="LinkVerificationState",
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-ads-marketingplatform-admin/mypy.ini b/packages/google-ads-marketingplatform-admin/mypy.ini
new file mode 100644
index 000000000000..574c5aed394b
--- /dev/null
+++ b/packages/google-ads-marketingplatform-admin/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
diff --git a/packages/google-ads-marketingplatform-admin/noxfile.py b/packages/google-ads-marketingplatform-admin/noxfile.py
new file mode 100644
index 000000000000..67b7265f7586
--- /dev/null
+++ b/packages/google-ads-marketingplatform-admin/noxfile.py
@@ -0,0 +1,452 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing 
+nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py new file mode 100644 index 000000000000..bfd28a483b92 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- 
+# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = await client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py new file mode 100644 index 000000000000..6af1b08a5a3b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py new file mode 100644 index 000000000000..c0b2c7e1ffa9 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + await client.delete_analytics_account_link(request=request) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py new file mode 100644 index 000000000000..8f1a794eacac --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + client.delete_analytics_account_link(request=request) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py new file mode 100644 index 000000000000..7666fa53e916 --- /dev/null +++ 
b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetOrganization +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_organization(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py new file mode 100644 index 000000000000..52b506c61914 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetOrganization +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = client.get_organization(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py new file mode 100644 index 000000000000..3837010ff87f --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py 
@@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAnalyticsAccountLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py new file mode 100644 index 000000000000..af3ed458056a --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAnalyticsAccountLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py new file mode 
100644 index 000000000000..b07e73cde9f5 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetPropertyServiceLevel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = await client.set_property_service_level(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py new file mode 100644 index 000000000000..a742b4f50f64 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetPropertyServiceLevel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = client.set_property_service_level(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json new file mode 100644 index 000000000000..72abc6186c7b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json @@ -0,0 +1,822 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.marketingplatform.admin.v1alpha", + "version": "v1alpha" + } + ], + "language": "PYTHON", + "name": "google-ads-marketingplatform-admin", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.create_analytics_account_link", + 
"method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.CreateAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "CreateAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "analytics_account_link", + "type": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink", + "shortName": "create_analytics_account_link" + }, + "description": "Sample for CreateAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.create_analytics_account_link", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.CreateAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "CreateAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "analytics_account_link", + "type": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink", + "shortName": "create_analytics_account_link" + }, + "description": "Sample for CreateAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + 
], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.delete_analytics_account_link", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.DeleteAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "DeleteAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_analytics_account_link" + }, + "description": "Sample for DeleteAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.delete_analytics_account_link", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.DeleteAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "DeleteAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_analytics_account_link" + }, + "description": "Sample for DeleteAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.get_organization", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.GetOrganization", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "GetOrganization" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.Organization", + "shortName": "get_organization" + }, + "description": "Sample for GetOrganization", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.get_organization", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.GetOrganization", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "GetOrganization" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.Organization", + "shortName": "get_organization" + }, + "description": "Sample for GetOrganization", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.list_analytics_account_links", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.ListAnalyticsAccountLinks", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "ListAnalyticsAccountLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksAsyncPager", + "shortName": "list_analytics_account_links" + }, + "description": "Sample for ListAnalyticsAccountLinks", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + 
{ + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.list_analytics_account_links", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.ListAnalyticsAccountLinks", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "ListAnalyticsAccountLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksPager", + "shortName": "list_analytics_account_links" + }, + "description": "Sample for ListAnalyticsAccountLinks", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.set_property_service_level", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.SetPropertyServiceLevel", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "SetPropertyServiceLevel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest" + }, + { + "name": "analytics_account_link", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse", + "shortName": "set_property_service_level" + }, + "description": "Sample for SetPropertyServiceLevel", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_async", + "segments": [ + { + "end": 
53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.set_property_service_level", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.SetPropertyServiceLevel", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "SetPropertyServiceLevel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest" + }, + { + "name": "analytics_account_link", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse", + "shortName": "set_property_service_level" + }, + "description": "Sample for SetPropertyServiceLevel", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py" + } + ] +} diff --git a/packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh b/packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. 
+if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py b/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py new file mode 100644 index 000000000000..eb6dc67078de --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py @@ -0,0 +1,180 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class marketingplatform_adminCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_analytics_account_link': ('parent', 'analytics_account_link', ), + 'delete_analytics_account_link': ('name', ), + 'get_organization': ('name', ), + 'list_analytics_account_links': ('parent', 'page_size', 'page_token', ), + 'set_property_service_level': ('analytics_account_link', 'analytics_property', 'service_level', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=marketingplatform_adminCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the marketingplatform_admin client library. 
+ +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-ads-marketingplatform-admin/setup.py b/packages/google-ads-marketingplatform-admin/setup.py new file mode 100644 index 000000000000..bd6f637c0bf8 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/setup.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-ads-marketingplatform-admin" + + +description = "Google Ads Marketingplatform Admin API client library" + +version = None + +with open( + os.path.join(package_root, "google/ads/marketingplatform_admin/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") 
+] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-ads-marketingplatform-admin/testing/.gitignore b/packages/google-ads-marketingplatform-admin/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/tests/__init__.py b/packages/google-ads-marketingplatform-admin/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/__init__.py b/packages/google-ads-marketingplatform-admin/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py new file mode 100644 index 000000000000..c0d88b7ea387 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py @@ -0,0 +1,5969 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service import ( + MarketingplatformAdminServiceAsyncClient, + MarketingplatformAdminServiceClient, + pagers, + transports, +) +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MarketingplatformAdminServiceClient._get_default_mtls_endpoint(None) is None + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert 
MarketingplatformAdminServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + MarketingplatformAdminServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + MarketingplatformAdminServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source(None, False) is None + ) + assert ( + 
MarketingplatformAdminServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + 
MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + MarketingplatformAdminServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + MarketingplatformAdminServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + MarketingplatformAdminServiceClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + MarketingplatformAdminServiceClient._get_universe_domain(None, None) + == MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + MarketingplatformAdminServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. 
+ google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MarketingplatformAdminServiceClient, "grpc"), + (MarketingplatformAdminServiceAsyncClient, "grpc_asyncio"), + (MarketingplatformAdminServiceClient, "rest"), + ], +) +def test_marketingplatform_admin_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "marketingplatformadmin.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MarketingplatformAdminServiceGrpcTransport, "grpc"), + (transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MarketingplatformAdminServiceRestTransport, "rest"), + ], +) +def test_marketingplatform_admin_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = 
transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MarketingplatformAdminServiceClient, "grpc"), + (MarketingplatformAdminServiceAsyncClient, "grpc_asyncio"), + (MarketingplatformAdminServiceClient, "rest"), + ], +) +def test_marketingplatform_admin_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "marketingplatformadmin.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com" + ) + + +def test_marketingplatform_admin_service_client_get_transport_class(): + transport = MarketingplatformAdminServiceClient.get_transport_class() + available_transports = [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceRestTransport, + ] + assert transport in available_transports + + transport = MarketingplatformAdminServiceClient.get_transport_class("grpc") + assert transport == transports.MarketingplatformAdminServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + ), + ( + MarketingplatformAdminServiceAsyncClient, + 
transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +def test_marketingplatform_admin_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + MarketingplatformAdminServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + MarketingplatformAdminServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + "true", + ), + ( + MarketingplatformAdminServiceAsyncClient, + 
transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + "false", + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + "true", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_marketingplatform_admin_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [MarketingplatformAdminServiceClient, MarketingplatformAdminServiceAsyncClient], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MarketingplatformAdminServiceAsyncClient), +) +def test_marketingplatform_admin_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [MarketingplatformAdminServiceClient, MarketingplatformAdminServiceAsyncClient], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +def test_marketingplatform_admin_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + ), + ], +) +def test_marketingplatform_admin_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + None, + ), + ], +) +def test_marketingplatform_admin_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_marketingplatform_admin_service_client_client_options_from_dict(): + with mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = MarketingplatformAdminServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_marketingplatform_admin_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "marketingplatformadmin.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + scopes=None, + default_host="marketingplatformadmin.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.GetOrganizationRequest, + dict, + ], +) +def test_get_organization(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Organization( + name="name_value", + display_name="display_name_value", + ) + response = client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.GetOrganizationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Organization) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_organization_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_organization() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.GetOrganizationRequest() + + +def test_get_organization_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.GetOrganizationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_organization(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.GetOrganizationRequest( + name="name_value", + ) + + +def test_get_organization_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_organization in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_organization + ] = mock_rpc + request = {} + client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_organization(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_organization_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_organization() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.GetOrganizationRequest() + + +@pytest.mark.asyncio +async def test_get_organization_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_organization + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_organization + ] = mock_rpc + + request = {} + await client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_organization(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_organization_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.GetOrganizationRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.GetOrganizationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Organization) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_organization_async_from_dict(): + await test_get_organization_async(request_type=dict) + + +def test_get_organization_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = marketingplatform_admin.GetOrganizationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value = resources.Organization() + client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_organization_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.GetOrganizationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization() + ) + await client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_organization_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Organization() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_organization( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_organization_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_organization( + marketingplatform_admin.GetOrganizationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_organization_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Organization() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_organization( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_organization_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_organization( + marketingplatform_admin.GetOrganizationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.ListAnalyticsAccountLinksRequest, + dict, + ], +) +def test_list_analytics_account_links(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAnalyticsAccountLinksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_analytics_account_links_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_analytics_account_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + +def test_list_analytics_account_links_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_analytics_account_links(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_analytics_account_links_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_analytics_account_links + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_analytics_account_links + ] = mock_rpc + request = {} + client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_analytics_account_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_analytics_account_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_analytics_account_links + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_analytics_account_links + ] = mock_rpc + + request = {} + await client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_analytics_account_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAnalyticsAccountLinksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_from_dict(): + await test_list_analytics_account_links_async(request_type=dict) + + +def test_list_analytics_account_links_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) + await client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_analytics_account_links_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_analytics_account_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_analytics_account_links_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_analytics_account_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", + ) + + +def test_list_analytics_account_links_pager(transport_name: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_analytics_account_links( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in results) + + +def test_list_analytics_account_links_pages(transport_name: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + pages = list(client.list_analytics_account_links(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_pager(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_analytics_account_links( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in responses) + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_pages(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_analytics_account_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + dict, + ], +) +def test_create_analytics_account_link(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + response = client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.AnalyticsAccountLink) + assert response.name == "name_value" + assert response.analytics_account == "analytics_account_value" + assert response.display_name == "display_name_value" + assert ( + response.link_verification_state + == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + ) + + +def test_create_analytics_account_link_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_analytics_account_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + +def test_create_analytics_account_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_analytics_account_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + ) + + +def test_create_analytics_account_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_analytics_account_link + ] = mock_rpc + request = {} + client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + ) + response = await client.create_analytics_account_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + 
client._client._transport.create_analytics_account_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_analytics_account_link + ] = mock_rpc + + request = {} + await client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + ) + response = await client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.AnalyticsAccountLink) + assert response.name == "name_value" + assert response.analytics_account == "analytics_account_value" + assert response.display_name == "display_name_value" + assert ( + response.link_verification_state + == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + ) + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_async_from_dict(): + await test_create_analytics_account_link_async(request_type=dict) + + +def test_create_analytics_account_link_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value = resources.AnalyticsAccountLink() + client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink() + ) + await client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_analytics_account_link_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AnalyticsAccountLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_analytics_account_link( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].analytics_account_link + mock_val = resources.AnalyticsAccountLink(name="name_value") + assert arg == mock_val + + +def test_create_analytics_account_link_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AnalyticsAccountLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_analytics_account_link( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].analytics_account_link + mock_val = resources.AnalyticsAccountLink(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + dict, + ], +) +def test_delete_analytics_account_link(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_analytics_account_link_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_analytics_account_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + +def test_delete_analytics_account_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_analytics_account_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + +def test_delete_analytics_account_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_analytics_account_link + ] = mock_rpc + request = {} + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_analytics_account_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_analytics_account_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_analytics_account_link + ] = mock_rpc + + request = {} + await client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_async_from_dict(): + await test_delete_analytics_account_link_async(request_type=dict) + + +def test_delete_analytics_account_link_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value = None + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_analytics_account_link_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_analytics_account_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_analytics_account_link_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_analytics_account_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.SetPropertyServiceLevelRequest, + dict, + ], +) +def test_set_property_service_level(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + response = client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + + +def test_set_property_service_level_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.set_property_service_level() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest() + + +def test_set_property_service_level_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.set_property_service_level(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + ) + + +def test_set_property_service_level_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_property_service_level + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_property_service_level + ] = mock_rpc + request = {} + client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_property_service_level(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_set_property_service_level_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + response = await client.set_property_service_level() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest() + + +@pytest.mark.asyncio +async def test_set_property_service_level_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.set_property_service_level + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.set_property_service_level + ] = mock_rpc + + request = {} + await client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.set_property_service_level(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_set_property_service_level_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + response = await client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + + +@pytest.mark.asyncio +async def test_set_property_service_level_async_from_dict(): + await test_set_property_service_level_async(request_type=dict) + + +def test_set_property_service_level_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = marketingplatform_admin.SetPropertyServiceLevelRequest() + + request.analytics_account_link = "analytics_account_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "analytics_account_link=analytics_account_link_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_property_service_level_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + + request.analytics_account_link = "analytics_account_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + await client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "analytics_account_link=analytics_account_link_value", + ) in kw["metadata"] + + +def test_set_property_service_level_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_property_service_level( + analytics_account_link="analytics_account_link_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].analytics_account_link + mock_val = "analytics_account_link_value" + assert arg == mock_val + + +def test_set_property_service_level_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_property_service_level( + marketingplatform_admin.SetPropertyServiceLevelRequest(), + analytics_account_link="analytics_account_link_value", + ) + + +@pytest.mark.asyncio +async def test_set_property_service_level_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_property_service_level( + analytics_account_link="analytics_account_link_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].analytics_account_link + mock_val = "analytics_account_link_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_property_service_level_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_property_service_level( + marketingplatform_admin.SetPropertyServiceLevelRequest(), + analytics_account_link="analytics_account_link_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.GetOrganizationRequest, + dict, + ], +) +def test_get_organization_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.Organization( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_organization(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Organization) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_organization_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_organization in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_organization + ] = mock_rpc + + request = {} + client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_organization(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_organization_rest_required_fields( + request_type=marketingplatform_admin.GetOrganizationRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Organization() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_organization(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_organization_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_organization._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_organization_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, "post_get_organization" + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, 
"pre_get_organization" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.GetOrganizationRequest.pb( + marketingplatform_admin.GetOrganizationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Organization.to_json( + resources.Organization() + ) + + request = marketingplatform_admin.GetOrganizationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Organization() + + client.get_organization( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_organization_rest_bad_request( + transport: str = "rest", request_type=marketingplatform_admin.GetOrganizationRequest +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_organization(request) + + +def test_get_organization_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Organization() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_organization(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=organizations/*}" % client.transport._host, args[1] + ) + + +def test_get_organization_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_organization( + marketingplatform_admin.GetOrganizationRequest(), + name="name_value", + ) + + +def test_get_organization_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.ListAnalyticsAccountLinksRequest, + dict, + ], +) +def test_list_analytics_account_links_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_analytics_account_links(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAnalyticsAccountLinksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_analytics_account_links_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_analytics_account_links + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_analytics_account_links + ] = mock_rpc + + request = {} + client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_analytics_account_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_analytics_account_links_rest_required_fields( + request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_analytics_account_links._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_analytics_account_links._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_analytics_account_links(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_analytics_account_links_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_analytics_account_links._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_analytics_account_links_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = 
MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_list_analytics_account_links", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_list_analytics_account_links", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.ListAnalyticsAccountLinksRequest.pb( + marketingplatform_admin.ListAnalyticsAccountLinksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse.to_json( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) + ) + + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + + client.list_analytics_account_links( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_analytics_account_links_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and 
fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_analytics_account_links(request) + + +def test_list_analytics_account_links_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_analytics_account_links(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=organizations/*}/analyticsAccountLinks" + % client.transport._host, + args[1], + ) + + +def test_list_analytics_account_links_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", + ) + + +def test_list_analytics_account_links_rest_pager(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + marketingplatform_admin.ListAnalyticsAccountLinksResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1"} + + pager = client.list_analytics_account_links(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in results) + + pages = list(client.list_analytics_account_links(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + dict, + ], +) +def test_create_analytics_account_link_rest(request_type): + client = MarketingplatformAdminServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request_init["analytics_account_link"] = { + "name": "name_value", + "analytics_account": "analytics_account_value", + "display_name": "display_name_value", + "link_verification_state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = marketingplatform_admin.CreateAnalyticsAccountLinkRequest.meta.fields[ + "analytics_account_link" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "analytics_account_link" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["analytics_account_link"][field])): + del 
request_init["analytics_account_link"][field][i][subfield] + else: + del request_init["analytics_account_link"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AnalyticsAccountLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_analytics_account_link(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.AnalyticsAccountLink) + assert response.name == "name_value" + assert response.analytics_account == "analytics_account_value" + assert response.display_name == "display_name_value" + assert ( + response.link_verification_state + == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + ) + + +def test_create_analytics_account_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_analytics_account_link + ] = mock_rpc + + request = {} + client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_analytics_account_link_rest_required_fields( + request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.AnalyticsAccountLink() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.AnalyticsAccountLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_analytics_account_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_analytics_account_link_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_analytics_account_link._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "analyticsAccountLink", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_analytics_account_link_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = 
MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_create_analytics_account_link", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_create_analytics_account_link", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.CreateAnalyticsAccountLinkRequest.pb( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.AnalyticsAccountLink.to_json( + resources.AnalyticsAccountLink() + ) + + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.AnalyticsAccountLink() + + client.create_analytics_account_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_analytics_account_link_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_analytics_account_link(request) + + +def test_create_analytics_account_link_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.AnalyticsAccountLink() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AnalyticsAccountLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_analytics_account_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=organizations/*}/analyticsAccountLinks" + % client.transport._host, + args[1], + ) + + +def test_create_analytics_account_link_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +def test_create_analytics_account_link_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + dict, + ], +) +def test_delete_analytics_account_link_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_analytics_account_link(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_analytics_account_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_analytics_account_link + ] = mock_rpc + + request = {} + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_analytics_account_link_rest_required_fields( + request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_analytics_account_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_analytics_account_link_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_analytics_account_link._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_analytics_account_link_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_delete_analytics_account_link", + ) as pre: + pre.assert_not_called() + pb_message = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.pb( + 
marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_analytics_account_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_analytics_account_link_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_analytics_account_link(request) + + +def test_delete_analytics_account_link_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_analytics_account_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=organizations/*/analyticsAccountLinks/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_analytics_account_link_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +def test_delete_analytics_account_link_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.SetPropertyServiceLevelRequest, + dict, + ], +) +def test_set_property_service_level_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_property_service_level(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + + +def test_set_property_service_level_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_property_service_level + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_property_service_level + ] = mock_rpc + + request = {} + client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_property_service_level(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_property_service_level_rest_required_fields( + request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["analytics_account_link"] = "" + request_init["analytics_property"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_property_service_level._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["analyticsAccountLink"] = "analytics_account_link_value" + jsonified_request["analyticsProperty"] = "analytics_property_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_property_service_level._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "analyticsAccountLink" in jsonified_request + assert jsonified_request["analyticsAccountLink"] == "analytics_account_link_value" + assert "analyticsProperty" in jsonified_request + assert jsonified_request["analyticsProperty"] == "analytics_property_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_property_service_level(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_property_service_level_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_property_service_level._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "analyticsAccountLink", + "analyticsProperty", + "serviceLevel", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_property_service_level_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_set_property_service_level", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_set_property_service_level", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.SetPropertyServiceLevelRequest.pb( + marketingplatform_admin.SetPropertyServiceLevelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + marketingplatform_admin.SetPropertyServiceLevelResponse.to_json( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + ) + + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + client.set_property_service_level( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_property_service_level_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding 
+ request_init = { + "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_property_service_level(request) + + +def test_set_property_service_level_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" + } + + # get truthy value for each flattened field + mock_args = dict( + analytics_account_link="analytics_account_link_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_property_service_level(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{analytics_account_link=organizations/*/analyticsAccountLinks/*}:setPropertyServiceLevel" + % client.transport._host, + args[1], + ) + + +def test_set_property_service_level_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_property_service_level( + marketingplatform_admin.SetPropertyServiceLevelRequest(), + analytics_account_link="analytics_account_link_value", + ) + + +def test_set_property_service_level_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MarketingplatformAdminServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + transports.MarketingplatformAdminServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = MarketingplatformAdminServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MarketingplatformAdminServiceGrpcTransport, + ) + + +def test_marketingplatform_admin_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MarketingplatformAdminServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_marketingplatform_admin_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MarketingplatformAdminServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_organization", + "list_analytics_account_links", + "create_analytics_account_link", + "delete_analytics_account_link", + "set_property_service_level", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_marketingplatform_admin_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MarketingplatformAdminServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + quota_project_id="octopus", + ) + + +def 
test_marketingplatform_admin_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MarketingplatformAdminServiceTransport() + adc.assert_called_once() + + +def test_marketingplatform_admin_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MarketingplatformAdminServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + transports.MarketingplatformAdminServiceRestTransport, + ], +) +def test_marketingplatform_admin_service_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MarketingplatformAdminServiceGrpcTransport, grpc_helpers), + ( + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_marketingplatform_admin_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "marketingplatformadmin.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + scopes=["1", "2"], + default_host="marketingplatformadmin.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_marketingplatform_admin_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MarketingplatformAdminServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_marketingplatform_admin_service_host_no_port(transport_name): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="marketingplatformadmin.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "marketingplatformadmin.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_marketingplatform_admin_service_host_with_port(transport_name): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="marketingplatformadmin.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + 
"marketingplatformadmin.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_marketingplatform_admin_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MarketingplatformAdminServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MarketingplatformAdminServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_organization._session + session2 = client2.transport.get_organization._session + assert session1 != session2 + session1 = client1.transport.list_analytics_account_links._session + session2 = client2.transport.list_analytics_account_links._session + assert session1 != session2 + session1 = client1.transport.create_analytics_account_link._session + session2 = client2.transport.create_analytics_account_link._session + assert session1 != session2 + session1 = client1.transport.delete_analytics_account_link._session + session2 = client2.transport.delete_analytics_account_link._session + assert session1 != session2 + session1 = client1.transport.set_property_service_level._session + session2 = client2.transport.set_property_service_level._session + assert session1 != session2 + + +def test_marketingplatform_admin_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.MarketingplatformAdminServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_marketingplatform_admin_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MarketingplatformAdminServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert 
bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_account_path(): + account = "squid" + expected = "accounts/{account}".format( + account=account, + ) + actual = 
MarketingplatformAdminServiceClient.account_path(account) + assert expected == actual + + +def test_parse_account_path(): + expected = { + "account": "clam", + } + path = MarketingplatformAdminServiceClient.account_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_account_path(path) + assert expected == actual + + +def test_analytics_account_link_path(): + organization = "whelk" + analytics_account_link = "octopus" + expected = "organizations/{organization}/analyticsAccountLinks/{analytics_account_link}".format( + organization=organization, + analytics_account_link=analytics_account_link, + ) + actual = MarketingplatformAdminServiceClient.analytics_account_link_path( + organization, analytics_account_link + ) + assert expected == actual + + +def test_parse_analytics_account_link_path(): + expected = { + "organization": "oyster", + "analytics_account_link": "nudibranch", + } + path = MarketingplatformAdminServiceClient.analytics_account_link_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_analytics_account_link_path(path) + assert expected == actual + + +def test_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MarketingplatformAdminServiceClient.organization_path(organization) + assert expected == actual + + +def test_parse_organization_path(): + expected = { + "organization": "mussel", + } + path = MarketingplatformAdminServiceClient.organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MarketingplatformAdminServiceClient.parse_organization_path(path) + assert expected == actual + + +def test_property_path(): + property = "winkle" + expected = "properties/{property}".format( + property=property, + ) + actual = MarketingplatformAdminServiceClient.property_path(property) + assert expected == actual + + +def test_parse_property_path(): + expected = { + "property": "nautilus", + } + path = MarketingplatformAdminServiceClient.property_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_property_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = MarketingplatformAdminServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = MarketingplatformAdminServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = MarketingplatformAdminServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = MarketingplatformAdminServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MarketingplatformAdminServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MarketingplatformAdminServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = MarketingplatformAdminServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = MarketingplatformAdminServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = MarketingplatformAdminServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = MarketingplatformAdminServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = MarketingplatformAdminServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MarketingplatformAdminServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.MarketingplatformAdminServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.MarketingplatformAdminServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = MarketingplatformAdminServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/CHANGELOG.md b/packages/google-ai-generativelanguage/CHANGELOG.md index 656da96f6be1..c6fa336083ab 100644 --- a/packages/google-ai-generativelanguage/CHANGELOG.md +++ b/packages/google-ai-generativelanguage/CHANGELOG.md @@ -1,5 +1,38 @@ # Changelog +## [0.6.10](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.9...google-ai-generativelanguage-v0.6.10) (2024-09-23) + + +### Features + +* Add GenerationConfig.{presence_penalty, frequency_penalty, logprobs, response_logprobs, logprobs} and Candidate.{avg_logprobs, logprobs_result} 
([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add GoogleSearchRetrieval tool and candidate.grounding_metadata ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add HarmBlockThreshold.OFF ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add PredictionService (for Imagen) ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add Schema.min_items ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add TunedModels.reader_project_numbers ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) + + +### Documentation + +* Small fixes ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Tag HarmCategories by the model family they're used on. 
([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) + +## [0.6.9](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.8...google-ai-generativelanguage-v0.6.9) (2024-08-19) + + +### Features + +* Add model max_temperature ([fdebbf2](https://github.com/googleapis/google-cloud-python/commit/fdebbf2e914e9a8ed5a31a04ce9fe26de0f69c72)) +* Add new PromptFeedback and FinishReason entries ([fdebbf2](https://github.com/googleapis/google-cloud-python/commit/fdebbf2e914e9a8ed5a31a04ce9fe26de0f69c72)) +* Add new PromptFeedback and FinishReason entries for https://github.com/google-gemini/generative-ai-python/issues/476 ([fdebbf2](https://github.com/googleapis/google-cloud-python/commit/fdebbf2e914e9a8ed5a31a04ce9fe26de0f69c72)) + + +### Documentation + +* Many small fixes ([fdebbf2](https://github.com/googleapis/google-cloud-python/commit/fdebbf2e914e9a8ed5a31a04ce9fe26de0f69c72)) + ## [0.6.8](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.7...google-ai-generativelanguage-v0.6.8) (2024-07-30) diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst new file mode 100644 index 000000000000..7b2b932acacc --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst @@ -0,0 +1,6 @@ +PredictionService +----------------------------------- + +.. 
automodule:: google.ai.generativelanguage_v1beta.services.prediction_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst index 24e6184e8b0e..7a7b5429bd6f 100644 --- a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst @@ -9,5 +9,6 @@ Services for Google Ai Generativelanguage v1beta API generative_service model_service permission_service + prediction_service retriever_service text_service diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py index c69803d506d6..750b54051c3f 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py @@ -54,6 +54,12 @@ from google.ai.generativelanguage_v1beta.services.permission_service.client import ( PermissionServiceClient, ) +from google.ai.generativelanguage_v1beta.services.prediction_service.async_client import ( + PredictionServiceAsyncClient, +) +from google.ai.generativelanguage_v1beta.services.prediction_service.client import ( + PredictionServiceClient, +) from google.ai.generativelanguage_v1beta.services.retriever_service.async_client import ( RetrieverServiceAsyncClient, ) @@ -84,12 +90,14 @@ CodeExecution, CodeExecutionResult, Content, + DynamicRetrievalConfig, ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, FunctionDeclaration, FunctionResponse, + GoogleSearchRetrieval, GroundingPassage, GroundingPassages, Part, @@ -132,6 +140,13 @@ GenerateContentResponse, GenerationConfig, GroundingAttribution, + GroundingChunk, + GroundingMetadata, + GroundingSupport, + LogprobsResult, + 
RetrievalMetadata, + SearchEntryPoint, + Segment, SemanticRetrieverConfig, TaskType, ) @@ -159,6 +174,10 @@ TransferOwnershipResponse, UpdatePermissionRequest, ) +from google.ai.generativelanguage_v1beta.types.prediction_service import ( + PredictRequest, + PredictResponse, +) from google.ai.generativelanguage_v1beta.types.retriever import ( Chunk, ChunkData, @@ -243,6 +262,8 @@ "ModelServiceAsyncClient", "PermissionServiceClient", "PermissionServiceAsyncClient", + "PredictionServiceClient", + "PredictionServiceAsyncClient", "RetrieverServiceClient", "RetrieverServiceAsyncClient", "TextServiceClient", @@ -260,12 +281,14 @@ "CodeExecution", "CodeExecutionResult", "Content", + "DynamicRetrievalConfig", "ExecutableCode", "FileData", "FunctionCall", "FunctionCallingConfig", "FunctionDeclaration", "FunctionResponse", + "GoogleSearchRetrieval", "GroundingPassage", "GroundingPassages", "Part", @@ -303,6 +326,13 @@ "GenerateContentResponse", "GenerationConfig", "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", + "GroundingSupport", + "LogprobsResult", + "RetrievalMetadata", + "SearchEntryPoint", + "Segment", "SemanticRetrieverConfig", "TaskType", "Model", @@ -325,6 +355,8 @@ "TransferOwnershipRequest", "TransferOwnershipResponse", "UpdatePermissionRequest", + "PredictRequest", + "PredictResponse", "Chunk", "ChunkData", "Condition", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py index 911c2d1dfcef..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py index 4c8665b1b49a..a383f98f4342 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py @@ -37,6 +37,7 @@ GenerateContentRequest, GenerateContentResponse, GenerationConfig, + LogprobsResult, TaskType, ) from .types.model import Model @@ -66,6 +67,7 @@ "HarmCategory", "ListModelsRequest", "ListModelsResponse", + "LogprobsResult", "Model", "ModelServiceClient", "Part", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py index 911c2d1dfcef..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py index 7f380977c2b7..2fa8d2f13e5e 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GenerativeServiceClient).get_transport_class, type(GenerativeServiceClient) - ) + get_transport_class = GenerativeServiceClient.get_transport_class def __init__( self, @@ -280,14 +277,15 @@ async def generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.GenerateContentResponse: - r"""Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + r"""Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. .. code-block:: python @@ -329,12 +327,14 @@ async def sample_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. 
contents (:class:`MutableSequence[google.ai.generativelanguage_v1.types.Content]`): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -347,18 +347,18 @@ async def sample_generate_content(): Returns: google.ai.generativelanguage_v1.types.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. 
@@ -421,8 +421,9 @@ def stream_generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[generative_service.GenerateContentResponse]]: - r"""Generates a streamed response from the model given an input - ``GenerateContentRequest``. + r"""Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. .. code-block:: python @@ -465,12 +466,14 @@ async def sample_stream_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (:class:`MutableSequence[google.ai.generativelanguage_v1.types.Content]`): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -483,18 +486,18 @@ async def sample_stream_generate_content(): Returns: AsyncIterable[google.ai.generativelanguage_v1.types.GenerateContentResponse]: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. 
The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. @@ -555,8 +558,9 @@ async def embed_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.EmbedContentResponse: - r"""Generates an embedding from the model given an input - ``Content``. + r"""Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. .. code-block:: python @@ -679,8 +683,9 @@ async def batch_embed_contents( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.BatchEmbedContentsResponse: - r"""Generates multiple embeddings from the model given - input text in a synchronous call. + r"""Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. .. code-block:: python @@ -804,8 +809,10 @@ async def count_tokens( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.CountTokensResponse: - r"""Runs a model's tokenizer on input content and returns - the token count. + r"""Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. .. 
code-block:: python diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py index 886dfd3c9953..c0d080604625 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py @@ -664,7 +664,7 @@ def __init__( Type[GenerativeServiceTransport], Callable[..., GenerativeServiceTransport], ] = ( - type(self).get_transport_class(transport) + GenerativeServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GenerativeServiceTransport], transport) ) @@ -693,14 +693,15 @@ def generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.GenerateContentResponse: - r"""Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + r"""Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. .. code-block:: python @@ -742,12 +743,14 @@ def sample_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. 
For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -760,18 +763,18 @@ def sample_generate_content(): Returns: google.ai.generativelanguage_v1.types.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. @@ -831,8 +834,9 @@ def stream_generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[generative_service.GenerateContentResponse]: - r"""Generates a streamed response from the model given an input - ``GenerateContentRequest``. + r"""Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. .. 
code-block:: python @@ -875,12 +879,14 @@ def sample_stream_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -893,18 +899,18 @@ def sample_stream_generate_content(): Returns: Iterable[google.ai.generativelanguage_v1.types.GenerateContentResponse]: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. 
@@ -962,8 +968,9 @@ def embed_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.EmbedContentResponse: - r"""Generates an embedding from the model given an input - ``Content``. + r"""Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. .. code-block:: python @@ -1083,8 +1090,9 @@ def batch_embed_contents( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.BatchEmbedContentsResponse: - r"""Generates multiple embeddings from the model given - input text in a synchronous call. + r"""Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. .. code-block:: python @@ -1205,8 +1213,10 @@ def count_tokens( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.CountTokensResponse: - r"""Runs a model's tokenizer on input content and returns - the token count. + r"""Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. .. code-block:: python diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py index 7fa405324e45..0c05a0b60ad9 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py @@ -245,14 +245,15 @@ def generate_content( ]: r"""Return a callable for the generate content method over gRPC. 
- Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. Returns: Callable[[~.GenerateContentRequest], @@ -281,8 +282,9 @@ def stream_generate_content( ]: r"""Return a callable for the stream generate content method over gRPC. - Generates a streamed response from the model given an input - ``GenerateContentRequest``. + Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. Returns: Callable[[~.GenerateContentRequest], @@ -311,8 +313,9 @@ def embed_content( ]: r"""Return a callable for the embed content method over gRPC. - Generates an embedding from the model given an input - ``Content``. + Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. Returns: Callable[[~.EmbedContentRequest], @@ -341,8 +344,9 @@ def batch_embed_contents( ]: r"""Return a callable for the batch embed contents method over gRPC. - Generates multiple embeddings from the model given - input text in a synchronous call. + Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. Returns: Callable[[~.BatchEmbedContentsRequest], @@ -370,8 +374,10 @@ def count_tokens( ]: r"""Return a callable for the count tokens method over gRPC. - Runs a model's tokenizer on input content and returns - the token count. + Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. 
Returns: Callable[[~.CountTokensRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py index bc85a9437627..20323a521f71 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py @@ -249,14 +249,15 @@ def generate_content( ]: r"""Return a callable for the generate content method over gRPC. - Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. Returns: Callable[[~.GenerateContentRequest], @@ -285,8 +286,9 @@ def stream_generate_content( ]: r"""Return a callable for the stream generate content method over gRPC. - Generates a streamed response from the model given an input - ``GenerateContentRequest``. + Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. Returns: Callable[[~.GenerateContentRequest], @@ -315,8 +317,9 @@ def embed_content( ]: r"""Return a callable for the embed content method over gRPC. - Generates an embedding from the model given an input - ``Content``. + Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. 
Returns: Callable[[~.EmbedContentRequest], @@ -345,8 +348,9 @@ def batch_embed_contents( ]: r"""Return a callable for the batch embed contents method over gRPC. - Generates multiple embeddings from the model given - input text in a synchronous call. + Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. Returns: Callable[[~.BatchEmbedContentsRequest], @@ -375,8 +379,10 @@ def count_tokens( ]: r"""Return a callable for the count tokens method over gRPC. - Runs a model's tokenizer on input content and returns - the token count. + Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. Returns: Callable[[~.CountTokensRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py index b604bc06b024..7caa772eb19f 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py @@ -715,20 +715,21 @@ def __call__( Returns: ~.generative_service.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They are - reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in ``GenerateContentResponse.prompt_feedback`` and for each candidate in ``finish_reason`` and in - ``safety_ratings``. The API contract is that: + ``safety_ratings``. 
The API: - - either all requested candidates are returned or no - candidates at all - - no candidates are returned only if there was - something wrong with the prompt (see + - Returns either all requested candidates or none of + them + - Returns no candidates at all only if there was + something wrong with the prompt (check ``prompt_feedback``) - - feedback on each candidate is reported on + - Reports feedback on each candidate in ``finish_reason`` and ``safety_ratings``. """ @@ -830,20 +831,21 @@ def __call__( Returns: ~.generative_service.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They are - reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in ``GenerateContentResponse.prompt_feedback`` and for each candidate in ``finish_reason`` and in - ``safety_ratings``. The API contract is that: + ``safety_ratings``. The API: - - either all requested candidates are returned or no - candidates at all - - no candidates are returned only if there was - something wrong with the prompt (see + - Returns either all requested candidates or none of + them + - Returns no candidates at all only if there was + something wrong with the prompt (check ``prompt_feedback``) - - feedback on each candidate is reported on + - Reports feedback on each candidate in ``finish_reason`` and ``safety_ratings``. 
""" @@ -1055,6 +1057,10 @@ def __call__( "method": "get", "uri": "/v1/{name=tunedModels/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=generatedFiles/*}/operations/*", + }, ] request, metadata = self._interceptor.pre_get_operation(request, metadata) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/async_client.py index 9eaa670055fb..8d69bc5d2451 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ModelServiceClient).get_transport_class, type(ModelServiceClient) - ) + get_transport_class = ModelServiceClient.get_transport_class def __init__( self, @@ -266,7 +263,12 @@ async def get_model( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> model.Model: - r"""Gets information about a specific Model. + r"""Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. .. code-block:: python @@ -377,7 +379,9 @@ async def list_models( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListModelsAsyncPager: - r"""Lists models available through the API. + r"""Lists the + ```Model``\ s `__ + available through the Gemini API. .. 
code-block:: python @@ -411,10 +415,9 @@ async def sample_list_models(): page_size (:class:`int`): The maximum number of ``Models`` to return (per page). - The service may return fewer models. If unspecified, at - most 50 models will be returned per page. This method - returns at most 1000 models per page, even if you pass a - larger page_size. + If unspecified, 50 models will be returned per page. + This method returns at most 1000 models per page, even + if you pass a larger page_size. This corresponds to the ``page_size`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py index ffe27c694223..115407649578 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py @@ -652,7 +652,7 @@ def __init__( transport_init: Union[ Type[ModelServiceTransport], Callable[..., ModelServiceTransport] ] = ( - type(self).get_transport_class(transport) + ModelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ModelServiceTransport], transport) ) @@ -678,7 +678,12 @@ def get_model( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> model.Model: - r"""Gets information about a specific Model. + r"""Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. .. 
code-block:: python @@ -786,7 +791,9 @@ def list_models( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListModelsPager: - r"""Lists models available through the API. + r"""Lists the + ```Model``\ s `__ + available through the Gemini API. .. code-block:: python @@ -820,10 +827,9 @@ def sample_list_models(): page_size (int): The maximum number of ``Models`` to return (per page). - The service may return fewer models. If unspecified, at - most 50 models will be returned per page. This method - returns at most 1000 models per page, even if you pass a - larger page_size. + If unspecified, 50 models will be returned per page. + This method returns at most 1000 models per page, even + if you pass a larger page_size. This corresponds to the ``page_size`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc.py index cd57836b737c..234ee1693ebf 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc.py @@ -240,7 +240,12 @@ def grpc_channel(self) -> grpc.Channel: def get_model(self) -> Callable[[model_service.GetModelRequest], model.Model]: r"""Return a callable for the get model method over gRPC. - Gets information about a specific Model. + Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. 
Returns: Callable[[~.GetModelRequest], @@ -266,7 +271,9 @@ def list_models( ) -> Callable[[model_service.ListModelsRequest], model_service.ListModelsResponse]: r"""Return a callable for the list models method over gRPC. - Lists models available through the API. + Lists the + ```Model``\ s `__ + available through the Gemini API. Returns: Callable[[~.ListModelsRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc_asyncio.py index f1569dd307a5..c7c11c694671 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc_asyncio.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc_asyncio.py @@ -246,7 +246,12 @@ def get_model( ) -> Callable[[model_service.GetModelRequest], Awaitable[model.Model]]: r"""Return a callable for the get model method over gRPC. - Gets information about a specific Model. + Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. Returns: Callable[[~.GetModelRequest], @@ -274,7 +279,9 @@ def list_models( ]: r"""Return a callable for the list models method over gRPC. - Lists models available through the API. + Lists the + ```Model``\ s `__ + available through the Gemini API. 
Returns: Callable[[~.ListModelsRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py index e21bd17d6e8b..a431622cc869 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py @@ -572,6 +572,10 @@ def __call__( "method": "get", "uri": "/v1/{name=tunedModels/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=generatedFiles/*}/operations/*", + }, ] request, metadata = self._interceptor.pre_get_operation(request, metadata) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py index 522ecb07c1c3..9156b856ee0e 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py @@ -27,6 +27,7 @@ GenerateContentRequest, GenerateContentResponse, GenerationConfig, + LogprobsResult, TaskType, ) from .model import Model @@ -50,6 +51,7 @@ "GenerateContentRequest", "GenerateContentResponse", "GenerationConfig", + "LogprobsResult", "TaskType", "Model", "GetModelRequest", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py index e1121ef229db..e8062906bfbf 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py @@ -31,6 
+31,7 @@ "GenerationConfig", "GenerateContentResponse", "Candidate", + "LogprobsResult", "EmbedContentRequest", "ContentEmbedding", "EmbedContentResponse", @@ -93,12 +94,14 @@ class GenerateContentRequest(proto.Message): Format: ``name=models/{model}``. contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a single - instance. For multi-turn queries, this is a - repeated field that contains conversation - history + latest request. + Required. The content of the current conversation with the + model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. safety_settings (MutableSequence[google.ai.generativelanguage_v1.types.SafetySetting]): Optional. A list of unique ``SafetySetting`` instances for blocking unsafe content. @@ -116,7 +119,13 @@ class GenerateContentRequest(proto.Message): categories HARM_CATEGORY_HATE_SPEECH, HARM_CATEGORY_SEXUALLY_EXPLICIT, HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT - are supported. + are supported. Refer to the + `guide `__ + for detailed information on available safety settings. Also + refer to the `Safety + guidance `__ + to learn how to incorporate safety considerations in your AI + applications. generation_config (google.ai.generativelanguage_v1.types.GenerationConfig): Optional. Configuration options for model generation and outputs. @@ -148,7 +157,7 @@ class GenerateContentRequest(proto.Message): class GenerationConfig(proto.Message): r"""Configuration options for model generation and outputs. Not - all parameters may be configurable for every model. + all parameters are configurable for every model. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -162,14 +171,13 @@ class GenerationConfig(proto.Message): This field is a member of `oneof`_ ``_candidate_count``. stop_sequences (MutableSequence[str]): - Optional. The set of character sequences (up - to 5) that will stop output generation. If - specified, the API will stop at the first - appearance of a stop sequence. The stop sequence - will not be included as part of the response. + Optional. The set of character sequences (up to 5) that will + stop output generation. If specified, the API will stop at + the first appearance of a ``stop_sequence``. The stop + sequence will not be included as part of the response. max_output_tokens (int): Optional. The maximum number of tokens to include in a - candidate. + response candidate. Note: The default value varies by model, see the ``Model.output_token_limit`` attribute of the ``Model`` @@ -190,35 +198,89 @@ class GenerationConfig(proto.Message): Optional. The maximum cumulative probability of tokens to consider when sampling. - The model uses combined Top-k and nucleus sampling. + The model uses combined Top-k and Top-p (nucleus) sampling. Tokens are sorted based on their assigned probabilities so that only the most likely tokens are considered. Top-k sampling directly limits the maximum number of tokens to - consider, while Nucleus sampling limits number of tokens + consider, while Nucleus sampling limits the number of tokens based on the cumulative probability. - Note: The default value varies by model, see the - ``Model.top_p`` attribute of the ``Model`` returned from the - ``getModel`` function. + Note: The default value varies by ``Model`` and is specified + by the\ ``Model.top_p`` attribute returned from the + ``getModel`` function. An empty ``top_k`` attribute + indicates that the model doesn't apply top-k sampling and + doesn't allow setting ``top_k`` on requests. 
This field is a member of `oneof`_ ``_top_p``. top_k (int): Optional. The maximum number of tokens to consider when sampling. - Models use nucleus sampling or combined Top-k and nucleus - sampling. Top-k sampling considers the set of ``top_k`` most - probable tokens. Models running with nucleus sampling don't - allow top_k setting. + Gemini models use Top-p (nucleus) sampling or a combination + of Top-k and nucleus sampling. Top-k sampling considers the + set of ``top_k`` most probable tokens. Models running with + nucleus sampling don't allow top_k setting. - Note: The default value varies by model, see the - ``Model.top_k`` attribute of the ``Model`` returned from the - ``getModel`` function. Empty ``top_k`` field in ``Model`` - indicates the model doesn't apply top-k sampling and doesn't - allow setting ``top_k`` on requests. + Note: The default value varies by ``Model`` and is specified + by the\ ``Model.top_p`` attribute returned from the + ``getModel`` function. An empty ``top_k`` attribute + indicates that the model doesn't apply top-k sampling and + doesn't allow setting ``top_k`` on requests. This field is a member of `oneof`_ ``_top_k``. + presence_penalty (float): + Optional. Presence penalty applied to the next token's + logprobs if the token has already been seen in the response. + + This penalty is binary on/off and not dependant on the + number of times the token is used (after the first). Use + [frequency_penalty][google.ai.generativelanguage.v1.GenerationConfig.frequency_penalty] + for a penalty that increases with each use. + + A positive penalty will discourage the use of tokens that + have already been used in the response, increasing the + vocabulary. + + A negative penalty will encourage the use of tokens that + have already been used in the response, decreasing the + vocabulary. + + This field is a member of `oneof`_ ``_presence_penalty``. + frequency_penalty (float): + Optional. 
Frequency penalty applied to the next token's + logprobs, multiplied by the number of times each token has + been seen in the respponse so far. + + A positive penalty will discourage the use of tokens that + have already been used, proportional to the number of times + the token has been used: The more a token is used, the more + dificult it is for the model to use that token again + increasing the vocabulary of responses. + + Caution: A *negative* penalty will encourage the model to + reuse tokens proportional to the number of times the token + has been used. Small negative values will reduce the + vocabulary of a response. Larger negative values will cause + the model to start repeating a common token until it hits + the + [max_output_tokens][google.ai.generativelanguage.v1.GenerationConfig.max_output_tokens] + limit: "...the the the the the...". + + This field is a member of `oneof`_ ``_frequency_penalty``. + response_logprobs (bool): + Optional. If true, export the logprobs + results in response. + + This field is a member of `oneof`_ ``_response_logprobs``. + logprobs (int): + Optional. Only valid if + [response_logprobs=True][google.ai.generativelanguage.v1.GenerationConfig.response_logprobs]. + This sets the number of top logprobs to return at each + decoding step in the + [Candidate.logprobs_result][google.ai.generativelanguage.v1.Candidate.logprobs_result]. + + This field is a member of `oneof`_ ``_logprobs``. 
""" candidate_count: int = proto.Field( @@ -250,21 +312,39 @@ class GenerationConfig(proto.Message): number=7, optional=True, ) + presence_penalty: float = proto.Field( + proto.FLOAT, + number=15, + optional=True, + ) + frequency_penalty: float = proto.Field( + proto.FLOAT, + number=16, + optional=True, + ) + response_logprobs: bool = proto.Field( + proto.BOOL, + number=17, + optional=True, + ) + logprobs: int = proto.Field( + proto.INT32, + number=18, + optional=True, + ) class GenerateContentResponse(proto.Message): - r"""Response from the model supporting multiple candidates. - - Note on safety ratings and content filtering. They are reported for - both prompt in ``GenerateContentResponse.prompt_feedback`` and for - each candidate in ``finish_reason`` and in ``safety_ratings``. The - API contract is that: - - - either all requested candidates are returned or no candidates at - all - - no candidates are returned only if there was something wrong with - the prompt (see ``prompt_feedback``) - - feedback on each candidate is reported on ``finish_reason`` and + r"""Response from the model supporting multiple candidate responses. + + Safety ratings and content filtering are reported for both prompt in + ``GenerateContentResponse.prompt_feedback`` and for each candidate + in ``finish_reason`` and in ``safety_ratings``. The API: + + - Returns either all requested candidates or none of them + - Returns no candidates at all only if there was something wrong + with the prompt (check ``prompt_feedback``) + - Reports feedback on each candidate in ``finish_reason`` and ``safety_ratings``. Attributes: @@ -285,29 +365,35 @@ class PromptFeedback(proto.Message): Attributes: block_reason (google.ai.generativelanguage_v1.types.GenerateContentResponse.PromptFeedback.BlockReason): Optional. If set, the prompt was blocked and - no candidates are returned. Rephrase your - prompt. + no candidates are returned. Rephrase the prompt. 
safety_ratings (MutableSequence[google.ai.generativelanguage_v1.types.SafetyRating]): Ratings for safety of the prompt. There is at most one rating per category. """ class BlockReason(proto.Enum): - r"""Specifies what was the reason why prompt was blocked. + r"""Specifies the reason why the prompt was blocked. Values: BLOCK_REASON_UNSPECIFIED (0): Default value. This value is unused. SAFETY (1): - Prompt was blocked due to safety reasons. You can inspect + Prompt was blocked due to safety reasons. Inspect ``safety_ratings`` to understand which safety category blocked it. OTHER (2): - Prompt was blocked due to unknown reaasons. + Prompt was blocked due to unknown reasons. + BLOCKLIST (3): + Prompt was blocked due to the terms which are + included from the terminology blocklist. + PROHIBITED_CONTENT (4): + Prompt was blocked due to prohibited content. """ BLOCK_REASON_UNSPECIFIED = 0 SAFETY = 1 OTHER = 2 + BLOCKLIST = 3 + PROHIBITED_CONTENT = 4 block_reason: "GenerateContentResponse.PromptFeedback.BlockReason" = ( proto.Field( @@ -327,13 +413,15 @@ class UsageMetadata(proto.Message): Attributes: prompt_token_count (int): - Number of tokens in the prompt. + Number of tokens in the prompt. When ``cached_content`` is + set, this is still the total effective prompt size meaning + this includes the number of tokens in the cached content. candidates_token_count (int): - Total number of tokens across the generated - candidates. + Total number of tokens across all the + generated response candidates. total_token_count (int): Total token count for the generation request - (prompt + candidates). + (prompt + response candidates). """ prompt_token_count: int = proto.Field( @@ -374,7 +462,7 @@ class Candidate(proto.Message): Attributes: index (int): Output only. Index of the candidate in the - list of candidates. + list of response candidates. This field is a member of `oneof`_ ``_index``. 
content (google.ai.generativelanguage_v1.types.Content): @@ -384,7 +472,7 @@ class Candidate(proto.Message): Optional. Output only. The reason why the model stopped generating tokens. If empty, the model has not stopped generating - the tokens. + tokens. safety_ratings (MutableSequence[google.ai.generativelanguage_v1.types.SafetyRating]): List of ratings for the safety of a response candidate. @@ -399,6 +487,11 @@ class Candidate(proto.Message): foundational LLM's training data. token_count (int): Output only. Token count for this candidate. + avg_logprobs (float): + Output only. + logprobs_result (google.ai.generativelanguage_v1.types.LogprobsResult): + Output only. Log-likelihood scores for the + response tokens and top tokens """ class FinishReason(proto.Enum): @@ -414,20 +507,41 @@ class FinishReason(proto.Enum): The maximum number of tokens as specified in the request was reached. SAFETY (3): - The candidate content was flagged for safety - reasons. + The response candidate content was flagged + for safety reasons. RECITATION (4): - The candidate content was flagged for - recitation reasons. + The response candidate content was flagged + for recitation reasons. + LANGUAGE (6): + The response candidate content was flagged + for using an unsupported language. OTHER (5): Unknown reason. + BLOCKLIST (7): + Token generation stopped because the content + contains forbidden terms. + PROHIBITED_CONTENT (8): + Token generation stopped for potentially + containing prohibited content. + SPII (9): + Token generation stopped because the content + potentially contains Sensitive Personally + Identifiable Information (SPII). + MALFORMED_FUNCTION_CALL (10): + The function call generated by the model is + invalid. 
""" FINISH_REASON_UNSPECIFIED = 0 STOP = 1 MAX_TOKENS = 2 SAFETY = 3 RECITATION = 4 + LANGUAGE = 6 OTHER = 5 + BLOCKLIST = 7 + PROHIBITED_CONTENT = 8 + SPII = 9 + MALFORMED_FUNCTION_CALL = 10 index: int = proto.Field( proto.INT32, @@ -458,6 +572,89 @@ class FinishReason(proto.Enum): proto.INT32, number=7, ) + avg_logprobs: float = proto.Field( + proto.DOUBLE, + number=10, + ) + logprobs_result: "LogprobsResult" = proto.Field( + proto.MESSAGE, + number=11, + message="LogprobsResult", + ) + + +class LogprobsResult(proto.Message): + r"""Logprobs Result + + Attributes: + top_candidates (MutableSequence[google.ai.generativelanguage_v1.types.LogprobsResult.TopCandidates]): + Length = total number of decoding steps. + chosen_candidates (MutableSequence[google.ai.generativelanguage_v1.types.LogprobsResult.Candidate]): + Length = total number of decoding steps. The chosen + candidates may or may not be in top_candidates. + """ + + class Candidate(proto.Message): + r"""Candidate for the logprobs token and score. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + token (str): + The candidate’s token string value. + + This field is a member of `oneof`_ ``_token``. + token_id (int): + The candidate’s token id value. + + This field is a member of `oneof`_ ``_token_id``. + log_probability (float): + The candidate's log probability. + + This field is a member of `oneof`_ ``_log_probability``. + """ + + token: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + token_id: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + log_probability: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class TopCandidates(proto.Message): + r"""Candidates with top log probabilities at each decoding step. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1.types.LogprobsResult.Candidate]): + Sorted by log probability in descending + order. 
+ """ + + candidates: MutableSequence["LogprobsResult.Candidate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogprobsResult.Candidate", + ) + + top_candidates: MutableSequence[TopCandidates] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=TopCandidates, + ) + chosen_candidates: MutableSequence[Candidate] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Candidate, + ) class EmbedContentRequest(proto.Message): @@ -494,8 +691,8 @@ class EmbedContentRequest(proto.Message): Optional. Optional reduced dimension for the output embedding. If set, excessive values in the output embedding are truncated from the end. Supported by newer models since - 2024, and the earlier model (``models/embedding-001``) - cannot specify this value. + 2024 only. You cannot set this value if using the earlier + model (``models/embedding-001``). This field is a member of `oneof`_ ``_output_dimensionality``. """ @@ -623,9 +820,16 @@ class CountTokensRequest(proto.Message): Optional. The input given to the model as a prompt. This field is ignored when ``generate_content_request`` is set. generate_content_request (google.ai.generativelanguage_v1.types.GenerateContentRequest): - Optional. The overall input given to the - model. CountTokens will count prompt, function - calling, etc. + Optional. The overall input given to the ``Model``. This + includes the prompt as well as other model steering + information like `system + instructions `__, + and/or function declarations for `function + calling `__. + ``Model``\ s/\ ``Content``\ s and + ``generate_content_request``\ s are mutually exclusive. You + can either send ``Model`` + ``Content``\ s or a + ``generate_content_request``, but never both. """ model: str = proto.Field( @@ -651,10 +855,8 @@ class CountTokensResponse(proto.Message): Attributes: total_tokens (int): - The number of tokens that the ``model`` tokenizes the - ``prompt`` into. - - Always non-negative. 
+ The number of tokens that the ``Model`` tokenizes the + ``prompt`` into. Always non-negative. """ total_tokens: int = proto.Field( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model.py index 587b5fb3d92b..ddcd4c24ccb6 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model.py @@ -34,7 +34,10 @@ class Model(proto.Message): Attributes: name (str): - Required. The resource name of the ``Model``. + Required. The resource name of the ``Model``. Refer to + `Model + variants `__ + for all allowed values. Format: ``models/{model}`` with a ``{model}`` naming convention of: @@ -43,21 +46,21 @@ class Model(proto.Message): Examples: - - ``models/chat-bison-001`` + - ``models/gemini-1.5-flash-001`` base_model_id (str): Required. The name of the base model, pass this to the generation request. Examples: - - ``chat-bison`` + - ``gemini-1.5-flash`` version (str): Required. The version number of the model. - This represents the major version + This represents the major version (``1.0`` or ``1.5``) display_name (str): The human-readable name of the model. E.g. - "Chat Bison". + "Gemini 1.5 Flash". The name can be up to 128 characters long and can consist of any UTF-8 characters. description (str): @@ -71,21 +74,27 @@ class Model(proto.Message): supported_generation_methods (MutableSequence[str]): The model's supported generation methods. - The method names are defined as Pascal case strings, such as - ``generateMessage`` which correspond to API methods. + The corresponding API method names are defined as Pascal + case strings, such as ``generateMessage`` and + ``generateContent``. temperature (float): Controls the randomness of the output. - Values can range over ``[0.0,1.0]``, inclusive. 
A value - closer to ``1.0`` will produce responses that are more - varied, while a value closer to ``0.0`` will typically - result in less surprising responses from the model. This - value specifies default to be used by the backend while - making the call to the model. + Values can range over ``[0.0,max_temperature]``, inclusive. + A higher value will produce responses that are more varied, + while a value closer to ``0.0`` will typically result in + less surprising responses from the model. This value + specifies default to be used by the backend while making the + call to the model. This field is a member of `oneof`_ ``_temperature``. + max_temperature (float): + The maximum temperature this model can use. + + This field is a member of `oneof`_ ``_max_temperature``. top_p (float): - For Nucleus sampling. + For `Nucleus + sampling `__. Nucleus sampling considers the smallest set of tokens whose probability sum is at least ``top_p``. This value specifies @@ -142,6 +151,11 @@ class Model(proto.Message): number=9, optional=True, ) + max_temperature: float = proto.Field( + proto.FLOAT, + number=13, + optional=True, + ) top_p: float = proto.Field( proto.FLOAT, number=10, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model_service.py index 3fe406e5793a..d135709a3eb5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model_service.py @@ -57,10 +57,9 @@ class ListModelsRequest(proto.Message): page_size (int): The maximum number of ``Models`` to return (per page). - The service may return fewer models. If unspecified, at most - 50 models will be returned per page. This method returns at - most 1000 models per page, even if you pass a larger - page_size. 
+ If unspecified, 50 models will be returned per page. This + method returns at most 1000 models per page, even if you + pass a larger page_size. page_token (str): A page token, received from a previous ``ListModels`` call. diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py index 100fc75977da..2a75fd715410 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py @@ -39,31 +39,32 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_UNSPECIFIED (0): Category is unspecified. HARM_CATEGORY_DEROGATORY (1): - Negative or harmful comments targeting - identity and/or protected attribute. + **PaLM** - Negative or harmful comments targeting identity + and/or protected attribute. HARM_CATEGORY_TOXICITY (2): - Content that is rude, disrespectful, or - profane. + **PaLM** - Content that is rude, disrespectful, or profane. HARM_CATEGORY_VIOLENCE (3): - Describes scenarios depicting violence - against an individual or group, or general - descriptions of gore. + **PaLM** - Describes scenarios depicting violence against an + individual or group, or general descriptions of gore. HARM_CATEGORY_SEXUAL (4): - Contains references to sexual acts or other - lewd content. + **PaLM** - Contains references to sexual acts or other lewd + content. HARM_CATEGORY_MEDICAL (5): - Promotes unchecked medical advice. + **PaLM** - Promotes unchecked medical advice. HARM_CATEGORY_DANGEROUS (6): - Dangerous content that promotes, facilitates, - or encourages harmful acts. + **PaLM** - Dangerous content that promotes, facilitates, or + encourages harmful acts. HARM_CATEGORY_HARASSMENT (7): - Harasment content. + **Gemini** - Harassment content. HARM_CATEGORY_HATE_SPEECH (8): - Hate speech and content. + **Gemini** - Hate speech and content. 
HARM_CATEGORY_SEXUALLY_EXPLICIT (9): - Sexually explicit content. + **Gemini** - Sexually explicit content. HARM_CATEGORY_DANGEROUS_CONTENT (10): - Dangerous content. + **Gemini** - Dangerous content. + HARM_CATEGORY_CIVIC_INTEGRITY (11): + **Gemini** - Content that may be used to harm civic + integrity. """ HARM_CATEGORY_UNSPECIFIED = 0 HARM_CATEGORY_DEROGATORY = 1 @@ -76,6 +77,7 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_HATE_SPEECH = 8 HARM_CATEGORY_SEXUALLY_EXPLICIT = 9 HARM_CATEGORY_DANGEROUS_CONTENT = 10 + HARM_CATEGORY_CIVIC_INTEGRITY = 11 class SafetyRating(proto.Message): @@ -170,12 +172,15 @@ class HarmBlockThreshold(proto.Enum): be allowed. BLOCK_NONE (4): All content will be allowed. + OFF (5): + Turn off the safety filter. """ HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0 BLOCK_LOW_AND_ABOVE = 1 BLOCK_MEDIUM_AND_ABOVE = 2 BLOCK_ONLY_HIGH = 3 BLOCK_NONE = 4 + OFF = 5 category: "HarmCategory" = proto.Field( proto.ENUM, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py index c692fa7725c9..73da8c53fefc 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py @@ -30,6 +30,10 @@ PermissionServiceAsyncClient, PermissionServiceClient, ) +from .services.prediction_service import ( + PredictionServiceAsyncClient, + PredictionServiceClient, +) from .services.retriever_service import ( RetrieverServiceAsyncClient, RetrieverServiceClient, @@ -50,12 +54,14 @@ CodeExecution, CodeExecutionResult, Content, + DynamicRetrievalConfig, ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, FunctionDeclaration, FunctionResponse, + GoogleSearchRetrieval, GroundingPassage, GroundingPassages, Part, @@ -98,6 +104,13 @@ GenerateContentResponse, GenerationConfig, GroundingAttribution, + GroundingChunk, + 
GroundingMetadata, + GroundingSupport, + LogprobsResult, + RetrievalMetadata, + SearchEntryPoint, + Segment, SemanticRetrieverConfig, TaskType, ) @@ -125,6 +138,7 @@ TransferOwnershipResponse, UpdatePermissionRequest, ) +from .types.prediction_service import PredictRequest, PredictResponse from .types.retriever import ( Chunk, ChunkData, @@ -203,6 +217,7 @@ "GenerativeServiceAsyncClient", "ModelServiceAsyncClient", "PermissionServiceAsyncClient", + "PredictionServiceAsyncClient", "RetrieverServiceAsyncClient", "TextServiceAsyncClient", "AttributionSourceId", @@ -256,6 +271,7 @@ "DeleteTunedModelRequest", "DiscussServiceClient", "Document", + "DynamicRetrievalConfig", "EmbedContentRequest", "EmbedContentResponse", "EmbedTextRequest", @@ -288,9 +304,13 @@ "GetModelRequest", "GetPermissionRequest", "GetTunedModelRequest", + "GoogleSearchRetrieval", "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", "GroundingPassage", "GroundingPassages", + "GroundingSupport", "HarmCategory", "Hyperparameters", "ListCachedContentsRequest", @@ -309,6 +329,7 @@ "ListPermissionsResponse", "ListTunedModelsRequest", "ListTunedModelsResponse", + "LogprobsResult", "Message", "MessagePrompt", "MetadataFilter", @@ -317,16 +338,22 @@ "Part", "Permission", "PermissionServiceClient", + "PredictRequest", + "PredictResponse", + "PredictionServiceClient", "QueryCorpusRequest", "QueryCorpusResponse", "QueryDocumentRequest", "QueryDocumentResponse", "RelevantChunk", + "RetrievalMetadata", "RetrieverServiceClient", "SafetyFeedback", "SafetyRating", "SafetySetting", "Schema", + "SearchEntryPoint", + "Segment", "SemanticRetrieverConfig", "StringList", "TaskType", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json index 24a3b2565007..7fd1909f6ca0 100644 --- 
a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json @@ -569,6 +569,40 @@ } } }, + "PredictionService": { + "clients": { + "grpc": { + "libraryClient": "PredictionServiceClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PredictionServiceAsyncClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + }, + "rest": { + "libraryClient": "PredictionServiceClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + } + } + }, "RetrieverService": { "clients": { "grpc": { diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py index 911c2d1dfcef..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/async_client.py index b9c5ece9451d..dd69486d09e4 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CacheServiceClient).get_transport_class, type(CacheServiceClient) - ) + get_transport_class = CacheServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py index cb851995d674..e5ce080ddfcb 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py @@ -678,7 +678,7 @@ def __init__( transport_init: Union[ Type[CacheServiceTransport], Callable[..., CacheServiceTransport] ] = ( - type(self).get_transport_class(transport) + CacheServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CacheServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/async_client.py index 1fcd99ba5047..d0fbfd02378c 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DiscussServiceClient).get_transport_class, type(DiscussServiceClient) - ) + get_transport_class = DiscussServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py index 59883d48f819..3e8160f99d4d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py @@ -658,7 +658,7 @@ def __init__( transport_init: Union[ Type[DiscussServiceTransport], Callable[..., DiscussServiceTransport] ] = ( - type(self).get_transport_class(transport) + DiscussServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DiscussServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py index b1387e09fb59..07a7e21e9827 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(FileServiceClient).get_transport_class, type(FileServiceClient) - ) + get_transport_class = FileServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py index 11b887da8a6a..87de8e15bc6d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py @@ -652,7 +652,7 @@ def __init__( transport_init: Union[ Type[FileServiceTransport], Callable[..., FileServiceTransport] ] = ( - type(self).get_transport_class(transport) + FileServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FileServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py index dc9ba6241807..e3fc8e8c632d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GenerativeServiceClient).get_transport_class, type(GenerativeServiceClient) - ) + get_transport_class = GenerativeServiceClient.get_transport_class def __init__( self, @@ -284,14 +281,15 @@ async def generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.GenerateContentResponse: - r"""Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + r"""Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. .. code-block:: python @@ -333,12 +331,14 @@ async def sample_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (:class:`MutableSequence[google.ai.generativelanguage_v1beta.types.Content]`): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. 
This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -351,18 +351,18 @@ async def sample_generate_content(): Returns: google.ai.generativelanguage_v1beta.types.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. @@ -459,8 +459,8 @@ async def sample_generate_answer(): Args: request (Optional[Union[google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest, dict]]): - The request object. Request to generate a grounded answer - from the model. + The request object. Request to generate a grounded answer from the + ``Model``. model (:class:`str`): Required. The name of the ``Model`` to use for generating the grounded response. @@ -472,13 +472,13 @@ async def sample_generate_answer(): should not be set. contents (:class:`MutableSequence[google.ai.generativelanguage_v1beta.types.Content]`): Required. The content of the current conversation with - the model. For single-turn queries, this is a single + the ``Model``. For single-turn queries, this is a single question to answer. 
For multi-turn queries, this is a repeated field that contains conversation history and the last ``Content`` in the list containing the question. - Note: GenerateAnswer currently only supports queries in + Note: ``GenerateAnswer`` only supports queries in English. This corresponds to the ``contents`` field @@ -502,7 +502,13 @@ async def sample_generate_answer(): categories HARM_CATEGORY_HATE_SPEECH, HARM_CATEGORY_SEXUALLY_EXPLICIT, HARM_CATEGORY_DANGEROUS_CONTENT, - HARM_CATEGORY_HARASSMENT are supported. + HARM_CATEGORY_HARASSMENT are supported. Refer to the + `guide `__ + for detailed information on available safety settings. + Also refer to the `Safety + guidance `__ + to learn how to incorporate safety considerations in + your AI applications. This corresponds to the ``safety_settings`` field on the ``request`` instance; if ``request`` is provided, this @@ -590,8 +596,9 @@ def stream_generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[generative_service.GenerateContentResponse]]: - r"""Generates a streamed response from the model given an input - ``GenerateContentRequest``. + r"""Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. .. code-block:: python @@ -634,12 +641,14 @@ async def sample_stream_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (:class:`MutableSequence[google.ai.generativelanguage_v1beta.types.Content]`): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. 
For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -652,18 +661,18 @@ async def sample_stream_generate_content(): Returns: AsyncIterable[google.ai.generativelanguage_v1beta.types.GenerateContentResponse]: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. @@ -724,8 +733,9 @@ async def embed_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.EmbedContentResponse: - r"""Generates an embedding from the model given an input - ``Content``. + r"""Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. .. 
code-block:: python @@ -848,8 +858,9 @@ async def batch_embed_contents( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.BatchEmbedContentsResponse: - r"""Generates multiple embeddings from the model given - input text in a synchronous call. + r"""Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. .. code-block:: python @@ -973,8 +984,10 @@ async def count_tokens( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.CountTokensResponse: - r"""Runs a model's tokenizer on input content and returns - the token count. + r"""Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. .. code-block:: python diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py index 0dc4b14b681f..64141f79649a 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py @@ -679,7 +679,7 @@ def __init__( Type[GenerativeServiceTransport], Callable[..., GenerativeServiceTransport], ] = ( - type(self).get_transport_class(transport) + GenerativeServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GenerativeServiceTransport], transport) ) @@ -708,14 +708,15 @@ def generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.GenerateContentResponse: - r"""Generates a 
response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + r"""Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. .. code-block:: python @@ -757,12 +758,14 @@ def sample_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -775,18 +778,18 @@ def sample_generate_content(): Returns: google.ai.generativelanguage_v1beta.types.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. 
The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. @@ -880,8 +883,8 @@ def sample_generate_answer(): Args: request (Union[google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest, dict]): - The request object. Request to generate a grounded answer - from the model. + The request object. Request to generate a grounded answer from the + ``Model``. model (str): Required. The name of the ``Model`` to use for generating the grounded response. @@ -893,13 +896,13 @@ def sample_generate_answer(): should not be set. contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): Required. The content of the current conversation with - the model. For single-turn queries, this is a single + the ``Model``. For single-turn queries, this is a single question to answer. For multi-turn queries, this is a repeated field that contains conversation history and the last ``Content`` in the list containing the question. - Note: GenerateAnswer currently only supports queries in + Note: ``GenerateAnswer`` only supports queries in English. This corresponds to the ``contents`` field @@ -923,7 +926,13 @@ def sample_generate_answer(): categories HARM_CATEGORY_HATE_SPEECH, HARM_CATEGORY_SEXUALLY_EXPLICIT, HARM_CATEGORY_DANGEROUS_CONTENT, - HARM_CATEGORY_HARASSMENT are supported. + HARM_CATEGORY_HARASSMENT are supported. Refer to the + `guide `__ + for detailed information on available safety settings. 
+ Also refer to the `Safety + guidance `__ + to learn how to incorporate safety considerations in + your AI applications. This corresponds to the ``safety_settings`` field on the ``request`` instance; if ``request`` is provided, this @@ -1008,8 +1017,9 @@ def stream_generate_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[generative_service.GenerateContentResponse]: - r"""Generates a streamed response from the model given an input - ``GenerateContentRequest``. + r"""Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. .. code-block:: python @@ -1052,12 +1062,14 @@ def sample_stream_generate_content(): on the ``request`` instance; if ``request`` is provided, this should not be set. contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a - single instance. For multi-turn queries, - this is a repeated field that contains - conversation history + latest request. + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. This corresponds to the ``contents`` field on the ``request`` instance; if ``request`` is provided, this @@ -1070,18 +1082,18 @@ def sample_stream_generate_content(): Returns: Iterable[google.ai.generativelanguage_v1beta.types.GenerateContentResponse]: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. 
They - are reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in GenerateContentResponse.prompt_feedback and for each candidate in finish_reason and in safety_ratings. The - API contract is that: - either all requested - candidates are returned or no candidates at all - no - candidates are returned only if there was something - wrong with the prompt (see prompt_feedback) - - feedback on each candidate is reported on - finish_reason and safety_ratings. + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. """ # Create or coerce a protobuf request object. @@ -1139,8 +1151,9 @@ def embed_content( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.EmbedContentResponse: - r"""Generates an embedding from the model given an input - ``Content``. + r"""Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. .. code-block:: python @@ -1260,8 +1273,9 @@ def batch_embed_contents( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.BatchEmbedContentsResponse: - r"""Generates multiple embeddings from the model given - input text in a synchronous call. + r"""Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. .. code-block:: python @@ -1382,8 +1396,10 @@ def count_tokens( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> generative_service.CountTokensResponse: - r"""Runs a model's tokenizer on input content and returns - the token count. 
+ r"""Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. .. code-block:: python diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc.py index 1a4ef014a66e..4ff9e19a87b2 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc.py @@ -245,14 +245,15 @@ def generate_content( ]: r"""Return a callable for the generate content method over gRPC. - Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. Returns: Callable[[~.GenerateContentRequest], @@ -311,8 +312,9 @@ def stream_generate_content( ]: r"""Return a callable for the stream generate content method over gRPC. - Generates a streamed response from the model given an input - ``GenerateContentRequest``. + Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. Returns: Callable[[~.GenerateContentRequest], @@ -341,8 +343,9 @@ def embed_content( ]: r"""Return a callable for the embed content method over gRPC. - Generates an embedding from the model given an input - ``Content``. 
+ Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. Returns: Callable[[~.EmbedContentRequest], @@ -371,8 +374,9 @@ def batch_embed_contents( ]: r"""Return a callable for the batch embed contents method over gRPC. - Generates multiple embeddings from the model given - input text in a synchronous call. + Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. Returns: Callable[[~.BatchEmbedContentsRequest], @@ -400,8 +404,10 @@ def count_tokens( ]: r"""Return a callable for the count tokens method over gRPC. - Runs a model's tokenizer on input content and returns - the token count. + Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. Returns: Callable[[~.CountTokensRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py index d175309053ed..67f3f3bec7de 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py @@ -249,14 +249,15 @@ def generate_content( ]: r"""Return a callable for the generate content method over gRPC. - Generates a response from the model given an input - ``GenerateContentRequest``. - - Input capabilities differ between models, including tuned - models. See the `model - guide `__ and `tuning - guide `__ for - details. + Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. 
Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. Returns: Callable[[~.GenerateContentRequest], @@ -315,8 +316,9 @@ def stream_generate_content( ]: r"""Return a callable for the stream generate content method over gRPC. - Generates a streamed response from the model given an input - ``GenerateContentRequest``. + Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. Returns: Callable[[~.GenerateContentRequest], @@ -345,8 +347,9 @@ def embed_content( ]: r"""Return a callable for the embed content method over gRPC. - Generates an embedding from the model given an input - ``Content``. + Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. Returns: Callable[[~.EmbedContentRequest], @@ -375,8 +378,9 @@ def batch_embed_contents( ]: r"""Return a callable for the batch embed contents method over gRPC. - Generates multiple embeddings from the model given - input text in a synchronous call. + Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. Returns: Callable[[~.BatchEmbedContentsRequest], @@ -405,8 +409,10 @@ def count_tokens( ]: r"""Return a callable for the count tokens method over gRPC. - Runs a model's tokenizer on input content and returns - the token count. + Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. 
Returns: Callable[[~.CountTokensRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py index 28b876f57052..3bf4d1331eec 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py @@ -669,8 +669,8 @@ def __call__( Args: request (~.generative_service.GenerateAnswerRequest): - The request object. Request to generate a grounded answer - from the model. + The request object. Request to generate a grounded answer from the + ``Model``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -774,20 +774,21 @@ def __call__( Returns: ~.generative_service.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They are - reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in ``GenerateContentResponse.prompt_feedback`` and for each candidate in ``finish_reason`` and in - ``safety_ratings``. The API contract is that: + ``safety_ratings``. 
The API: - - either all requested candidates are returned or no - candidates at all - - no candidates are returned only if there was - something wrong with the prompt (see + - Returns either all requested candidates or none of + them + - Returns no candidates at all only if there was + something wrong with the prompt (check ``prompt_feedback``) - - feedback on each candidate is reported on + - Reports feedback on each candidate in ``finish_reason`` and ``safety_ratings``. """ @@ -889,20 +890,21 @@ def __call__( Returns: ~.generative_service.GenerateContentResponse: - Response from the model supporting multiple candidates. + Response from the model supporting multiple candidate + responses. - Note on safety ratings and content filtering. They are - reported for both prompt in + Safety ratings and content filtering are reported for + both prompt in ``GenerateContentResponse.prompt_feedback`` and for each candidate in ``finish_reason`` and in - ``safety_ratings``. The API contract is that: + ``safety_ratings``. The API: - - either all requested candidates are returned or no - candidates at all - - no candidates are returned only if there was - something wrong with the prompt (see + - Returns either all requested candidates or none of + them + - Returns no candidates at all only if there was + something wrong with the prompt (check ``prompt_feedback``) - - feedback on each candidate is reported on + - Reports feedback on each candidate in ``finish_reason`` and ``safety_ratings``. 
""" diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/async_client.py index 2177c1e0a437..6cd12d3367ac 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ModelServiceClient).get_transport_class, type(ModelServiceClient) - ) + get_transport_class = ModelServiceClient.get_transport_class def __init__( self, @@ -274,7 +271,12 @@ async def get_model( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> model.Model: - r"""Gets information about a specific Model. + r"""Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. .. code-block:: python @@ -385,7 +387,9 @@ async def list_models( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListModelsAsyncPager: - r"""Lists models available through the API. + r"""Lists the + ```Model``\ s `__ + available through the Gemini API. .. code-block:: python @@ -419,10 +423,9 @@ async def sample_list_models(): page_size (:class:`int`): The maximum number of ``Models`` to return (per page). - The service may return fewer models. If unspecified, at - most 50 models will be returned per page. 
This method - returns at most 1000 models per page, even if you pass a - larger page_size. + If unspecified, 50 models will be returned per page. + This method returns at most 1000 models per page, even + if you pass a larger page_size. This corresponds to the ``page_size`` field on the ``request`` instance; if ``request`` is provided, this @@ -626,7 +629,7 @@ async def list_tuned_models( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTunedModelsAsyncPager: - r"""Lists tuned models owned by the user. + r"""Lists created tuned models. .. code-block:: python @@ -761,12 +764,11 @@ async def create_tuned_model( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates a tuned model. Intermediate tuning progress (if any) is - accessed through the [google.longrunning.Operations] service. + r"""Creates a tuned model. Check intermediate tuning progress (if + any) through the [google.longrunning.Operations] service. - Status and results can be accessed through the Operations - service. Example: GET - /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + Access status and results through the Operations service. + Example: GET /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 .. code-block:: python @@ -815,7 +817,7 @@ async def sample_create_tuned_model(): specified. This value should be up to 40 characters, the first character must be a letter, the last could be a letter or a number. The id must match the regular - expression: `a-z <[a-z0-9-]{0,38}[a-z0-9]>`__?. + expression: ``[a-z]([a-z0-9-]{0,38}[a-z0-9])?``. 
This corresponds to the ``tuned_model_id`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py index 013ab369acb9..c35b6b3c2168 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py @@ -673,7 +673,7 @@ def __init__( transport_init: Union[ Type[ModelServiceTransport], Callable[..., ModelServiceTransport] ] = ( - type(self).get_transport_class(transport) + ModelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ModelServiceTransport], transport) ) @@ -699,7 +699,12 @@ def get_model( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> model.Model: - r"""Gets information about a specific Model. + r"""Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. .. code-block:: python @@ -807,7 +812,9 @@ def list_models( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListModelsPager: - r"""Lists models available through the API. + r"""Lists the + ```Model``\ s `__ + available through the Gemini API. .. code-block:: python @@ -841,10 +848,9 @@ def sample_list_models(): page_size (int): The maximum number of ``Models`` to return (per page). - The service may return fewer models. If unspecified, at - most 50 models will be returned per page. This method - returns at most 1000 models per page, even if you pass a - larger page_size. 
+ If unspecified, 50 models will be returned per page. + This method returns at most 1000 models per page, even + if you pass a larger page_size. This corresponds to the ``page_size`` field on the ``request`` instance; if ``request`` is provided, this @@ -1042,7 +1048,7 @@ def list_tuned_models( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTunedModelsPager: - r"""Lists tuned models owned by the user. + r"""Lists created tuned models. .. code-block:: python @@ -1174,12 +1180,11 @@ def create_tuned_model( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Creates a tuned model. Intermediate tuning progress (if any) is - accessed through the [google.longrunning.Operations] service. + r"""Creates a tuned model. Check intermediate tuning progress (if + any) through the [google.longrunning.Operations] service. - Status and results can be accessed through the Operations - service. Example: GET - /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + Access status and results through the Operations service. + Example: GET /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 .. code-block:: python @@ -1228,7 +1233,7 @@ def sample_create_tuned_model(): specified. This value should be up to 40 characters, the first character must be a letter, the last could be a letter or a number. The id must match the regular - expression: `a-z <[a-z0-9-]{0,38}[a-z0-9]>`__?. + expression: ``[a-z]([a-z0-9-]{0,38}[a-z0-9])?``. 
This corresponds to the ``tuned_model_id`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc.py index 0649fc878074..7e61ad9e403c 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc.py @@ -258,7 +258,12 @@ def operations_client(self) -> operations_v1.OperationsClient: def get_model(self) -> Callable[[model_service.GetModelRequest], model.Model]: r"""Return a callable for the get model method over gRPC. - Gets information about a specific Model. + Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. Returns: Callable[[~.GetModelRequest], @@ -284,7 +289,9 @@ def list_models( ) -> Callable[[model_service.ListModelsRequest], model_service.ListModelsResponse]: r"""Return a callable for the list models method over gRPC. - Lists models available through the API. + Lists the + ```Model``\ s `__ + available through the Gemini API. Returns: Callable[[~.ListModelsRequest], @@ -338,7 +345,7 @@ def list_tuned_models( ]: r"""Return a callable for the list tuned models method over gRPC. - Lists tuned models owned by the user. + Lists created tuned models. Returns: Callable[[~.ListTunedModelsRequest], @@ -364,12 +371,11 @@ def create_tuned_model( ) -> Callable[[model_service.CreateTunedModelRequest], operations_pb2.Operation]: r"""Return a callable for the create tuned model method over gRPC. - Creates a tuned model. 
Intermediate tuning progress (if any) is - accessed through the [google.longrunning.Operations] service. + Creates a tuned model. Check intermediate tuning progress (if + any) through the [google.longrunning.Operations] service. - Status and results can be accessed through the Operations - service. Example: GET - /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + Access status and results through the Operations service. + Example: GET /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 Returns: Callable[[~.CreateTunedModelRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc_asyncio.py index bfce8ab6172a..80d377114d32 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc_asyncio.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc_asyncio.py @@ -266,7 +266,12 @@ def get_model( ) -> Callable[[model_service.GetModelRequest], Awaitable[model.Model]]: r"""Return a callable for the get model method over gRPC. - Gets information about a specific Model. + Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. Returns: Callable[[~.GetModelRequest], @@ -294,7 +299,9 @@ def list_models( ]: r"""Return a callable for the list models method over gRPC. - Lists models available through the API. + Lists the + ```Model``\ s `__ + available through the Gemini API. Returns: Callable[[~.ListModelsRequest], @@ -351,7 +358,7 @@ def list_tuned_models( ]: r"""Return a callable for the list tuned models method over gRPC. - Lists tuned models owned by the user. + Lists created tuned models. 
Returns: Callable[[~.ListTunedModelsRequest], @@ -379,12 +386,11 @@ def create_tuned_model( ]: r"""Return a callable for the create tuned model method over gRPC. - Creates a tuned model. Intermediate tuning progress (if any) is - accessed through the [google.longrunning.Operations] service. + Creates a tuned model. Check intermediate tuning progress (if + any) through the [google.longrunning.Operations] service. - Status and results can be accessed through the Operations - service. Example: GET - /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + Access status and results through the Operations service. + Example: GET /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 Returns: Callable[[~.CreateTunedModelRequest], diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/async_client.py index a7ddc4392a3f..a44ae51107b1 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PermissionServiceClient).get_transport_class, type(PermissionServiceClient) - ) + get_transport_class = PermissionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py index e38752e95708..61db870f8cb2 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py @@ -669,7 +669,7 @@ def __init__( Type[PermissionServiceTransport], Callable[..., PermissionServiceTransport], ] = ( - type(self).get_transport_class(transport) + PermissionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PermissionServiceTransport], transport) ) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/gateway.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py similarity index 77% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/gateway.py rename to packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py index ca8527fb89ec..6c64cf5ad1c0 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/gateway.py +++ 
b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py @@ -13,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto # type: ignore +from .async_client import PredictionServiceAsyncClient +from .client import PredictionServiceClient -__protobuf__ = proto.module( - package="google.cloud.gkeconnect.gateway.v1beta1", - manifest={}, +__all__ = ( + "PredictionServiceClient", + "PredictionServiceAsyncClient", ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py new file mode 100644 index 000000000000..f9e04e3e2aea --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py @@ -0,0 +1,391 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .client import PredictionServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport + + +class PredictionServiceAsyncClient: + """A service for online predictions and explanations.""" + + _client: PredictionServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = PredictionServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = PredictionServiceClient._DEFAULT_UNIVERSE + + model_path = staticmethod(PredictionServiceClient.model_path) + parse_model_path = staticmethod(PredictionServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + PredictionServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PredictionServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PredictionServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + PredictionServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + PredictionServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PredictionServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(PredictionServiceClient.common_project_path) + parse_common_project_path = staticmethod( + PredictionServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(PredictionServiceClient.common_location_path) + parse_common_location_path = staticmethod( + PredictionServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceAsyncClient: The constructed client. 
+ """ + return PredictionServiceClient.from_service_account_info.__func__(PredictionServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceAsyncClient: The constructed client. + """ + return PredictionServiceClient.from_service_account_file.__func__(PredictionServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PredictionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PredictionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PredictionServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = PredictionServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PredictionServiceTransport, + Callable[..., PredictionServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the prediction service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PredictionServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PredictionServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def predict( + self, + request: Optional[Union[prediction_service.PredictRequest, dict]] = None, + *, + model: Optional[str] = None, + instances: Optional[MutableSequence[struct_pb2.Value]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> prediction_service.PredictResponse: + r"""Performs a prediction request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceAsyncClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = await client.predict(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.PredictRequest, dict]]): + The request object. Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + model (:class:`str`): + Required. The name of the model for prediction. 
Format: + ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances (:class:`MutableSequence[google.protobuf.struct_pb2.Value]`): + Required. The instances that are the + input to the prediction call. + + This corresponds to the ``instances`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.PredictResponse: + Response message for [PredictionService.Predict]. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, instances]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, prediction_service.PredictRequest): + request = prediction_service.PredictRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if instances: + request.instances.extend(instances) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.predict] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "PredictionServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PredictionServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py new file mode 100644 index 000000000000..48736239098d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py @@ -0,0 +1,814 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .transports.base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .transports.grpc import PredictionServiceGrpcTransport +from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport +from .transports.rest import PredictionServiceRestTransport + + +class PredictionServiceClientMeta(type): + """Metaclass for the PredictionService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[PredictionServiceTransport]] + _transport_registry["grpc"] = PredictionServiceGrpcTransport + _transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport + _transport_registry["rest"] = PredictionServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[PredictionServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PredictionServiceClient(metaclass=PredictionServiceClientMeta): + """A service for online predictions and explanations.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PredictionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PredictionServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse 
a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. 
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. 
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = PredictionServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. 
+ + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or PredictionServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+ """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PredictionServiceTransport, + Callable[..., PredictionServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the prediction service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PredictionServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = PredictionServiceClient._read_environment_variables() + self._client_cert_source = PredictionServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = PredictionServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. 
+ self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, PredictionServiceTransport) + if transport_provided: + # transport is a PredictionServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(PredictionServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or PredictionServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[PredictionServiceTransport], + Callable[..., PredictionServiceTransport], + ] = ( + PredictionServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., PredictionServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + 
scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def predict( + self, + request: Optional[Union[prediction_service.PredictRequest, dict]] = None, + *, + model: Optional[str] = None, + instances: Optional[MutableSequence[struct_pb2.Value]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> prediction_service.PredictResponse: + r"""Performs a prediction request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = client.predict(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.PredictRequest, dict]): + The request object. Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + model (str): + Required. The name of the model for prediction. Format: + ``name=models/{model}``. 
+ + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances (MutableSequence[google.protobuf.struct_pb2.Value]): + Required. The instances that are the + input to the prediction call. + + This corresponds to the ``instances`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.PredictResponse: + Response message for [PredictionService.Predict]. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, instances]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, prediction_service.PredictRequest): + request = prediction_service.PredictRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if instances is not None: + request.instances.extend(instances) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.predict] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PredictionServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PredictionServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py new file mode 100644 index 000000000000..d6d645ba1ff1 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PredictionServiceTransport +from .grpc import PredictionServiceGrpcTransport +from .grpc_asyncio import PredictionServiceGrpcAsyncIOTransport +from .rest import PredictionServiceRestInterceptor, PredictionServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PredictionServiceTransport]] +_transport_registry["grpc"] = PredictionServiceGrpcTransport +_transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport +_transport_registry["rest"] = PredictionServiceRestTransport + +__all__ = ( + "PredictionServiceTransport", + "PredictionServiceGrpcTransport", + "PredictionServiceGrpcAsyncIOTransport", + "PredictionServiceRestTransport", + "PredictionServiceRestInterceptor", +) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py similarity index 81% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/base.py rename to packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py index 48a3880b9b11..1b36658ad423 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/base.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py @@ -25,20 +25,20 @@ from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore -from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import label_service +from google.ai.generativelanguage_v1beta import gapic_version as package_version +from 
google.ai.generativelanguage_v1beta.types import prediction_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) -class LabelServiceTransport(abc.ABC): - """Abstract transport class for LabelService.""" +class PredictionServiceTransport(abc.ABC): + """Abstract transport class for PredictionService.""" AUTH_SCOPES = () - DEFAULT_HOST: str = "admanager.googleapis.com" + DEFAULT_HOST: str = "generativelanguage.googleapis.com" def __init__( self, @@ -57,7 +57,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). + The hostname to connect to (default: 'generativelanguage.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -129,13 +129,8 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { - self.get_label: gapic_v1.method.wrap_method( - self.get_label, - default_timeout=None, - client_info=client_info, - ), - self.list_labels: gapic_v1.method.wrap_method( - self.list_labels, + self.predict: gapic_v1.method.wrap_method( + self.predict, default_timeout=None, client_info=client_info, ), @@ -151,38 +146,20 @@ def close(self): raise NotImplementedError() @property - def get_label( + def predict( self, ) -> Callable[ - [label_service.GetLabelRequest], - Union[label_service.Label, Awaitable[label_service.Label]], - ]: - raise NotImplementedError() - - @property - def list_labels( - self, - ) -> Callable[ - [label_service.ListLabelsRequest], + [prediction_service.PredictRequest], Union[ - label_service.ListLabelsResponse, - Awaitable[label_service.ListLabelsResponse], + prediction_service.PredictResponse, + Awaitable[prediction_service.PredictResponse], ], ]: raise NotImplementedError() - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - @property def kind(self) -> str: raise NotImplementedError() -__all__ = ("LabelServiceTransport",) +__all__ = ("PredictionServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py new file mode 100644 index 000000000000..285c2ff8af46 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py @@ -0,0 +1,274 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO, PredictionServiceTransport + + +class PredictionServiceGrpcTransport(PredictionServiceTransport): + """gRPC backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], prediction_service.PredictResponse + ]: + r"""Return a callable for the predict method over gRPC. + + Performs a prediction request. + + Returns: + Callable[[~.PredictRequest], + ~.PredictResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "predict" not in self._stubs: + self._stubs["predict"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PredictionService/Predict", + request_serializer=prediction_service.PredictRequest.serialize, + response_deserializer=prediction_service.PredictResponse.deserialize, + ) + return self._stubs["predict"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("PredictionServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..1348f51f6706 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py @@ -0,0 +1,285 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .grpc import PredictionServiceGrpcTransport + + +class PredictionServiceGrpcAsyncIOTransport(PredictionServiceTransport): + """gRPC AsyncIO backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], + Awaitable[prediction_service.PredictResponse], + ]: + r"""Return a callable for the predict method over gRPC. + + Performs a prediction request. + + Returns: + Callable[[~.PredictRequest], + Awaitable[~.PredictResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "predict" not in self._stubs: + self._stubs["predict"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PredictionService/Predict", + request_serializer=prediction_service.PredictRequest.serialize, + response_deserializer=prediction_service.PredictResponse.deserialize, + ) + return self._stubs["predict"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.predict: gapic_v1.method_async.wrap_method( + self.predict, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("PredictionServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py new file mode 100644 index 000000000000..0fd462caa988 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py @@ -0,0 +1,313 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import PredictionServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class PredictionServiceRestInterceptor: + """Interceptor for PredictionService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PredictionServiceRestTransport. + + .. 
code-block:: python + class MyCustomPredictionServiceInterceptor(PredictionServiceRestInterceptor): + def pre_predict(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_predict(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PredictionServiceRestTransport(interceptor=MyCustomPredictionServiceInterceptor()) + client = PredictionServiceClient(transport=transport) + + + """ + + def pre_predict( + self, + request: prediction_service.PredictRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[prediction_service.PredictRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for predict + + Override in a subclass to manipulate the request or metadata + before they are sent to the PredictionService server. + """ + return request, metadata + + def post_predict( + self, response: prediction_service.PredictResponse + ) -> prediction_service.PredictResponse: + """Post-rpc interceptor for predict + + Override in a subclass to manipulate the response + after it is returned by the PredictionService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PredictionServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PredictionServiceRestInterceptor + + +class PredictionServiceRestTransport(PredictionServiceTransport): + """REST backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PredictionServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or PredictionServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Predict(PredictionServiceRestStub): + def __hash__(self): + return hash("Predict") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: prediction_service.PredictRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> prediction_service.PredictResponse: + r"""Call the predict method over HTTP. + + Args: + request (~.prediction_service.PredictRequest): + The request object. 
Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.prediction_service.PredictResponse: + Response message for [PredictionService.Predict]. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:predict", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_predict(request, metadata) + pb_request = prediction_service.PredictRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = prediction_service.PredictResponse() + pb_resp = prediction_service.PredictResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_predict(resp) + return resp + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], prediction_service.PredictResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Predict(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PredictionServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/async_client.py index 39d67e5559b9..9f8898271113 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,9 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RetrieverServiceClient).get_transport_class, type(RetrieverServiceClient) - ) + get_transport_class = RetrieverServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py index 336cc97cc34b..2efc8f181dd1 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py @@ -701,7 +701,7 @@ def __init__( Type[RetrieverServiceTransport], Callable[..., RetrieverServiceTransport], ] = ( - type(self).get_transport_class(transport) + RetrieverServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RetrieverServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/async_client.py index 672e05be2d15..adebc14f79ae 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TextServiceClient).get_transport_class, type(TextServiceClient) - ) + get_transport_class = TextServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py index 6df1eedcc78d..37c0ff946c1e 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py @@ -653,7 +653,7 @@ def __init__( transport_init: Union[ Type[TextServiceTransport], Callable[..., TextServiceTransport] ] = ( - type(self).get_transport_class(transport) + TextServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TextServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py index 89b4f8ad01b8..9dd7a564142d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py @@ -28,12 +28,14 @@ CodeExecution, CodeExecutionResult, Content, + DynamicRetrievalConfig, ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, FunctionDeclaration, FunctionResponse, + GoogleSearchRetrieval, GroundingPassage, GroundingPassages, Part, @@ -76,6 +78,13 @@ GenerateContentResponse, GenerationConfig, GroundingAttribution, + 
GroundingChunk, + GroundingMetadata, + GroundingSupport, + LogprobsResult, + RetrievalMetadata, + SearchEntryPoint, + Segment, SemanticRetrieverConfig, TaskType, ) @@ -103,6 +112,7 @@ TransferOwnershipResponse, UpdatePermissionRequest, ) +from .prediction_service import PredictRequest, PredictResponse from .retriever import ( Chunk, ChunkData, @@ -188,12 +198,14 @@ "CodeExecution", "CodeExecutionResult", "Content", + "DynamicRetrievalConfig", "ExecutableCode", "FileData", "FunctionCall", "FunctionCallingConfig", "FunctionDeclaration", "FunctionResponse", + "GoogleSearchRetrieval", "GroundingPassage", "GroundingPassages", "Part", @@ -231,6 +243,13 @@ "GenerateContentResponse", "GenerationConfig", "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", + "GroundingSupport", + "LogprobsResult", + "RetrievalMetadata", + "SearchEntryPoint", + "Segment", "SemanticRetrieverConfig", "TaskType", "Model", @@ -253,6 +272,8 @@ "TransferOwnershipRequest", "TransferOwnershipResponse", "UpdatePermissionRequest", + "PredictRequest", + "PredictResponse", "Chunk", "ChunkData", "Condition", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py index c5e42c09d5d4..6b5d37cd15ce 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py @@ -31,6 +31,8 @@ "ExecutableCode", "CodeExecutionResult", "Tool", + "GoogleSearchRetrieval", + "DynamicRetrievalConfig", "CodeExecution", "ToolConfig", "FunctionCallingConfig", @@ -354,14 +356,18 @@ class Tool(proto.Message): The model or system does not execute the function. Instead the defined function may be returned as a - [FunctionCall][content.part.function_call] with arguments to - the client side for execution. 
The model may decide to call - a subset of these functions by populating - [FunctionCall][content.part.function_call] in the response. - The next conversation turn may contain a - [FunctionResponse][content.part.function_response] with the - [content.role] "function" generation context for the next - model turn. + [FunctionCall][google.ai.generativelanguage.v1beta.Part.function_call] + with arguments to the client side for execution. The model + may decide to call a subset of these functions by populating + [FunctionCall][google.ai.generativelanguage.v1beta.Part.function_call] + in the response. The next conversation turn may contain a + [FunctionResponse][google.ai.generativelanguage.v1beta.Part.function_response] + with the + [Content.role][google.ai.generativelanguage.v1beta.Content.role] + "function" generation context for the next model turn. + google_search_retrieval (google.ai.generativelanguage_v1beta.types.GoogleSearchRetrieval): + Optional. Retrieval tool that is powered by + Google search. code_execution (google.ai.generativelanguage_v1beta.types.CodeExecution): Optional. Enables the model to execute code as part of generation. @@ -372,6 +378,11 @@ class Tool(proto.Message): number=1, message="FunctionDeclaration", ) + google_search_retrieval: "GoogleSearchRetrieval" = proto.Field( + proto.MESSAGE, + number=2, + message="GoogleSearchRetrieval", + ) code_execution: "CodeExecution" = proto.Field( proto.MESSAGE, number=3, @@ -379,6 +390,65 @@ class Tool(proto.Message): ) +class GoogleSearchRetrieval(proto.Message): + r"""Tool to retrieve public web data for grounding, powered by + Google. + + Attributes: + dynamic_retrieval_config (google.ai.generativelanguage_v1beta.types.DynamicRetrievalConfig): + Specifies the dynamic retrieval configuration + for the given source. 
+ """ + + dynamic_retrieval_config: "DynamicRetrievalConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="DynamicRetrievalConfig", + ) + + +class DynamicRetrievalConfig(proto.Message): + r"""Describes the options to customize dynamic retrieval. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + mode (google.ai.generativelanguage_v1beta.types.DynamicRetrievalConfig.Mode): + The mode of the predictor to be used in + dynamic retrieval. + dynamic_threshold (float): + The threshold to be used in dynamic + retrieval. If not set, a system default value is + used. + + This field is a member of `oneof`_ ``_dynamic_threshold``. + """ + + class Mode(proto.Enum): + r"""The mode of the predictor to be used in dynamic retrieval. + + Values: + MODE_UNSPECIFIED (0): + Always trigger retrieval. + MODE_DYNAMIC (1): + Run retrieval only when system decides it is + necessary. + """ + MODE_UNSPECIFIED = 0 + MODE_DYNAMIC = 1 + + mode: Mode = proto.Field( + proto.ENUM, + number=1, + enum=Mode, + ) + dynamic_threshold: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class CodeExecution(proto.Message): r"""Tool that executes code generated by the model, and automatically returns the result to the model. @@ -587,6 +657,7 @@ class Schema(proto.Message): for NUMBER type: float, double for INTEGER type: int32, int64 + for STRING type: enum description (str): Optional. A brief description of the parameter. This could contain examples of use. @@ -604,6 +675,12 @@ class Schema(proto.Message): Type.ARRAY. This field is a member of `oneof`_ ``_items``. + max_items (int): + Optional. Maximum number of the elements for + Type.ARRAY. + min_items (int): + Optional. Minimum number of the elements for + Type.ARRAY. properties (MutableMapping[str, google.ai.generativelanguage_v1beta.types.Schema]): Optional. Properties of Type.OBJECT. 
required (MutableSequence[str]): @@ -637,6 +714,14 @@ class Schema(proto.Message): optional=True, message="Schema", ) + max_items: int = proto.Field( + proto.INT64, + number=21, + ) + min_items: int = proto.Field( + proto.INT64, + number=22, + ) properties: MutableMapping[str, "Schema"] = proto.MapField( proto.STRING, proto.MESSAGE, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py index a6cbe296b63c..edc4c8ec0ff8 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py @@ -32,8 +32,15 @@ "SemanticRetrieverConfig", "GenerateContentResponse", "Candidate", + "LogprobsResult", "AttributionSourceId", "GroundingAttribution", + "RetrievalMetadata", + "GroundingMetadata", + "SearchEntryPoint", + "GroundingChunk", + "Segment", + "GroundingSupport", "GenerateAnswerRequest", "GenerateAnswerResponse", "EmbedContentRequest", @@ -98,28 +105,38 @@ class GenerateContentRequest(proto.Message): Format: ``name=models/{model}``. system_instruction (google.ai.generativelanguage_v1beta.types.Content): - Optional. Developer set system instruction. + Optional. Developer set `system + instruction(s) `__. Currently, text only. This field is a member of `oneof`_ ``_system_instruction``. contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): - Required. The content of the current - conversation with the model. - For single-turn queries, this is a single - instance. For multi-turn queries, this is a - repeated field that contains conversation - history + latest request. + Required. The content of the current conversation with the + model. + + For single-turn queries, this is a single instance. 
For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. tools (MutableSequence[google.ai.generativelanguage_v1beta.types.Tool]): - Optional. A list of ``Tools`` the model may use to generate - the next response. + Optional. A list of ``Tools`` the ``Model`` may use to + generate the next response. A ``Tool`` is a piece of code that enables the system to interact with external systems to perform an action, or set - of actions, outside of knowledge and scope of the model. The - only supported tool is currently ``Function``. + of actions, outside of knowledge and scope of the ``Model``. + Supported ``Tool``\ s are ``Function`` and + ``code_execution``. Refer to the `Function + calling `__ + and the `Code + execution `__ + guides to learn more. tool_config (google.ai.generativelanguage_v1beta.types.ToolConfig): Optional. Tool configuration for any ``Tool`` specified in - the request. + the request. Refer to the `Function calling + guide `__ + for a usage example. safety_settings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetySetting]): Optional. A list of unique ``SafetySetting`` instances for blocking unsafe content. @@ -137,17 +154,22 @@ class GenerateContentRequest(proto.Message): categories HARM_CATEGORY_HATE_SPEECH, HARM_CATEGORY_SEXUALLY_EXPLICIT, HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT - are supported. + are supported. Refer to the + `guide `__ + for detailed information on available safety settings. Also + refer to the `Safety + guidance `__ + to learn how to incorporate safety considerations in your AI + applications. generation_config (google.ai.generativelanguage_v1beta.types.GenerationConfig): Optional. Configuration options for model generation and outputs. This field is a member of `oneof`_ ``_generation_config``. cached_content (str): - Optional. The name of the cached content used as context to - serve the prediction. 
Note: only used in explicit caching, - where users can have control over caching (e.g. what content - to cache) and enjoy guaranteed cost savings. Format: + Optional. The name of the content + `cached `__ + to use as context to serve the prediction. Format: ``cachedContents/{cachedContent}`` This field is a member of `oneof`_ ``_cached_content``. @@ -198,7 +220,7 @@ class GenerateContentRequest(proto.Message): class GenerationConfig(proto.Message): r"""Configuration options for model generation and outputs. Not - all parameters may be configurable for every model. + all parameters are configurable for every model. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -212,14 +234,13 @@ class GenerationConfig(proto.Message): This field is a member of `oneof`_ ``_candidate_count``. stop_sequences (MutableSequence[str]): - Optional. The set of character sequences (up - to 5) that will stop output generation. If - specified, the API will stop at the first - appearance of a stop sequence. The stop sequence - will not be included as part of the response. + Optional. The set of character sequences (up to 5) that will + stop output generation. If specified, the API will stop at + the first appearance of a ``stop_sequence``. The stop + sequence will not be included as part of the response. max_output_tokens (int): Optional. The maximum number of tokens to include in a - candidate. + response candidate. Note: The default value varies by model, see the ``Model.output_token_limit`` attribute of the ``Model`` @@ -240,49 +261,108 @@ class GenerationConfig(proto.Message): Optional. The maximum cumulative probability of tokens to consider when sampling. - The model uses combined Top-k and nucleus sampling. + The model uses combined Top-k and Top-p (nucleus) sampling. Tokens are sorted based on their assigned probabilities so that only the most likely tokens are considered. 
Top-k sampling directly limits the maximum number of tokens to - consider, while Nucleus sampling limits number of tokens + consider, while Nucleus sampling limits the number of tokens based on the cumulative probability. - Note: The default value varies by model, see the - ``Model.top_p`` attribute of the ``Model`` returned from the - ``getModel`` function. + Note: The default value varies by ``Model`` and is specified + by the\ ``Model.top_p`` attribute returned from the + ``getModel`` function. An empty ``top_k`` attribute + indicates that the model doesn't apply top-k sampling and + doesn't allow setting ``top_k`` on requests. This field is a member of `oneof`_ ``_top_p``. top_k (int): Optional. The maximum number of tokens to consider when sampling. - Models use nucleus sampling or combined Top-k and nucleus - sampling. Top-k sampling considers the set of ``top_k`` most - probable tokens. Models running with nucleus sampling don't - allow top_k setting. + Gemini models use Top-p (nucleus) sampling or a combination + of Top-k and nucleus sampling. Top-k sampling considers the + set of ``top_k`` most probable tokens. Models running with + nucleus sampling don't allow top_k setting. - Note: The default value varies by model, see the - ``Model.top_k`` attribute of the ``Model`` returned from the - ``getModel`` function. Empty ``top_k`` field in ``Model`` - indicates the model doesn't apply top-k sampling and doesn't - allow setting ``top_k`` on requests. + Note: The default value varies by ``Model`` and is specified + by the\ ``Model.top_p`` attribute returned from the + ``getModel`` function. An empty ``top_k`` attribute + indicates that the model doesn't apply top-k sampling and + doesn't allow setting ``top_k`` on requests. This field is a member of `oneof`_ ``_top_k``. response_mime_type (str): - Optional. Output response mimetype of the generated - candidate text. Supported mimetype: ``text/plain``: - (default) Text output. 
``application/json``: JSON response - in the candidates. + Optional. MIME type of the generated candidate text. + Supported MIME types are: ``text/plain``: (default) Text + output. ``application/json``: JSON response in the response + candidates. ``text/x.enum``: ENUM as a string response in + the response candidates. Refer to the + `docs `__ + for a list of all supported text MIME types. response_schema (google.ai.generativelanguage_v1beta.types.Schema): - Optional. Output response schema of the generated candidate - text when response mime type can have schema. Schema can be - objects, primitives or arrays and is a subset of `OpenAPI - schema `__. - - If set, a compatible response_mime_type must also be set. - Compatible mimetypes: ``application/json``: Schema for JSON - response. + Optional. Output schema of the generated candidate text. + Schemas must be a subset of the `OpenAPI + schema `__ and + can be objects, primitives or arrays. + + If set, a compatible ``response_mime_type`` must also be + set. Compatible MIME types: ``application/json``: Schema for + JSON response. Refer to the `JSON text generation + guide `__ + for more details. + presence_penalty (float): + Optional. Presence penalty applied to the next token's + logprobs if the token has already been seen in the response. + + This penalty is binary on/off and not dependant on the + number of times the token is used (after the first). Use + [frequency_penalty][google.ai.generativelanguage.v1beta.GenerationConfig.frequency_penalty] + for a penalty that increases with each use. + + A positive penalty will discourage the use of tokens that + have already been used in the response, increasing the + vocabulary. + + A negative penalty will encourage the use of tokens that + have already been used in the response, decreasing the + vocabulary. + + This field is a member of `oneof`_ ``_presence_penalty``. + frequency_penalty (float): + Optional. 
Frequency penalty applied to the next token's + logprobs, multiplied by the number of times each token has + been seen in the respponse so far. + + A positive penalty will discourage the use of tokens that + have already been used, proportional to the number of times + the token has been used: The more a token is used, the more + dificult it is for the model to use that token again + increasing the vocabulary of responses. + + Caution: A *negative* penalty will encourage the model to + reuse tokens proportional to the number of times the token + has been used. Small negative values will reduce the + vocabulary of a response. Larger negative values will cause + the model to start repeating a common token until it hits + the + [max_output_tokens][google.ai.generativelanguage.v1beta.GenerationConfig.max_output_tokens] + limit: "...the the the the the...". + + This field is a member of `oneof`_ ``_frequency_penalty``. + response_logprobs (bool): + Optional. If true, export the logprobs + results in response. + + This field is a member of `oneof`_ ``_response_logprobs``. + logprobs (int): + Optional. Only valid if + [response_logprobs=True][google.ai.generativelanguage.v1beta.GenerationConfig.response_logprobs]. + This sets the number of top logprobs to return at each + decoding step in the + [Candidate.logprobs_result][google.ai.generativelanguage.v1beta.Candidate.logprobs_result]. + + This field is a member of `oneof`_ ``_logprobs``. 
""" candidate_count: int = proto.Field( @@ -323,6 +403,26 @@ class GenerationConfig(proto.Message): number=14, message=gag_content.Schema, ) + presence_penalty: float = proto.Field( + proto.FLOAT, + number=15, + optional=True, + ) + frequency_penalty: float = proto.Field( + proto.FLOAT, + number=16, + optional=True, + ) + response_logprobs: bool = proto.Field( + proto.BOOL, + number=17, + optional=True, + ) + logprobs: int = proto.Field( + proto.INT32, + number=18, + optional=True, + ) class SemanticRetrieverConfig(proto.Message): @@ -334,11 +434,11 @@ class SemanticRetrieverConfig(proto.Message): Attributes: source (str): - Required. Name of the resource for retrieval, - e.g. corpora/123 or corpora/123/documents/abc. + Required. Name of the resource for retrieval. Example: + ``corpora/123`` or ``corpora/123/documents/abc``. query (google.ai.generativelanguage_v1beta.types.Content): - Required. Query to use for similarity matching ``Chunk``\ s - in the given resource. + Required. Query to use for matching ``Chunk``\ s in the + given resource by similarity. metadata_filters (MutableSequence[google.ai.generativelanguage_v1beta.types.MetadataFilter]): Optional. Filters for selecting ``Document``\ s and/or ``Chunk``\ s from the resource. @@ -381,18 +481,16 @@ class SemanticRetrieverConfig(proto.Message): class GenerateContentResponse(proto.Message): - r"""Response from the model supporting multiple candidates. - - Note on safety ratings and content filtering. They are reported for - both prompt in ``GenerateContentResponse.prompt_feedback`` and for - each candidate in ``finish_reason`` and in ``safety_ratings``. The - API contract is that: - - - either all requested candidates are returned or no candidates at - all - - no candidates are returned only if there was something wrong with - the prompt (see ``prompt_feedback``) - - feedback on each candidate is reported on ``finish_reason`` and + r"""Response from the model supporting multiple candidate responses. 
+ + Safety ratings and content filtering are reported for both prompt in + ``GenerateContentResponse.prompt_feedback`` and for each candidate + in ``finish_reason`` and in ``safety_ratings``. The API: + + - Returns either all requested candidates or none of them + - Returns no candidates at all only if there was something wrong + with the prompt (check ``prompt_feedback``) + - Reports feedback on each candidate in ``finish_reason`` and ``safety_ratings``. Attributes: @@ -413,29 +511,35 @@ class PromptFeedback(proto.Message): Attributes: block_reason (google.ai.generativelanguage_v1beta.types.GenerateContentResponse.PromptFeedback.BlockReason): Optional. If set, the prompt was blocked and - no candidates are returned. Rephrase your - prompt. + no candidates are returned. Rephrase the prompt. safety_ratings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetyRating]): Ratings for safety of the prompt. There is at most one rating per category. """ class BlockReason(proto.Enum): - r"""Specifies what was the reason why prompt was blocked. + r"""Specifies the reason why the prompt was blocked. Values: BLOCK_REASON_UNSPECIFIED (0): Default value. This value is unused. SAFETY (1): - Prompt was blocked due to safety reasons. You can inspect + Prompt was blocked due to safety reasons. Inspect ``safety_ratings`` to understand which safety category blocked it. OTHER (2): Prompt was blocked due to unknown reasons. + BLOCKLIST (3): + Prompt was blocked due to the terms which are + included from the terminology blocklist. + PROHIBITED_CONTENT (4): + Prompt was blocked due to prohibited content. """ BLOCK_REASON_UNSPECIFIED = 0 SAFETY = 1 OTHER = 2 + BLOCKLIST = 3 + PROHIBITED_CONTENT = 4 block_reason: "GenerateContentResponse.PromptFeedback.BlockReason" = ( proto.Field( @@ -455,18 +559,18 @@ class UsageMetadata(proto.Message): Attributes: prompt_token_count (int): - Number of tokens in the prompt. 
When cached_content is set, - this is still the total effective prompt size. I.e. this - includes the number of tokens in the cached content. + Number of tokens in the prompt. When ``cached_content`` is + set, this is still the total effective prompt size meaning + this includes the number of tokens in the cached content. cached_content_token_count (int): Number of tokens in the cached part of the - prompt, i.e. in the cached content. + prompt (the cached content) candidates_token_count (int): - Total number of tokens across the generated - candidates. + Total number of tokens across all the + generated response candidates. total_token_count (int): Total token count for the generation request - (prompt + candidates). + (prompt + response candidates). """ prompt_token_count: int = proto.Field( @@ -511,7 +615,7 @@ class Candidate(proto.Message): Attributes: index (int): Output only. Index of the candidate in the - list of candidates. + list of response candidates. This field is a member of `oneof`_ ``_index``. content (google.ai.generativelanguage_v1beta.types.Content): @@ -521,7 +625,7 @@ class Candidate(proto.Message): Optional. Output only. The reason why the model stopped generating tokens. If empty, the model has not stopped generating - the tokens. + tokens. safety_ratings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetyRating]): List of ratings for the safety of a response candidate. @@ -541,6 +645,15 @@ class Candidate(proto.Message): contributed to a grounded answer. This field is populated for ``GenerateAnswer`` calls. + grounding_metadata (google.ai.generativelanguage_v1beta.types.GroundingMetadata): + Output only. Grounding metadata for the candidate. + + This field is populated for ``GenerateContent`` calls. + avg_logprobs (float): + Output only. + logprobs_result (google.ai.generativelanguage_v1beta.types.LogprobsResult): + Output only. 
Log-likelihood scores for the + response tokens and top tokens """ class FinishReason(proto.Enum): @@ -556,20 +669,41 @@ class FinishReason(proto.Enum): The maximum number of tokens as specified in the request was reached. SAFETY (3): - The candidate content was flagged for safety - reasons. + The response candidate content was flagged + for safety reasons. RECITATION (4): - The candidate content was flagged for - recitation reasons. + The response candidate content was flagged + for recitation reasons. + LANGUAGE (6): + The response candidate content was flagged + for using an unsupported language. OTHER (5): Unknown reason. + BLOCKLIST (7): + Token generation stopped because the content + contains forbidden terms. + PROHIBITED_CONTENT (8): + Token generation stopped for potentially + containing prohibited content. + SPII (9): + Token generation stopped because the content + potentially contains Sensitive Personally + Identifiable Information (SPII). + MALFORMED_FUNCTION_CALL (10): + The function call generated by the model is + invalid. """ FINISH_REASON_UNSPECIFIED = 0 STOP = 1 MAX_TOKENS = 2 SAFETY = 3 RECITATION = 4 + LANGUAGE = 6 OTHER = 5 + BLOCKLIST = 7 + PROHIBITED_CONTENT = 8 + SPII = 9 + MALFORMED_FUNCTION_CALL = 10 index: int = proto.Field( proto.INT32, @@ -607,6 +741,94 @@ class FinishReason(proto.Enum): number=8, message="GroundingAttribution", ) + grounding_metadata: "GroundingMetadata" = proto.Field( + proto.MESSAGE, + number=9, + message="GroundingMetadata", + ) + avg_logprobs: float = proto.Field( + proto.DOUBLE, + number=10, + ) + logprobs_result: "LogprobsResult" = proto.Field( + proto.MESSAGE, + number=11, + message="LogprobsResult", + ) + + +class LogprobsResult(proto.Message): + r"""Logprobs Result + + Attributes: + top_candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.LogprobsResult.TopCandidates]): + Length = total number of decoding steps. 
+ chosen_candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.LogprobsResult.Candidate]): + Length = total number of decoding steps. The chosen + candidates may or may not be in top_candidates. + """ + + class Candidate(proto.Message): + r"""Candidate for the logprobs token and score. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + token (str): + The candidate’s token string value. + + This field is a member of `oneof`_ ``_token``. + token_id (int): + The candidate’s token id value. + + This field is a member of `oneof`_ ``_token_id``. + log_probability (float): + The candidate's log probability. + + This field is a member of `oneof`_ ``_log_probability``. + """ + + token: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + token_id: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + log_probability: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class TopCandidates(proto.Message): + r"""Candidates with top log probabilities at each decoding step. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.LogprobsResult.Candidate]): + Sorted by log probability in descending + order. + """ + + candidates: MutableSequence["LogprobsResult.Candidate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogprobsResult.Candidate", + ) + + top_candidates: MutableSequence[TopCandidates] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=TopCandidates, + ) + chosen_candidates: MutableSequence[Candidate] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Candidate, + ) class AttributionSourceId(proto.Message): @@ -713,8 +935,221 @@ class GroundingAttribution(proto.Message): ) +class RetrievalMetadata(proto.Message): + r"""Metadata related to retrieval in the grounding flow. + + Attributes: + google_search_dynamic_retrieval_score (float): + Optional. 
Score indicating how likely information from + google search could help answer the prompt. The score is in + the range [0, 1], where 0 is the least likely and 1 is the + most likely. This score is only populated when google search + grounding and dynamic retrieval is enabled. It will be + compared to the threshold to determine whether to trigger + google search. + """ + + google_search_dynamic_retrieval_score: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class GroundingMetadata(proto.Message): + r"""Metadata returned to client when grounding is enabled. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + search_entry_point (google.ai.generativelanguage_v1beta.types.SearchEntryPoint): + Optional. Google search entry for the + following-up web searches. + + This field is a member of `oneof`_ ``_search_entry_point``. + grounding_chunks (MutableSequence[google.ai.generativelanguage_v1beta.types.GroundingChunk]): + List of supporting references retrieved from + specified grounding source. + grounding_supports (MutableSequence[google.ai.generativelanguage_v1beta.types.GroundingSupport]): + List of grounding support. + retrieval_metadata (google.ai.generativelanguage_v1beta.types.RetrievalMetadata): + Metadata related to retrieval in the + grounding flow. + + This field is a member of `oneof`_ ``_retrieval_metadata``. 
+ """ + + search_entry_point: "SearchEntryPoint" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="SearchEntryPoint", + ) + grounding_chunks: MutableSequence["GroundingChunk"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="GroundingChunk", + ) + grounding_supports: MutableSequence["GroundingSupport"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="GroundingSupport", + ) + retrieval_metadata: "RetrievalMetadata" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="RetrievalMetadata", + ) + + +class SearchEntryPoint(proto.Message): + r"""Google search entry point. + + Attributes: + rendered_content (str): + Optional. Web content snippet that can be + embedded in a web page or an app webview. + sdk_blob (bytes): + Optional. Base64 encoded JSON representing + array of tuple. + """ + + rendered_content: str = proto.Field( + proto.STRING, + number=1, + ) + sdk_blob: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class GroundingChunk(proto.Message): + r"""Grounding chunk. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + web (google.ai.generativelanguage_v1beta.types.GroundingChunk.Web): + Grounding chunk from the web. + + This field is a member of `oneof`_ ``chunk_type``. + """ + + class Web(proto.Message): + r"""Chunk from the web. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + URI reference of the chunk. + + This field is a member of `oneof`_ ``_uri``. + title (str): + Title of the chunk. + + This field is a member of `oneof`_ ``_title``. 
+ """ + + uri: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + title: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + web: Web = proto.Field( + proto.MESSAGE, + number=1, + oneof="chunk_type", + message=Web, + ) + + +class Segment(proto.Message): + r"""Segment of the content. + + Attributes: + part_index (int): + Output only. The index of a Part object + within its parent Content object. + start_index (int): + Output only. Start index in the given Part, + measured in bytes. Offset from the start of the + Part, inclusive, starting at zero. + end_index (int): + Output only. End index in the given Part, + measured in bytes. Offset from the start of the + Part, exclusive, starting at zero. + text (str): + Output only. The text corresponding to the + segment from the response. + """ + + part_index: int = proto.Field( + proto.INT32, + number=1, + ) + start_index: int = proto.Field( + proto.INT32, + number=2, + ) + end_index: int = proto.Field( + proto.INT32, + number=3, + ) + text: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GroundingSupport(proto.Message): + r"""Grounding support. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + segment (google.ai.generativelanguage_v1beta.types.Segment): + Segment of the content this support belongs + to. + + This field is a member of `oneof`_ ``_segment``. + grounding_chunk_indices (MutableSequence[int]): + A list of indices (into 'grounding_chunk') specifying the + citations associated with the claim. For instance [1,3,4] + means that grounding_chunk[1], grounding_chunk[3], + grounding_chunk[4] are the retrieved content attributed to + the claim. + confidence_scores (MutableSequence[float]): + Confidence score of the support references. Ranges from 0 to + 1. 1 is the most confident. This list must have the same + size as the grounding_chunk_indices. 
+ """ + + segment: "Segment" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="Segment", + ) + grounding_chunk_indices: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=2, + ) + confidence_scores: MutableSequence[float] = proto.RepeatedField( + proto.FLOAT, + number=3, + ) + + class GenerateAnswerRequest(proto.Message): - r"""Request to generate a grounded answer from the model. + r"""Request to generate a grounded answer from the ``Model``. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -740,13 +1175,12 @@ class GenerateAnswerRequest(proto.Message): Format: ``model=models/{model}``. contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): Required. The content of the current conversation with the - model. For single-turn queries, this is a single question to - answer. For multi-turn queries, this is a repeated field - that contains conversation history and the last ``Content`` - in the list containing the question. + ``Model``. For single-turn queries, this is a single + question to answer. For multi-turn queries, this is a + repeated field that contains conversation history and the + last ``Content`` in the list containing the question. - Note: GenerateAnswer currently only supports queries in - English. + Note: ``GenerateAnswer`` only supports queries in English. answer_style (google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest.AnswerStyle): Required. Style in which answers should be returned. @@ -767,7 +1201,13 @@ class GenerateAnswerRequest(proto.Message): categories HARM_CATEGORY_HATE_SPEECH, HARM_CATEGORY_SEXUALLY_EXPLICIT, HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT - are supported. + are supported. Refer to the + `guide `__ + for detailed information on available safety settings. 
Also + refer to the `Safety + guidance `__ + to learn how to incorporate safety considerations in your AI + applications. temperature (float): Optional. Controls the randomness of the output. @@ -858,26 +1298,25 @@ class GenerateAnswerResponse(proto.Message): Output only. The model's estimate of the probability that its answer is correct and grounded in the input passages. - A low answerable_probability indicates that the answer might - not be grounded in the sources. + A low ``answerable_probability`` indicates that the answer + might not be grounded in the sources. - When ``answerable_probability`` is low, some clients may - wish to: + When ``answerable_probability`` is low, you may want to: - Display a message to the effect of "We couldn’t answer that question" to the user. - Fall back to a general-purpose LLM that answers the question from world knowledge. The threshold and nature - of such fallbacks will depend on individual clients’ use - cases. 0.5 is a good starting threshold. + of such fallbacks will depend on individual use cases. + ``0.5`` is a good starting threshold. This field is a member of `oneof`_ ``_answerable_probability``. input_feedback (google.ai.generativelanguage_v1beta.types.GenerateAnswerResponse.InputFeedback): Output only. Feedback related to the input data used to - answer the question, as opposed to model-generated response - to the question. + answer the question, as opposed to the model-generated + response to the question. - "Input data" can be one or more of the following: + The input data can be one or more of the following: - Question specified by the last entry in ``GenerateAnswerRequest.content`` @@ -892,7 +1331,7 @@ class GenerateAnswerResponse(proto.Message): class InputFeedback(proto.Message): r"""Feedback related to the input data used to answer the - question, as opposed to model-generated response to the + question, as opposed to the model-generated response to the question. 
@@ -901,7 +1340,7 @@ class InputFeedback(proto.Message): Attributes: block_reason (google.ai.generativelanguage_v1beta.types.GenerateAnswerResponse.InputFeedback.BlockReason): Optional. If set, the input was blocked and - no candidates are returned. Rephrase your input. + no candidates are returned. Rephrase the input. This field is a member of `oneof`_ ``_block_reason``. safety_ratings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetyRating]): @@ -916,7 +1355,7 @@ class BlockReason(proto.Enum): BLOCK_REASON_UNSPECIFIED (0): Default value. This value is unused. SAFETY (1): - Input was blocked due to safety reasons. You can inspect + Input was blocked due to safety reasons. Inspect ``safety_ratings`` to understand which safety category blocked it. OTHER (2): @@ -990,8 +1429,8 @@ class EmbedContentRequest(proto.Message): Optional. Optional reduced dimension for the output embedding. If set, excessive values in the output embedding are truncated from the end. Supported by newer models since - 2024, and the earlier model (``models/embedding-001``) - cannot specify this value. + 2024 only. You cannot set this value if using the earlier + model (``models/embedding-001``). This field is a member of `oneof`_ ``_output_dimensionality``. """ @@ -1119,9 +1558,16 @@ class CountTokensRequest(proto.Message): Optional. The input given to the model as a prompt. This field is ignored when ``generate_content_request`` is set. generate_content_request (google.ai.generativelanguage_v1beta.types.GenerateContentRequest): - Optional. The overall input given to the - model. CountTokens will count prompt, function - calling, etc. + Optional. The overall input given to the ``Model``. This + includes the prompt as well as other model steering + information like `system + instructions `__, + and/or function declarations for `function + calling `__. + ``Model``\ s/\ ``Content``\ s and + ``generate_content_request``\ s are mutually exclusive. 
You + can either send ``Model`` + ``Content``\ s or a + ``generate_content_request``, but never both. """ model: str = proto.Field( @@ -1147,15 +1593,11 @@ class CountTokensResponse(proto.Message): Attributes: total_tokens (int): - The number of tokens that the ``model`` tokenizes the - ``prompt`` into. - - Always non-negative. When cached_content is set, this is - still the total effective prompt size. I.e. this includes - the number of tokens in the cached content. + The number of tokens that the ``Model`` tokenizes the + ``prompt`` into. Always non-negative. cached_content_token_count (int): Number of tokens in the cached part of the - prompt, i.e. in the cached content. + prompt (the cached content). """ total_tokens: int = proto.Field( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py index 82dbdd515047..13c91f98f341 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py @@ -34,7 +34,10 @@ class Model(proto.Message): Attributes: name (str): - Required. The resource name of the ``Model``. + Required. The resource name of the ``Model``. Refer to + `Model + variants `__ + for all allowed values. Format: ``models/{model}`` with a ``{model}`` naming convention of: @@ -43,21 +46,21 @@ class Model(proto.Message): Examples: - - ``models/chat-bison-001`` + - ``models/gemini-1.5-flash-001`` base_model_id (str): Required. The name of the base model, pass this to the generation request. Examples: - - ``chat-bison`` + - ``gemini-1.5-flash`` version (str): Required. The version number of the model. - This represents the major version + This represents the major version (``1.0`` or ``1.5``) display_name (str): The human-readable name of the model. E.g. - "Chat Bison". + "Gemini 1.5 Flash". 
The name can be up to 128 characters long and can consist of any UTF-8 characters. description (str): @@ -71,8 +74,9 @@ class Model(proto.Message): supported_generation_methods (MutableSequence[str]): The model's supported generation methods. - The method names are defined as Pascal case strings, such as - ``generateMessage`` which correspond to API methods. + The corresponding API method names are defined as Pascal + case strings, such as ``generateMessage`` and + ``generateContent``. temperature (float): Controls the randomness of the output. @@ -89,7 +93,8 @@ class Model(proto.Message): This field is a member of `oneof`_ ``_max_temperature``. top_p (float): - For Nucleus sampling. + For `Nucleus + sampling `__. Nucleus sampling considers the smallest set of tokens whose probability sum is at least ``top_p``. This value specifies diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model_service.py index a49938f4feb4..934be96ddd56 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model_service.py @@ -66,10 +66,9 @@ class ListModelsRequest(proto.Message): page_size (int): The maximum number of ``Models`` to return (per page). - The service may return fewer models. If unspecified, at most - 50 models will be returned per page. This method returns at - most 1000 models per page, even if you pass a larger - page_size. + If unspecified, 50 models will be returned per page. This + method returns at most 1000 models per page, even if you + pass a larger page_size. page_token (str): A page token, received from a previous ``ListModels`` call. 
@@ -232,7 +231,7 @@ class CreateTunedModelRequest(proto.Message): This value should be up to 40 characters, the first character must be a letter, the last could be a letter or a number. The id must match the regular expression: - `a-z <[a-z0-9-]{0,38}[a-z0-9]>`__?. + ``[a-z]([a-z0-9-]{0,38}[a-z0-9])?``. This field is a member of `oneof`_ ``_tuned_model_id``. tuned_model (google.ai.generativelanguage_v1beta.types.TunedModel): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py new file mode 100644 index 000000000000..b6a659782edf --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "PredictRequest", + "PredictResponse", + }, +) + + +class PredictRequest(proto.Message): + r"""Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + + Attributes: + model (str): + Required. 
The name of the model for prediction. Format: + ``name=models/{model}``. + instances (MutableSequence[google.protobuf.struct_pb2.Value]): + Required. The instances that are the input to + the prediction call. + parameters (google.protobuf.struct_pb2.Value): + Optional. The parameters that govern the + prediction call. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + instances: MutableSequence[struct_pb2.Value] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + parameters: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Value, + ) + + +class PredictResponse(proto.Message): + r"""Response message for [PredictionService.Predict]. + + Attributes: + predictions (MutableSequence[google.protobuf.struct_pb2.Value]): + The outputs of the prediction call. + """ + + predictions: MutableSequence[struct_pb2.Value] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=struct_pb2.Value, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py index 113590701d4b..8ede1042a0ac 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py @@ -41,31 +41,32 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_UNSPECIFIED (0): Category is unspecified. HARM_CATEGORY_DEROGATORY (1): - Negative or harmful comments targeting - identity and/or protected attribute. + **PaLM** - Negative or harmful comments targeting identity + and/or protected attribute. HARM_CATEGORY_TOXICITY (2): - Content that is rude, disrespectful, or - profane. + **PaLM** - Content that is rude, disrespectful, or profane. 
HARM_CATEGORY_VIOLENCE (3): - Describes scenarios depicting violence - against an individual or group, or general - descriptions of gore. + **PaLM** - Describes scenarios depicting violence against an + individual or group, or general descriptions of gore. HARM_CATEGORY_SEXUAL (4): - Contains references to sexual acts or other - lewd content. + **PaLM** - Contains references to sexual acts or other lewd + content. HARM_CATEGORY_MEDICAL (5): - Promotes unchecked medical advice. + **PaLM** - Promotes unchecked medical advice. HARM_CATEGORY_DANGEROUS (6): - Dangerous content that promotes, facilitates, - or encourages harmful acts. + **PaLM** - Dangerous content that promotes, facilitates, or + encourages harmful acts. HARM_CATEGORY_HARASSMENT (7): - Harasment content. + **Gemini** - Harassment content. HARM_CATEGORY_HATE_SPEECH (8): - Hate speech and content. + **Gemini** - Hate speech and content. HARM_CATEGORY_SEXUALLY_EXPLICIT (9): - Sexually explicit content. + **Gemini** - Sexually explicit content. HARM_CATEGORY_DANGEROUS_CONTENT (10): - Dangerous content. + **Gemini** - Dangerous content. + HARM_CATEGORY_CIVIC_INTEGRITY (11): + **Gemini** - Content that may be used to harm civic + integrity. """ HARM_CATEGORY_UNSPECIFIED = 0 HARM_CATEGORY_DEROGATORY = 1 @@ -78,6 +79,7 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_HATE_SPEECH = 8 HARM_CATEGORY_SEXUALLY_EXPLICIT = 9 HARM_CATEGORY_DANGEROUS_CONTENT = 10 + HARM_CATEGORY_CIVIC_INTEGRITY = 11 class ContentFilter(proto.Message): @@ -249,12 +251,15 @@ class HarmBlockThreshold(proto.Enum): be allowed. BLOCK_NONE (4): All content will be allowed. + OFF (5): + Turn off the safety filter. 
""" HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0 BLOCK_LOW_AND_ABOVE = 1 BLOCK_MEDIUM_AND_ABOVE = 2 BLOCK_ONLY_HIGH = 3 BLOCK_NONE = 4 + OFF = 5 category: "HarmCategory" = proto.Field( proto.ENUM, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py index 97d190d45489..3be0f7e6b586 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py @@ -54,7 +54,7 @@ class TunedModel(proto.Message): This field is a member of `oneof`_ ``source_model``. base_model (str): Immutable. The name of the ``Model`` to tune. Example: - ``models/text-bison-001`` + ``models/gemini-1.5-flash-001`` This field is a member of `oneof`_ ``source_model``. name (str): @@ -63,8 +63,11 @@ class TunedModel(proto.Message): display_name is set on create, the id portion of the name will be set by concatenating the words of the display_name with hyphens and adding a random portion for uniqueness. - Example: display_name = "Sentence Translator" name = - "tunedModels/sentence-translator-u3b7m". + + Example: + + - display_name = ``Sentence Translator`` + - name = ``tunedModels/sentence-translator-u3b7m`` display_name (str): Optional. The name to display for this model in user interfaces. The display name must be up @@ -115,6 +118,9 @@ class TunedModel(proto.Message): tuning_task (google.ai.generativelanguage_v1beta.types.TuningTask): Required. The tuning task that creates the tuned model. + reader_project_numbers (MutableSequence[int]): + Optional. List of project numbers that have + read access to the tuned model. 
""" class State(proto.Enum): @@ -193,6 +199,10 @@ class State(proto.Enum): number=10, message="TuningTask", ) + reader_project_numbers: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=14, + ) class TunedModelSource(proto.Message): @@ -206,7 +216,7 @@ class TunedModelSource(proto.Message): base_model (str): Output only. The name of the base ``Model`` this ``TunedModel`` was tuned from. Example: - ``models/text-bison-001`` + ``models/gemini-1.5-flash-001`` """ tuned_model: str = proto.Field( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py index 911c2d1dfcef..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/async_client.py index 7e192545aa93..6413d76f3cbd 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DiscussServiceClient).get_transport_class, type(DiscussServiceClient) - ) + get_transport_class = DiscussServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py index 9c93c822fda1..18bbc6108ddd 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py @@ -656,7 +656,7 @@ def __init__( transport_init: Union[ Type[DiscussServiceTransport], Callable[..., DiscussServiceTransport] ] = ( - type(self).get_transport_class(transport) + DiscussServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DiscussServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/async_client.py index cd7ca4b19b5c..53f2c11e8a5c 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ModelServiceClient).get_transport_class, type(ModelServiceClient) - ) + get_transport_class = ModelServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py index 45998abaf48f..b4c965681988 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py @@ -650,7 +650,7 @@ def __init__( transport_init: Union[ Type[ModelServiceTransport], Callable[..., ModelServiceTransport] ] = ( - type(self).get_transport_class(transport) + ModelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ModelServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/async_client.py index 2d648ebfd134..14b3d745095b 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,9 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TextServiceClient).get_transport_class, type(TextServiceClient) - ) + get_transport_class = TextServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py index d7f49966172f..74a4d9424c53 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py @@ -651,7 +651,7 @@ def __init__( transport_init: Union[ Type[TextServiceTransport], Callable[..., TextServiceTransport] ] = ( - type(self).get_transport_class(transport) + TextServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TextServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py index 911c2d1dfcef..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/async_client.py index 12f8cd1d7215..0683a984cae1 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DiscussServiceClient).get_transport_class, type(DiscussServiceClient) - ) + get_transport_class = DiscussServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py index 15ec3f87e353..3f96ccafb73a 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py @@ -658,7 +658,7 @@ def __init__( transport_init: Union[ Type[DiscussServiceTransport], Callable[..., DiscussServiceTransport] ] = ( - type(self).get_transport_class(transport) + DiscussServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DiscussServiceTransport], transport) ) diff --git 
a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/async_client.py index 991704110b98..c51657477468 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ModelServiceClient).get_transport_class, type(ModelServiceClient) - ) + get_transport_class = ModelServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py index 96f90e217bc6..f76ac868c667 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py @@ -673,7 +673,7 @@ def __init__( transport_init: Union[ Type[ModelServiceTransport], Callable[..., ModelServiceTransport] ] = ( - type(self).get_transport_class(transport) + ModelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ModelServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py 
b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py index c7829c1a55d8..bc1bb058d9ff 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PermissionServiceClient).get_transport_class, type(PermissionServiceClient) - ) + get_transport_class = PermissionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py index aef954c40823..fbbd86c3e7d5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py @@ -684,7 +684,7 @@ def __init__( Type[PermissionServiceTransport], Callable[..., PermissionServiceTransport], ] = ( - type(self).get_transport_class(transport) + PermissionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PermissionServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py index 
41e23b173930..a96955194c9d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py @@ -395,11 +395,11 @@ def __call__( role is a superset of the previous role's permitted operations: - - reader can use the resource (e.g. + - reader can use the resource (e.g. tuned model) for inference - - writer has reader's permissions and + - writer has reader's permissions and additionally can edit and share - - owner has writer's permissions and + - owner has writer's permissions and additionally can delete """ @@ -588,11 +588,11 @@ def __call__( role is a superset of the previous role's permitted operations: - - reader can use the resource (e.g. + - reader can use the resource (e.g. tuned model) for inference - - writer has reader's permissions and + - writer has reader's permissions and additionally can edit and share - - owner has writer's permissions and + - owner has writer's permissions and additionally can delete """ @@ -882,11 +882,11 @@ def __call__( role is a superset of the previous role's permitted operations: - - reader can use the resource (e.g. + - reader can use the resource (e.g. 
tuned model) for inference - - writer has reader's permissions and + - writer has reader's permissions and additionally can edit and share - - owner has writer's permissions and + - owner has writer's permissions and additionally can delete """ diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/async_client.py index 8c2138d0991e..69c1df66a614 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TextServiceClient).get_transport_class, type(TextServiceClient) - ) + get_transport_class = TextServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py index a13903fde0a4..ad257c9d2909 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py @@ -653,7 +653,7 @@ def __init__( transport_init: Union[ Type[TextServiceTransport], Callable[..., TextServiceTransport] ] = ( - type(self).get_transport_class(transport) + TextServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., 
TextServiceTransport], transport) ) diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py new file mode 100644 index 000000000000..851ebfa44e4d --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Predict +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PredictionService_Predict_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceAsyncClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = await client.predict(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PredictionService_Predict_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py new file mode 100644 index 000000000000..ade0be26d986 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for Predict +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PredictionService_Predict_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = client.predict(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PredictionService_Predict_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json index c1b9855b9786..416353581730 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.8" 
+ "version": "0.6.10" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json index 24523bd58674..a2110fd118ef 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.8" + "version": "0.6.10" }, "snippets": [ { @@ -4953,6 +4953,175 @@ ], "title": "generativelanguage_v1beta_generated_permission_service_update_permission_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceAsyncClient", + "shortName": "PredictionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceAsyncClient.predict", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService.Predict", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService", + "shortName": "PredictionService" + }, + "shortName": "Predict" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.PredictRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "instances", + "type": "MutableSequence[google.protobuf.struct_pb2.Value]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.PredictResponse", + "shortName": "predict" + }, + "description": "Sample for Predict", + 
"file": "generativelanguage_v1beta_generated_prediction_service_predict_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PredictionService_Predict_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_prediction_service_predict_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceClient", + "shortName": "PredictionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceClient.predict", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService.Predict", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService", + "shortName": "PredictionService" + }, + "shortName": "Predict" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.PredictRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "instances", + "type": "MutableSequence[google.protobuf.struct_pb2.Value]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.PredictResponse", + "shortName": "predict" + }, + "description": "Sample for Predict", + "file": "generativelanguage_v1beta_generated_prediction_service_predict_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"generativelanguage_v1beta_generated_PredictionService_Predict_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_prediction_service_predict_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json index d7f8a8be4c13..865de14ffa13 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.8" + "version": "0.6.10" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json index 3d177a97e009..7fbde27c9197 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.8" + "version": "0.6.10" }, 
"snippets": [ { diff --git a/packages/google-ai-generativelanguage/scripts/client-post-processing/doc-formatting.yaml b/packages/google-ai-generativelanguage/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-ai-generativelanguage/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ +../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py index dcb5cdfbb55c..8e69225c75de 100644 --- a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py +++ b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py @@ -83,6 +83,7 @@ class generativelanguageCallTransformer(cst.CSTTransformer): 'list_models': ('page_size', 'page_token', ), 'list_permissions': ('parent', 'page_size', 'page_token', ), 'list_tuned_models': ('page_size', 'page_token', 'filter', ), + 'predict': ('model', 'instances', 'parameters', ), 'query_corpus': ('name', 'query', 'metadata_filters', 'results_count', ), 'query_document': ('name', 'query', 'results_count', 'metadata_filters', ), 'stream_generate_content': ('model', 'contents', 'system_instruction', 'tools', 'tool_config', 'safety_settings', 'generation_config', 'cached_content', ), diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py index 1fcb053b7910..24d7d6fbcddc 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py @@ 
-1318,22 +1318,23 @@ async def test_generate_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_content - ] = mock_object + ] = mock_rpc request = {} await client.generate_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1700,22 +1701,23 @@ async def test_stream_generate_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stream_generate_content - ] = mock_object + ] = mock_rpc request = {} await client.stream_generate_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.stream_generate_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2080,22 +2082,23 @@ async def test_embed_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.embed_content - ] = mock_object + ] = mock_rpc request = {} await client.embed_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.embed_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2458,22 +2461,23 @@ async def test_batch_embed_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_embed_contents - ] = mock_object + ] = mock_rpc request = {} await client.batch_embed_contents(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_embed_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2840,22 +2844,23 @@ async def test_count_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_tokens(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py index fe9436e4d83c..c5ec01d8390e 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py @@ -1125,6 +1125,7 @@ def test_get_model(request_type, transport: str = "grpc"): output_token_limit=1967, supported_generation_methods=["supported_generation_methods_value"], temperature=0.1198, + max_temperature=0.16190000000000002, top_p=0.546, top_k=541, ) @@ -1149,6 +1150,7 @@ def test_get_model(request_type, transport: str = "grpc"): "supported_generation_methods_value" ] assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert 
math.isclose(response.max_temperature, 0.16190000000000002, rel_tol=1e-6) assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 @@ -1258,6 +1260,7 @@ async def test_get_model_empty_call_async(): output_token_limit=1967, supported_generation_methods=["supported_generation_methods_value"], temperature=0.1198, + max_temperature=0.16190000000000002, top_p=0.546, top_k=541, ) @@ -1289,22 +1292,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1334,6 +1338,7 @@ async def test_get_model_async( output_token_limit=1967, supported_generation_methods=["supported_generation_methods_value"], temperature=0.1198, + max_temperature=0.16190000000000002, top_p=0.546, top_k=541, ) @@ -1359,6 +1364,7 @@ async def test_get_model_async( "supported_generation_methods_value" ] assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.max_temperature, 0.16190000000000002, rel_tol=1e-6) assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 @@ -1671,22 +1677,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ 
client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2041,6 +2048,7 @@ def test_get_model_rest(request_type): output_token_limit=1967, supported_generation_methods=["supported_generation_methods_value"], temperature=0.1198, + max_temperature=0.16190000000000002, top_p=0.546, top_k=541, ) @@ -2069,6 +2077,7 @@ def test_get_model_rest(request_type): "supported_generation_methods_value" ] assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.max_temperature, 0.16190000000000002, rel_tol=1e-6) assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py index becb6c3615c0..1cc7b01d0c19 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py @@ -1280,22 +1280,23 @@ async def test_list_cached_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_cached_contents - ] = mock_object + ] = mock_rpc request = {} await client.list_cached_contents(request) # Establish that the underlying gRPC stub method was 
called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_cached_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1718,22 +1719,23 @@ async def test_create_cached_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cached_content - ] = mock_object + ] = mock_rpc request = {} await client.create_cached_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_cached_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2062,22 +2064,23 @@ async def test_get_cached_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cached_content - ] = mock_object + ] = mock_rpc request = {} await client.get_cached_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cached_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2455,22 +2458,23 @@ async def test_update_cached_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cached_content - ] = mock_object + ] = mock_rpc request = {} await client.update_cached_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_cached_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2862,22 +2866,23 @@ async def test_delete_cached_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cached_content - ] = mock_object + ] = mock_rpc request = {} await client.delete_cached_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_cached_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3343,11 +3348,16 @@ def test_create_cached_content_rest(request_type): "nullable": True, "enum": ["enum_value1", "enum_value2"], "items": {}, + "max_items": 967, + "min_items": 965, "properties": {}, "required": ["required_value1", "required_value2"], }, } ], + "google_search_retrieval": { + "dynamic_retrieval_config": {"mode": 1, "dynamic_threshold": 0.1809} + }, "code_execution": {}, } ], @@ -4090,11 +4100,16 @@ def test_update_cached_content_rest(request_type): "nullable": True, "enum": ["enum_value1", "enum_value2"], "items": {}, + "max_items": 967, + "min_items": 965, "properties": {}, "required": ["required_value1", "required_value2"], }, } ], + "google_search_retrieval": { + "dynamic_retrieval_config": {"mode": 1, "dynamic_threshold": 0.1809} + }, "code_execution": {}, } ], diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py index 2f193871150e..90e1265131cc 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py @@ -1278,22 +1278,23 @@ async def test_generate_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_message - ] = mock_object + ] = mock_rpc request = {} await client.generate_message(request) # 
Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1693,22 +1694,23 @@ async def test_count_message_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_message_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_message_tokens(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_message_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py index 9b544412dcbe..d7a595504369 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py @@ -1240,22 +1240,23 @@ async def test_create_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_file - ] = mock_object + ] = mock_rpc request = {} await client.create_file(request) 
# Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1456,22 +1457,23 @@ async def test_list_files_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_files - ] = mock_object + ] = mock_rpc request = {} await client.list_files(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1884,22 +1886,23 @@ async def test_get_file_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_file - ] = mock_object + ] = mock_rpc request = {} await client.get_file(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2249,22 +2252,23 @@ async def test_delete_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_file - ] = mock_object + ] = mock_rpc request = {} await client.delete_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py index 61b7792b3d1e..b73be3c92750 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py @@ -1325,22 +1325,23 @@ async def test_generate_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_content - ] = mock_object + ] = mock_rpc request = {} await client.generate_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1697,22 +1698,23 @@ async def test_generate_answer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_answer - ] = mock_object + ] = mock_rpc request = {} await client.generate_answer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_answer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2124,22 +2126,23 @@ async def test_stream_generate_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stream_generate_content - ] = mock_object + ] = mock_rpc request = {} await client.stream_generate_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.stream_generate_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2504,22 +2507,23 @@ async def test_embed_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.embed_content - ] = mock_object + ] = mock_rpc request = {} await client.embed_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.embed_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2882,22 +2886,23 @@ async def test_batch_embed_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_embed_contents - ] = mock_object + ] = mock_rpc request = {} await client.batch_embed_contents(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_embed_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3267,22 +3272,23 @@ async def test_count_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_tokens(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py index 7cd36f346f06..93ee6b5f5eb4 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py @@ -1305,22 +1305,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1689,22 +1690,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2057,6 +2059,7 @@ def test_get_tuned_model(request_type, transport: str = "grpc"): top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) response = client.get_tuned_model(request) @@ -2076,6 +2079,7 @@ def test_get_tuned_model(request_type, transport: str = "grpc"): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_get_tuned_model_empty_call(): @@ -2181,6 +2185,7 @@ async def test_get_tuned_model_empty_call_async(): top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.get_tuned_model() @@ -2212,22 +2217,23 @@ async def test_get_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = 
mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.get_tuned_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2255,6 +2261,7 @@ async def test_get_tuned_model_async( top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.get_tuned_model(request) @@ -2274,6 +2281,7 @@ async def test_get_tuned_model_async( assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] @pytest.mark.asyncio @@ -2600,22 +2608,23 @@ async def test_list_tuned_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tuned_models - ] = mock_object + ] = mock_rpc request = {} await client.list_tuned_models(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tuned_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3072,8 +3081,9 @@ def test_create_tuned_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_tuned_model(request) @@ -3129,26 +3139,28 @@ async def test_create_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.create_tuned_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3339,6 +3351,7 @@ def test_update_tuned_model(request_type, transport: str = "grpc"): top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) response = client.update_tuned_model(request) @@ -3358,6 +3371,7 @@ def test_update_tuned_model(request_type, transport: str = "grpc"): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_update_tuned_model_empty_call(): @@ -3469,6 +3483,7 @@ async def test_update_tuned_model_empty_call_async(): top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.update_tuned_model() @@ -3500,22 +3515,23 @@ async def test_update_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.update_tuned_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3545,6 +3561,7 @@ async def test_update_tuned_model_async( top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.update_tuned_model(request) @@ -3564,6 +3581,7 @@ async def test_update_tuned_model_async( assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] @pytest.mark.asyncio @@ -3925,22 +3943,23 @@ async def test_delete_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_tuned_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4748,6 +4767,7 @@ def test_get_tuned_model_rest(request_type): top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) @@ -4771,6 +4791,7 @@ def test_get_tuned_model_rest(request_type): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_get_tuned_model_rest_use_cached_wrapped_rpc(): @@ -5363,6 +5384,7 @@ def test_create_tuned_model_rest(request_type): "batch_size": 1052, }, }, + "reader_project_numbers": [2341, 2342], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5775,6 +5797,7 @@ def test_update_tuned_model_rest(request_type): "batch_size": 1052, }, }, + "reader_project_numbers": [2341, 2342], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -5856,6 +5879,7 @@ def get_message_fields(field): top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) @@ -5879,6 +5903,7 @@ def get_message_fields(field): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_update_tuned_model_rest_use_cached_wrapped_rpc(): diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py index d6e039a3fdef..4a5b16af4ee0 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py @@ -1342,22 +1342,23 @@ async def test_create_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_permission - ] = mock_object + ] = mock_rpc request = {} await client.create_permission(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1742,22 +1743,23 @@ async def test_get_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_permission - ] = mock_object + ] = mock_rpc request = {} await client.get_permission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2117,22 +2119,23 @@ async def test_list_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_permissions - ] = mock_object + ] = mock_rpc request = {} await client.list_permissions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2691,22 +2694,23 @@ async def test_update_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_permission - ] = mock_object + ] = mock_rpc request = {} await client.update_permission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3085,22 +3089,23 @@ async def test_delete_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_permission - ] = mock_object + ] = mock_rpc request = {} await client.delete_permission(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3460,22 +3465,23 @@ async def test_transfer_ownership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.transfer_ownership - ] = mock_object + ] = mock_rpc request = {} await client.transfer_ownership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.transfer_ownership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_creative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py similarity index 55% rename from packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_creative_service.py rename to packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py index 1fae403e50f6..5532205c7f0a 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_creative_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py @@ -36,7 +36,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import json_format -from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf 
import struct_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -45,12 +45,12 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.ads.admanager_v1.services.creative_service import ( - CreativeServiceClient, - pagers, +from google.ai.generativelanguage_v1beta.services.prediction_service import ( + PredictionServiceAsyncClient, + PredictionServiceClient, transports, ) -from google.ads.admanager_v1.types import ad_partner_declaration, creative_service +from google.ai.generativelanguage_v1beta.types import prediction_service def client_cert_source_callback(): @@ -86,40 +86,45 @@ def test__get_default_mtls_endpoint(): sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" - assert CreativeServiceClient._get_default_mtls_endpoint(None) is None + assert PredictionServiceClient._get_default_mtls_endpoint(None) is None assert ( - CreativeServiceClient._get_default_mtls_endpoint(api_endpoint) + PredictionServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint ) assert ( - CreativeServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + PredictionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint ) assert ( - CreativeServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + PredictionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint ) assert ( - CreativeServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + PredictionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint ) assert ( - CreativeServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + PredictionServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi ) def test__read_environment_variables(): - assert CreativeServiceClient._read_environment_variables() == (False, "auto", None) + assert 
PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert CreativeServiceClient._read_environment_variables() == ( + assert PredictionServiceClient._read_environment_variables() == ( True, "auto", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert CreativeServiceClient._read_environment_variables() == ( + assert PredictionServiceClient._read_environment_variables() == ( False, "auto", None, @@ -129,28 +134,28 @@ def test__read_environment_variables(): os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError) as excinfo: - CreativeServiceClient._read_environment_variables() + PredictionServiceClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert CreativeServiceClient._read_environment_variables() == ( + assert PredictionServiceClient._read_environment_variables() == ( False, "never", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert CreativeServiceClient._read_environment_variables() == ( + assert PredictionServiceClient._read_environment_variables() == ( False, "always", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert CreativeServiceClient._read_environment_variables() == ( + assert PredictionServiceClient._read_environment_variables() == ( False, "auto", None, @@ -158,14 +163,14 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: - CreativeServiceClient._read_environment_variables() + PredictionServiceClient._read_environment_variables() assert ( 
str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert CreativeServiceClient._read_environment_variables() == ( + assert PredictionServiceClient._read_environment_variables() == ( False, "auto", "foo.com", @@ -176,13 +181,15 @@ def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() - assert CreativeServiceClient._get_client_cert_source(None, False) is None + assert PredictionServiceClient._get_client_cert_source(None, False) is None assert ( - CreativeServiceClient._get_client_cert_source(mock_provided_cert_source, False) + PredictionServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) is None ) assert ( - CreativeServiceClient._get_client_cert_source(mock_provided_cert_source, True) + PredictionServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source ) @@ -194,11 +201,11 @@ def test__get_client_cert_source(): return_value=mock_default_cert_source, ): assert ( - CreativeServiceClient._get_client_cert_source(None, True) + PredictionServiceClient._get_client_cert_source(None, True) is mock_default_cert_source ) assert ( - CreativeServiceClient._get_client_cert_source( + PredictionServiceClient._get_client_cert_source( mock_provided_cert_source, "true" ) is mock_provided_cert_source @@ -206,59 +213,66 @@ def test__get_client_cert_source(): @mock.patch.object( - CreativeServiceClient, + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(CreativeServiceClient), + modify_default_endpoint_template(PredictionServiceAsyncClient), ) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = 
mock.Mock() - default_universe = CreativeServiceClient._DEFAULT_UNIVERSE - default_endpoint = CreativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + default_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = CreativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) assert ( - CreativeServiceClient._get_api_endpoint( + PredictionServiceClient._get_api_endpoint( api_override, mock_client_cert_source, default_universe, "always" ) == api_override ) assert ( - CreativeServiceClient._get_api_endpoint( + PredictionServiceClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "auto" ) - == CreativeServiceClient.DEFAULT_MTLS_ENDPOINT + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - CreativeServiceClient._get_api_endpoint(None, None, default_universe, "auto") + PredictionServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint ) assert ( - CreativeServiceClient._get_api_endpoint(None, None, default_universe, "always") - == CreativeServiceClient.DEFAULT_MTLS_ENDPOINT + PredictionServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - CreativeServiceClient._get_api_endpoint( + PredictionServiceClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "always" ) - == CreativeServiceClient.DEFAULT_MTLS_ENDPOINT + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - CreativeServiceClient._get_api_endpoint(None, None, mock_universe, "never") + PredictionServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint ) assert ( - CreativeServiceClient._get_api_endpoint(None, None, default_universe, "never") + 
PredictionServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint ) with pytest.raises(MutualTLSChannelError) as excinfo: - CreativeServiceClient._get_api_endpoint( + PredictionServiceClient._get_api_endpoint( None, mock_client_cert_source, mock_universe, "auto" ) assert ( @@ -272,29 +286,30 @@ def test__get_universe_domain(): universe_domain_env = "bar.com" assert ( - CreativeServiceClient._get_universe_domain( + PredictionServiceClient._get_universe_domain( client_universe_domain, universe_domain_env ) == client_universe_domain ) assert ( - CreativeServiceClient._get_universe_domain(None, universe_domain_env) + PredictionServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env ) assert ( - CreativeServiceClient._get_universe_domain(None, None) - == CreativeServiceClient._DEFAULT_UNIVERSE + PredictionServiceClient._get_universe_domain(None, None) + == PredictionServiceClient._DEFAULT_UNIVERSE ) with pytest.raises(ValueError) as excinfo: - CreativeServiceClient._get_universe_domain("", None) + PredictionServiceClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
@pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (CreativeServiceClient, transports.CreativeServiceRestTransport, "rest"), + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), ], ) def test__validate_universe_domain(client_class, transport_class, transport_name): @@ -373,10 +388,12 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (CreativeServiceClient, "rest"), + (PredictionServiceClient, "grpc"), + (PredictionServiceAsyncClient, "grpc_asyncio"), + (PredictionServiceClient, "rest"), ], ) -def test_creative_service_client_from_service_account_info( +def test_prediction_service_client_from_service_account_info( client_class, transport_name ): creds = ga_credentials.AnonymousCredentials() @@ -390,19 +407,21 @@ def test_creative_service_client_from_service_account_info( assert isinstance(client, client_class) assert client.transport._host == ( - "admanager.googleapis.com:443" + "generativelanguage.googleapis.com:443" if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" + else "https://generativelanguage.googleapis.com" ) @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.CreativeServiceRestTransport, "rest"), + (transports.PredictionServiceGrpcTransport, "grpc"), + (transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.PredictionServiceRestTransport, "rest"), ], ) -def test_creative_service_client_service_account_always_use_jwt( +def test_prediction_service_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -423,10 +442,12 @@ def test_creative_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (CreativeServiceClient, "rest"), + 
(PredictionServiceClient, "grpc"), + (PredictionServiceAsyncClient, "grpc_asyncio"), + (PredictionServiceClient, "rest"), ], ) -def test_creative_service_client_from_service_account_file( +def test_prediction_service_client_from_service_account_file( client_class, transport_name ): creds = ga_credentials.AnonymousCredentials() @@ -447,45 +468,57 @@ def test_creative_service_client_from_service_account_file( assert isinstance(client, client_class) assert client.transport._host == ( - "admanager.googleapis.com:443" + "generativelanguage.googleapis.com:443" if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" + else "https://generativelanguage.googleapis.com" ) -def test_creative_service_client_get_transport_class(): - transport = CreativeServiceClient.get_transport_class() +def test_prediction_service_client_get_transport_class(): + transport = PredictionServiceClient.get_transport_class() available_transports = [ - transports.CreativeServiceRestTransport, + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceRestTransport, ] assert transport in available_transports - transport = CreativeServiceClient.get_transport_class("rest") - assert transport == transports.CreativeServiceRestTransport + transport = PredictionServiceClient.get_transport_class("grpc") + assert transport == transports.PredictionServiceGrpcTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (CreativeServiceClient, transports.CreativeServiceRestTransport, "rest"), + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), ], ) @mock.patch.object( - CreativeServiceClient, + PredictionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(CreativeServiceClient), + 
modify_default_endpoint_template(PredictionServiceClient), ) -def test_creative_service_client_client_options( +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +def test_prediction_service_client_client_options( client_class, transport_class, transport_name ): # Check that if channel is provided we won't create a new one. - with mock.patch.object(CreativeServiceClient, "get_transport_class") as gtc: + with mock.patch.object(PredictionServiceClient, "get_transport_class") as gtc: transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CreativeServiceClient, "get_transport_class") as gtc: + with mock.patch.object(PredictionServiceClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() @@ -609,26 +642,55 @@ def test_creative_service_client_client_options( "client_class,transport_class,transport_name,use_client_cert_env", [ ( - CreativeServiceClient, - transports.CreativeServiceRestTransport, + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + "true", + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + "false", + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + PredictionServiceClient, + transports.PredictionServiceRestTransport, "rest", "true", ), ( - CreativeServiceClient, - transports.CreativeServiceRestTransport, + PredictionServiceClient, + transports.PredictionServiceRestTransport, "rest", "false", ), ], ) @mock.patch.object( - CreativeServiceClient, + 
PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(CreativeServiceClient), + modify_default_endpoint_template(PredictionServiceAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_creative_service_client_mtls_env_auto( +def test_prediction_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env ): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default @@ -730,13 +792,20 @@ def test_creative_service_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [CreativeServiceClient]) +@pytest.mark.parametrize( + "client_class", [PredictionServiceClient, PredictionServiceAsyncClient] +) @mock.patch.object( - CreativeServiceClient, + PredictionServiceClient, "DEFAULT_ENDPOINT", - modify_default_endpoint(CreativeServiceClient), + modify_default_endpoint(PredictionServiceClient), ) -def test_creative_service_client_get_mtls_endpoint_and_cert_source(client_class): +@mock.patch.object( + PredictionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PredictionServiceAsyncClient), +) +def test_prediction_service_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
@@ -827,21 +896,28 @@ def test_creative_service_client_get_mtls_endpoint_and_cert_source(client_class) ) -@pytest.mark.parametrize("client_class", [CreativeServiceClient]) +@pytest.mark.parametrize( + "client_class", [PredictionServiceClient, PredictionServiceAsyncClient] +) +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) @mock.patch.object( - CreativeServiceClient, + PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(CreativeServiceClient), + modify_default_endpoint_template(PredictionServiceAsyncClient), ) -def test_creative_service_client_client_api_endpoint(client_class): +def test_prediction_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" - default_universe = CreativeServiceClient._DEFAULT_UNIVERSE - default_endpoint = CreativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + default_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = CreativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) @@ -909,10 +985,16 @@ def test_creative_service_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (CreativeServiceClient, transports.CreativeServiceRestTransport, "rest"), + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), ], ) -def test_creative_service_client_client_options_scopes( +def 
test_prediction_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. @@ -940,10 +1022,27 @@ def test_creative_service_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - (CreativeServiceClient, transports.CreativeServiceRestTransport, "rest", None), + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + PredictionServiceClient, + transports.PredictionServiceRestTransport, + "rest", + None, + ), ], ) -def test_creative_service_client_client_options_credentials_file( +def test_prediction_service_client_client_options_credentials_file( client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. @@ -967,63 +1066,183 @@ def test_creative_service_client_client_options_credentials_file( ) +def test_prediction_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PredictionServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PredictionServiceAsyncClient, + 
transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_prediction_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "request_type", [ - creative_service.GetCreativeRequest, + prediction_service.PredictRequest, dict, 
], ) -def test_get_creative_rest(request_type): - client = CreativeServiceClient( +def test_predict(request_type, transport: str = "grpc"): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/creatives/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = creative_service.Creative( - name="name_value", - creative_id=1151, - display_name="display_name_value", - advertiser="advertiser_value", - preview_url="preview_url_value", - size_label="size_label_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() + response = client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = prediction_service.PredictRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, prediction_service.PredictResponse) + + +def test_predict_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client.predict() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == prediction_service.PredictRequest() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = creative_service.Creative.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_creative(request) +def test_predict_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Establish that the response is the type that we expect. - assert isinstance(response, creative_service.Creative) - assert response.name == "name_value" - assert response.creative_id == 1151 - assert response.display_name == "display_name_value" - assert response.advertiser == "advertiser_value" - assert response.preview_url == "preview_url_value" - assert response.size_label == "size_label_value" + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = prediction_service.PredictRequest( + model="model_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.predict(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == prediction_service.PredictRequest( + model="model_value", + ) -def test_get_creative_rest_use_cached_wrapped_rpc(): +def test_predict_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -1031,300 +1250,319 @@ def test_get_creative_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_creative in client._transport._wrapped_methods + assert client._transport.predict in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_creative] = mock_rpc - + client._transport._wrapped_methods[client._transport.predict] = mock_rpc request = {} - client.get_creative(request) + client.predict(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_creative(request) + client.predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_creative_rest_required_fields( - request_type=creative_service.GetCreativeRequest, -): - transport_class = transports.CreativeServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_predict_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + response = await client.predict() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == prediction_service.PredictRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_creative._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["name"] = "name_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_creative._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.predict + in client._client._transport._wrapped_methods + ) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.predict + ] = mock_rpc + + request = {} + await client.predict(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - client = CreativeServiceClient( + await client.predict(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_predict_async( + transport: str = "grpc_asyncio", request_type=prediction_service.PredictRequest +): + client = PredictionServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = creative_service.Creative() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + response = await client.predict(request) - # Convert return value to protobuf type - return_value = creative_service.Creative.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = prediction_service.PredictRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. + assert isinstance(response, prediction_service.PredictResponse) - response = client.get_creative(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_predict_async_from_dict(): + await test_predict_async(request_type=dict) -def test_get_creative_rest_unset_required_fields(): - transport = transports.CreativeServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_predict_field_headers(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.get_creative._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = prediction_service.PredictRequest() + request.model = "model_value" -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_creative_rest_interceptors(null_interceptor): - transport = transports.CreativeServiceRestTransport( + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value = prediction_service.PredictResponse() + client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_predict_field_headers_async(): + client = PredictionServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CreativeServiceRestInterceptor(), ) - client = CreativeServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.CreativeServiceRestInterceptor, "post_get_creative" - ) as post, mock.patch.object( - transports.CreativeServiceRestInterceptor, "pre_get_creative" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = creative_service.GetCreativeRequest.pb( - creative_service.GetCreativeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = creative_service.Creative.to_json( - creative_service.Creative() - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = prediction_service.PredictRequest() - request = creative_service.GetCreativeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = creative_service.Creative() + request.model = "model_value" - client.get_creative( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() ) + await client.predict(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] -def test_get_creative_rest_bad_request( - transport: str = "rest", request_type=creative_service.GetCreativeRequest -): - client = CreativeServiceClient( +def test_predict_flattened(): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/creatives/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_creative(request) - + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.predict( + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) -def test_get_creative_rest_flattened(): - client = CreativeServiceClient( + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].instances + mock_val = [struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)] + assert arg == mock_val + + +def test_predict_flattened_error(): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = creative_service.Creative() + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) - # get arguments that satisfy an http rule for this method - sample_request = {"name": "networks/sample1/creatives/sample2"} - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) +@pytest.mark.asyncio +async def test_predict_flattened_async(): + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = creative_service.Creative.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() - client.get_creative(**mock_args) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.predict( + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=networks/*/creatives/*}" % client.transport._host, args[1] - ) - - -def test_get_creative_rest_flattened_error(transport: str = "rest"): - client = CreativeServiceClient( + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].instances + mock_val = [struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_predict_flattened_error_async(): + client = PredictionServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_creative( - creative_service.GetCreativeRequest(), - name="name_value", + await client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], ) -def test_get_creative_rest_error(): - client = CreativeServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - @pytest.mark.parametrize( "request_type", [ - creative_service.ListCreativesRequest, + prediction_service.PredictRequest, dict, ], ) -def test_list_creatives_rest(request_type): - client = CreativeServiceClient( +def test_predict_rest(request_type): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} + request_init = {"model": "models/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = creative_service.ListCreativesResponse( - next_page_token="next_page_token_value", - total_size=1086, - ) + return_value = prediction_service.PredictResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = creative_service.ListCreativesResponse.pb(return_value) + return_value = prediction_service.PredictResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_creatives(request) + response = client.predict(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCreativesPager) - assert response.next_page_token == "next_page_token_value" - assert response.total_size == 1086 + assert isinstance(response, prediction_service.PredictResponse) -def test_list_creatives_rest_use_cached_wrapped_rpc(): +def test_predict_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1334,35 +1572,33 @@ def test_list_creatives_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_creatives in client._transport._wrapped_methods + assert client._transport.predict in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) 
expect a string. ) - client._transport._wrapped_methods[client._transport.list_creatives] = mock_rpc + client._transport._wrapped_methods[client._transport.predict] = mock_rpc request = {} - client.list_creatives(request) + client.predict(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_creatives(request) + client.predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_creatives_rest_required_fields( - request_type=creative_service.ListCreativesRequest, -): - transport_class = transports.CreativeServiceRestTransport +def test_predict_rest_required_fields(request_type=prediction_service.PredictRequest): + transport_class = transports.PredictionServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["model"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -1373,40 +1609,30 @@ def test_list_creatives_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_creatives._get_unset_required_fields(jsonified_request) + ).predict._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["model"] = "model_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_creatives._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "skip", - ) - ) + ).predict._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = creative_service.ListCreativesResponse() + return_value = prediction_service.PredictResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1418,70 +1644,68 @@ def test_list_creatives_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = creative_service.ListCreativesResponse.pb(return_value) + return_value = prediction_service.PredictResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_creatives(request) + response = client.predict(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_creatives_rest_unset_required_fields(): - transport = 
transports.CreativeServiceRestTransport( +def test_predict_rest_unset_required_fields(): + transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_creatives._get_unset_required_fields({}) + unset_fields = transport.predict._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "skip", + "model", + "instances", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_creatives_rest_interceptors(null_interceptor): - transport = transports.CreativeServiceRestTransport( +def test_predict_rest_interceptors(null_interceptor): + transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.CreativeServiceRestInterceptor(), + else transports.PredictionServiceRestInterceptor(), ) - client = CreativeServiceClient(transport=transport) + client = PredictionServiceClient(transport=transport) with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CreativeServiceRestInterceptor, "post_list_creatives" + transports.PredictionServiceRestInterceptor, "post_predict" ) as post, mock.patch.object( - transports.CreativeServiceRestInterceptor, "pre_list_creatives" + transports.PredictionServiceRestInterceptor, "pre_predict" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = creative_service.ListCreativesRequest.pb( - creative_service.ListCreativesRequest() + pb_message = prediction_service.PredictRequest.pb( + prediction_service.PredictRequest() ) transcode.return_value = { "method": "post", @@ -1493,19 +1717,19 @@ def test_list_creatives_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 
req.return_value.request = PreparedRequest() - req.return_value._content = creative_service.ListCreativesResponse.to_json( - creative_service.ListCreativesResponse() + req.return_value._content = prediction_service.PredictResponse.to_json( + prediction_service.PredictResponse() ) - request = creative_service.ListCreativesRequest() + request = prediction_service.PredictRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = creative_service.ListCreativesResponse() + post.return_value = prediction_service.PredictResponse() - client.list_creatives( + client.predict( request, metadata=[ ("key", "val"), @@ -1517,16 +1741,16 @@ def test_list_creatives_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_creatives_rest_bad_request( - transport: str = "rest", request_type=creative_service.ListCreativesRequest +def test_predict_rest_bad_request( + transport: str = "rest", request_type=prediction_service.PredictRequest ): - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} + request_init = {"model": "models/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1538,11 +1762,11 @@ def test_list_creatives_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_creatives(request) + client.predict(request) -def test_list_creatives_rest_flattened(): - client = CreativeServiceClient( +def test_predict_rest_flattened(): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1550,14 +1774,15 @@ def test_list_creatives_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = creative_service.ListCreativesResponse() + return_value = prediction_service.PredictResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "networks/sample1"} + sample_request = {"model": "models/sample1"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], ) mock_args.update(sample_request) @@ -1565,24 +1790,24 @@ def test_list_creatives_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = creative_service.ListCreativesResponse.pb(return_value) + return_value = prediction_service.PredictResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_creatives(**mock_args) + client.predict(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=networks/*}/creatives" % client.transport._host, args[1] + "%s/v1beta/{model=models/*}:predict" % client.transport._host, args[1] ) -def test_list_creatives_rest_flattened_error(transport: str = "rest"): - client = CreativeServiceClient( +def test_predict_rest_flattened_error(transport: str = "rest"): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1590,104 +1815,48 @@ def test_list_creatives_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_creatives( - creative_service.ListCreativesRequest(), - parent="parent_value", + client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], ) -def test_list_creatives_rest_pager(transport: str = "rest"): - client = CreativeServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_predict_rest_error(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - creative_service.ListCreativesResponse( - creatives=[ - creative_service.Creative(), - creative_service.Creative(), - creative_service.Creative(), - ], - next_page_token="abc", - ), - creative_service.ListCreativesResponse( - creatives=[], - next_page_token="def", - ), - creative_service.ListCreativesResponse( - creatives=[ - creative_service.Creative(), - ], - next_page_token="ghi", - ), - creative_service.ListCreativesResponse( - creatives=[ - creative_service.Creative(), - creative_service.Creative(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - creative_service.ListCreativesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "networks/sample1"} - - pager = client.list_creatives(request=sample_request) - - 
results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, creative_service.Creative) for i in results) - - pages = list(client.list_creatives(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. - transport = transports.CreativeServiceRestTransport( + transport = transports.PredictionServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. - transport = transports.CreativeServiceRestTransport( + transport = transports.PredictionServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = CreativeServiceClient( + client = PredictionServiceClient( client_options={"credentials_file": "credentials.json"}, transport=transport, ) # It is an error to provide an api_key and a transport instance. - transport = transports.CreativeServiceRestTransport( + transport = transports.PredictionServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = CreativeServiceClient( + client = PredictionServiceClient( client_options=options, transport=transport, ) @@ -1696,16 +1865,16 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = CreativeServiceClient( + client = PredictionServiceClient( client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. 
- transport = transports.CreativeServiceRestTransport( + transport = transports.PredictionServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = CreativeServiceClient( + client = PredictionServiceClient( client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -1713,17 +1882,34 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. - transport = transports.CreativeServiceRestTransport( + transport = transports.PredictionServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) - client = CreativeServiceClient(transport=transport) + client = PredictionServiceClient(transport=transport) assert client.transport is transport +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PredictionServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + @pytest.mark.parametrize( "transport_class", [ - transports.CreativeServiceRestTransport, + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + transports.PredictionServiceRestTransport, ], ) def test_transport_adc(transport_class): @@ -1737,42 +1923,50 @@ def test_transport_adc(transport_class): @pytest.mark.parametrize( "transport_name", [ + "grpc", "rest", ], ) def test_transport_kind(transport_name): - transport = CreativeServiceClient.get_transport_class(transport_name)( + transport = PredictionServiceClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name -def test_creative_service_base_transport_error(): +def 
test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PredictionServiceGrpcTransport, + ) + + +def test_prediction_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CreativeServiceTransport( + transport = transports.PredictionServiceTransport( credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) -def test_creative_service_base_transport(): +def test_prediction_service_base_transport(): # Instantiate the base transport. with mock.patch( - "google.ads.admanager_v1.services.creative_service.transports.CreativeServiceTransport.__init__" + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceTransport.__init__" ) as Transport: Transport.return_value = None - transport = transports.CreativeServiceTransport( + transport = transports.PredictionServiceTransport( credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly # raise NotImplementedError. 
- methods = ( - "get_creative", - "list_creatives", - "get_operation", - ) + methods = ("predict",) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) @@ -1789,16 +1983,16 @@ def test_creative_service_base_transport(): getattr(transport, r)() -def test_creative_service_base_transport_with_credentials_file(): +def test_prediction_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.ads.admanager_v1.services.creative_service.transports.CreativeServiceTransport._prep_wrapped_messages" + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CreativeServiceTransport( + transport = transports.PredictionServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1810,22 +2004,22 @@ def test_creative_service_base_transport_with_credentials_file(): ) -def test_creative_service_base_transport_with_adc(): +def test_prediction_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.ads.admanager_v1.services.creative_service.transports.CreativeServiceTransport._prep_wrapped_messages" + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CreativeServiceTransport() + transport = transports.PredictionServiceTransport() adc.assert_called_once() -def test_creative_service_auth_adc(): +def test_prediction_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CreativeServiceClient() + PredictionServiceClient() adc.assert_called_once_with( scopes=None, default_scopes=(), @@ -1833,12 +2027,135 @@ def test_creative_service_auth_adc(): ) -def test_creative_service_http_transport_client_cert_source_for_mtls(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + transports.PredictionServiceRestTransport, + ], +) +def test_prediction_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PredictionServiceGrpcTransport, grpc_helpers), + (transports.PredictionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_prediction_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_prediction_service_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" ) as mock_configure_mtls_channel: - transports.CreativeServiceRestTransport( + transports.PredictionServiceRestTransport( credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) @@ -1847,42 +2164,46 @@ def test_creative_service_http_transport_client_cert_source_for_mtls(): @pytest.mark.parametrize( "transport_name", [ + "grpc", + "grpc_asyncio", "rest", ], ) -def test_creative_service_host_no_port(transport_name): - client = CreativeServiceClient( +def test_prediction_service_host_no_port(transport_name): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( - api_endpoint="admanager.googleapis.com" + api_endpoint="generativelanguage.googleapis.com" ), transport=transport_name, ) assert client.transport._host == ( - "admanager.googleapis.com:443" + "generativelanguage.googleapis.com:443" if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" + else "https://generativelanguage.googleapis.com" ) @pytest.mark.parametrize( "transport_name", [ + "grpc", + "grpc_asyncio", "rest", ], ) -def test_creative_service_host_with_port(transport_name): - client = CreativeServiceClient( +def test_prediction_service_host_with_port(transport_name): 
+ client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( - api_endpoint="admanager.googleapis.com:8000" + api_endpoint="generativelanguage.googleapis.com:8000" ), transport=transport_name, ) assert client.transport._host == ( - "admanager.googleapis.com:8000" + "generativelanguage.googleapis.com:8000" if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com:8000" + else "https://generativelanguage.googleapis.com:8000" ) @@ -1892,111 +2213,165 @@ def test_creative_service_host_with_port(transport_name): "rest", ], ) -def test_creative_service_client_transport_session_collision(transport_name): +def test_prediction_service_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() - client1 = CreativeServiceClient( + client1 = PredictionServiceClient( credentials=creds1, transport=transport_name, ) - client2 = CreativeServiceClient( + client2 = PredictionServiceClient( credentials=creds2, transport=transport_name, ) - session1 = client1.transport.get_creative._session - session2 = client2.transport.get_creative._session + session1 = client1.transport.predict._session + session2 = client2.transport.predict._session assert session1 != session2 - session1 = client1.transport.list_creatives._session - session2 = client2.transport.list_creatives._session - assert session1 != session2 - -def test_ad_partner_path(): - network_code = "squid" - ad_partner = "clam" - expected = "networks/{network_code}/adPartners/{ad_partner}".format( - network_code=network_code, - ad_partner=ad_partner, - ) - actual = CreativeServiceClient.ad_partner_path(network_code, ad_partner) - assert expected == actual +def test_prediction_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) -def test_parse_ad_partner_path(): - expected = { - 
"network_code": "whelk", - "ad_partner": "octopus", - } - path = CreativeServiceClient.ad_partner_path(**expected) + # Check that channel is used if provided. + transport = transports.PredictionServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None - # Check that the path construction is reversible. - actual = CreativeServiceClient.parse_ad_partner_path(path) - assert expected == actual +def test_prediction_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) -def test_company_path(): - network_code = "oyster" - company = "nudibranch" - expected = "networks/{network_code}/companies/{company}".format( - network_code=network_code, - company=company, + # Check that channel is used if provided. + transport = transports.PredictionServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, ) - actual = CreativeServiceClient.company_path(network_code, company) - assert expected == actual - + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None -def test_parse_company_path(): - expected = { - "network_code": "cuttlefish", - "company": "mussel", - } - path = CreativeServiceClient.company_path(**expected) - - # Check that the path construction is reversible. - actual = CreativeServiceClient.parse_company_path(path) - assert expected == actual +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() -def test_creative_path(): - network_code = "winkle" - creative = "nautilus" - expected = "networks/{network_code}/creatives/{creative}".format( - network_code=network_code, - creative=creative, - ) - actual = CreativeServiceClient.creative_path(network_code, creative) - assert expected == actual + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred -def test_parse_creative_path(): - expected = { - "network_code": "scallop", - "creative": "abalone", - } - path = CreativeServiceClient.creative_path(**expected) +# Remove this test when 
deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) - # Check that the path construction is reversible. 
- actual = CreativeServiceClient.parse_creative_path(path) - assert expected == actual + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel -def test_network_path(): - network_code = "squid" - expected = "networks/{network_code}".format( - network_code=network_code, +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, ) - actual = CreativeServiceClient.network_path(network_code) + actual = PredictionServiceClient.model_path(model) assert expected == actual -def test_parse_network_path(): +def test_parse_model_path(): expected = { - "network_code": "clam", + "model": "clam", } - path = CreativeServiceClient.network_path(**expected) + path = PredictionServiceClient.model_path(**expected) # Check that the path construction is reversible. - actual = CreativeServiceClient.parse_network_path(path) + actual = PredictionServiceClient.parse_model_path(path) assert expected == actual @@ -2005,7 +2380,7 @@ def test_common_billing_account_path(): expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) - actual = CreativeServiceClient.common_billing_account_path(billing_account) + actual = PredictionServiceClient.common_billing_account_path(billing_account) assert expected == actual @@ -2013,10 +2388,10 @@ def test_parse_common_billing_account_path(): expected = { "billing_account": "octopus", } - path = CreativeServiceClient.common_billing_account_path(**expected) + path = PredictionServiceClient.common_billing_account_path(**expected) # Check that the path construction is reversible. 
- actual = CreativeServiceClient.parse_common_billing_account_path(path) + actual = PredictionServiceClient.parse_common_billing_account_path(path) assert expected == actual @@ -2025,7 +2400,7 @@ def test_common_folder_path(): expected = "folders/{folder}".format( folder=folder, ) - actual = CreativeServiceClient.common_folder_path(folder) + actual = PredictionServiceClient.common_folder_path(folder) assert expected == actual @@ -2033,10 +2408,10 @@ def test_parse_common_folder_path(): expected = { "folder": "nudibranch", } - path = CreativeServiceClient.common_folder_path(**expected) + path = PredictionServiceClient.common_folder_path(**expected) # Check that the path construction is reversible. - actual = CreativeServiceClient.parse_common_folder_path(path) + actual = PredictionServiceClient.parse_common_folder_path(path) assert expected == actual @@ -2045,7 +2420,7 @@ def test_common_organization_path(): expected = "organizations/{organization}".format( organization=organization, ) - actual = CreativeServiceClient.common_organization_path(organization) + actual = PredictionServiceClient.common_organization_path(organization) assert expected == actual @@ -2053,10 +2428,10 @@ def test_parse_common_organization_path(): expected = { "organization": "mussel", } - path = CreativeServiceClient.common_organization_path(**expected) + path = PredictionServiceClient.common_organization_path(**expected) # Check that the path construction is reversible. 
- actual = CreativeServiceClient.parse_common_organization_path(path) + actual = PredictionServiceClient.parse_common_organization_path(path) assert expected == actual @@ -2065,7 +2440,7 @@ def test_common_project_path(): expected = "projects/{project}".format( project=project, ) - actual = CreativeServiceClient.common_project_path(project) + actual = PredictionServiceClient.common_project_path(project) assert expected == actual @@ -2073,10 +2448,10 @@ def test_parse_common_project_path(): expected = { "project": "nautilus", } - path = CreativeServiceClient.common_project_path(**expected) + path = PredictionServiceClient.common_project_path(**expected) # Check that the path construction is reversible. - actual = CreativeServiceClient.parse_common_project_path(path) + actual = PredictionServiceClient.parse_common_project_path(path) assert expected == actual @@ -2087,7 +2462,7 @@ def test_common_location_path(): project=project, location=location, ) - actual = CreativeServiceClient.common_location_path(project, location) + actual = PredictionServiceClient.common_location_path(project, location) assert expected == actual @@ -2096,10 +2471,10 @@ def test_parse_common_location_path(): "project": "squid", "location": "clam", } - path = CreativeServiceClient.common_location_path(**expected) + path = PredictionServiceClient.common_location_path(**expected) # Check that the path construction is reversible. 
- actual = CreativeServiceClient.parse_common_location_path(path) + actual = PredictionServiceClient.parse_common_location_path(path) assert expected == actual @@ -2107,18 +2482,18 @@ def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( - transports.CreativeServiceTransport, "_prep_wrapped_messages" + transports.PredictionServiceTransport, "_prep_wrapped_messages" ) as prep: - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) with mock.patch.object( - transports.CreativeServiceTransport, "_prep_wrapped_messages" + transports.PredictionServiceTransport, "_prep_wrapped_messages" ) as prep: - transport_class = CreativeServiceClient.get_transport_class() + transport_class = PredictionServiceClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, @@ -2126,71 +2501,28 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = CreativeServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = CreativeServiceClient( +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PredictionServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc_asyncio", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() def test_transport_close(): transports = { "rest": "_session", + "grpc": "_grpc_channel", } for transport, close_name in transports.items(): - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object( @@ -2204,9 +2536,10 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", + "grpc", ] for transport in transports: - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. @@ -2220,7 +2553,11 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (CreativeServiceClient, transports.CreativeServiceRestTransport), + (PredictionServiceClient, transports.PredictionServiceGrpcTransport), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + ), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py index 4ad80ff8f8b0..d04ac7285fb9 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py @@ -1313,22 +1313,23 @@ async def test_create_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() 
client._client._transport._wrapped_methods[ client._client._transport.create_corpus - ] = mock_object + ] = mock_rpc request = {} await client.create_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1617,22 +1618,23 @@ async def test_get_corpus_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_corpus - ] = mock_object + ] = mock_rpc request = {} await client.get_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1978,22 +1980,23 @@ async def test_update_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_corpus - ] = mock_object + ] = mock_rpc request = {} await client.update_corpus(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2343,22 +2346,23 @@ async def test_delete_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_corpus - ] = mock_object + ] = mock_rpc request = {} await client.delete_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2698,22 +2702,23 @@ async def test_list_corpora_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_corpora - ] = mock_object + ] = mock_rpc request = {} await client.list_corpora(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_corpora(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3107,22 +3112,23 @@ async def test_query_corpus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_corpus - ] = mock_object + ] = mock_rpc request = {} await client.query_corpus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_corpus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3389,22 +3395,23 @@ async def test_create_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_document - ] = mock_object + ] = mock_rpc request = {} await client.create_document(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3765,22 +3772,23 @@ async def test_get_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_document - ] = mock_object + ] = mock_rpc request = {} await client.get_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4126,22 +4134,23 @@ async def test_update_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_document - ] = mock_object + ] = mock_rpc request = {} await client.update_document(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4492,22 +4501,23 @@ async def test_delete_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_document - ] = mock_object + ] = mock_rpc request = {} await client.delete_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4850,22 +4860,23 @@ async def test_list_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_documents - ] = mock_object + ] = mock_rpc request = {} await client.list_documents(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5405,22 +5416,23 @@ async def test_query_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_document - ] = mock_object + ] = mock_rpc request = {} await client.query_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5687,22 +5699,23 @@ async def test_create_chunk_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_chunk - ] = mock_object + ] = mock_rpc request = {} await client.create_chunk(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_chunk(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6066,22 +6079,23 @@ async def test_batch_create_chunks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_chunks - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_chunks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_create_chunks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6353,22 +6367,23 @@ async def test_get_chunk_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_chunk - ] = mock_object + ] = mock_rpc request = {} await client.get_chunk(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_chunk(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6714,22 +6729,23 @@ async def test_update_chunk_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_chunk - ] = mock_object + ] = mock_rpc request = {} await client.update_chunk(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_chunk(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7093,22 +7109,23 @@ async def test_batch_update_chunks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_chunks - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_chunks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_update_chunks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7372,22 +7389,23 @@ async def test_delete_chunk_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_chunk - ] = mock_object + ] = mock_rpc request = {} await client.delete_chunk(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_chunk(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7732,22 +7750,23 @@ async def test_batch_delete_chunks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_chunks - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_chunks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_delete_chunks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8016,22 +8035,23 @@ async def test_list_chunks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_chunks - ] = mock_object + ] = mock_rpc request = {} await client.list_chunks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_chunks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py index bdae2d9cd5ca..0ef83108f220 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py @@ -1239,22 +1239,23 @@ async def test_generate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_text - ] = mock_object + ] = mock_rpc request = {} await client.generate_text(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1647,22 +1648,23 @@ async def test_embed_text_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.embed_text - ] = mock_object + ] = mock_rpc request = {} await client.embed_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.embed_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2015,22 +2017,23 @@ async def test_batch_embed_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_embed_text - ] = mock_object + ] = mock_rpc request = {} await client.batch_embed_text(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_embed_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2396,22 +2399,23 @@ async def test_count_text_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_text_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_text_tokens(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_text_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py index 37e8bfb66a96..aaca5a1e5113 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py @@ -1277,22 +1277,23 @@ async def test_generate_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_message - ] = mock_object + ] = mock_rpc request = {} await client.generate_message(request) # Establish that the underlying gRPC 
stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1692,22 +1693,23 @@ async def test_count_message_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_message_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_message_tokens(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_message_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py index 45fec79440be..bb7f4de6054b 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py @@ -1288,22 +1288,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # 
Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1670,22 +1671,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py index b52f00220a90..8780254ecbaa 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py @@ -1238,22 +1238,23 @@ async def test_generate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_text - ] = mock_object + ] = mock_rpc request = {} await client.generate_text(request) # Establish that the underlying 
gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1646,22 +1647,23 @@ async def test_embed_text_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.embed_text - ] = mock_object + ] = mock_rpc request = {} await client.embed_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.embed_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py index 963596a4ca0f..6b698304d1c0 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py @@ -1278,22 +1278,23 @@ async def test_generate_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_message - ] = mock_object + ] = mock_rpc request = {} await client.generate_message(request) # Establish that 
the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1693,22 +1694,23 @@ async def test_count_message_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_message_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_message_tokens(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_message_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py index 502ea6d85d66..65e877a7a177 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py @@ -1302,22 +1302,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await 
client.get_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1684,22 +1685,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2207,22 +2209,23 @@ async def test_get_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.get_tuned_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2593,22 +2596,23 @@ async def test_list_tuned_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tuned_models - ] = mock_object + ] = mock_rpc request = {} await client.list_tuned_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tuned_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3065,8 +3069,9 @@ def test_create_tuned_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_tuned_model(request) @@ -3122,26 +3127,28 @@ async def test_create_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.create_tuned_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3493,22 +3500,23 @@ async def test_update_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.update_tuned_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3918,22 +3926,23 @@ async def test_delete_tuned_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tuned_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_tuned_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tuned_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py index 92213ee3a04e..faeeaf23de2c 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py @@ -1342,22 +1342,23 @@ async def test_create_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_permission - ] = mock_object + ] = mock_rpc request = {} await client.create_permission(request) # Establish that 
the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1742,22 +1743,23 @@ async def test_get_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_permission - ] = mock_object + ] = mock_rpc request = {} await client.get_permission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2117,22 +2119,23 @@ async def test_list_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_permissions - ] = mock_object + ] = mock_rpc request = {} await client.list_permissions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2691,22 +2694,23 @@ async def test_update_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_permission - ] = mock_object + ] = mock_rpc request = {} await client.update_permission(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3085,22 +3089,23 @@ async def test_delete_permission_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_permission - ] = mock_object + ] = mock_rpc request = {} await client.delete_permission(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_permission(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3460,22 +3465,23 @@ async def test_transfer_ownership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.transfer_ownership - ] = mock_object + ] = mock_rpc request = {} await client.transfer_ownership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.transfer_ownership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py index cbd2b4c65ce6..30b8ee8bd044 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py @@ -1239,22 +1239,23 @@ async def test_generate_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_text - ] = mock_object + ] = mock_rpc request = {} await client.generate_text(request) # Establish that the underlying gRPC stub method was 
called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1647,22 +1648,23 @@ async def test_embed_text_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.embed_text - ] = mock_object + ] = mock_rpc request = {} await client.embed_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.embed_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2015,22 +2017,23 @@ async def test_batch_embed_text_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_embed_text - ] = mock_object + ] = mock_rpc request = {} await client.batch_embed_text(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_embed_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2396,22 +2399,23 @@ async def test_count_text_tokens_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.count_text_tokens - ] = mock_object + ] = mock_rpc request = {} await client.count_text_tokens(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.count_text_tokens(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-analytics-admin/CHANGELOG.md b/packages/google-analytics-admin/CHANGELOG.md index d3ac10702d8f..2085426cb884 100644 --- a/packages/google-analytics-admin/CHANGELOG.md +++ b/packages/google-analytics-admin/CHANGELOG.md @@ -1,5 +1,37 @@ # Changelog +## [0.23.0](https://github.com/googleapis/google-cloud-python/compare/google-analytics-admin-v0.22.9...google-analytics-admin-v0.23.0) (2024-08-08) + + +### ⚠ BREAKING CHANGES + +* Rename custom method `CreateSubpropertyRequest` to `ProvisionSubpropertyRequest` + +### Features + +* add `CreateBigQueryLink`, `UpdateBigQueryLink`, and `DeleteBigQueryLink` methods ([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) +* add `GetEventEditRule`, `CreateEventEditRule`, `ListEventEditRules`, `UpdateEventEditRule`, `DeleteEventEditRule`, and `ReorderEventEditRules` methods to the Admin API v1 alpha 
([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) +* add `GetKeyEvent`, `CreateKeyEvent`, `ListKeyEvents`, `UpdateKeyEvent`, and `DeleteKeyEvent` methods ([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) +* add the `BIGQUERY_LINK` option to the `ChangeHistoryResourceType` enum ([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) +* add the `create_time` field to the `Audience` resource ([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) +* add the `dataset_location` field to the `BigQueryLink` resource ([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) +* add the `gmp_organization` field to the `Account` resource ([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) +* add the `primary` field to the `ChannelGroup` resource ([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) +* mark `GetConversionEvent`, `CreateConversionEvent`, `ListConversionEvents`, `UpdateConversionEvent`, and `DeleteConversionEvent` methods as deprecated ([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) + + +### Bug Fixes + +* Rename custom method `CreateSubpropertyRequest` to `ProvisionSubpropertyRequest` ([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) + + +### Documentation + +* add deprecation comment to `GetConversionEvent`, `CreateConversionEvent`, `ListConversionEvents`, `UpdateConversionEvent`, and `DeleteConversionEvent` methods ([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) +* improve comment 
formatting of `account` and `property` fields in `SearchChangeHistoryEventsRequest` ([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) +* improve comment formatting of the `name` field in `DeleteFirebaseLinkRequest`, `GetGlobalSiteTagRequest`, and `GetDataSharingSettingsRequest` ([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) +* improve comment formatting of the `parent` field in `CreateFirebaseLinkRequest` and `ListFirebaseLinksRequest` ([9033800](https://github.com/googleapis/google-cloud-python/commit/9033800e464f15be0e4c418710c158591a84439d)) + ## [0.22.9](https://github.com/googleapis/google-cloud-python/compare/google-analytics-admin-v0.22.8...google-analytics-admin-v0.22.9) (2024-07-30) diff --git a/packages/google-analytics-admin/google/analytics/admin/__init__.py b/packages/google-analytics-admin/google/analytics/admin/__init__.py index 59a76ba72c0d..9bb3ffa15f32 100644 --- a/packages/google-analytics-admin/google/analytics/admin/__init__.py +++ b/packages/google-analytics-admin/google/analytics/admin/__init__.py @@ -64,6 +64,7 @@ CreateAccessBindingRequest, CreateAdSenseLinkRequest, CreateAudienceRequest, + CreateBigQueryLinkRequest, CreateCalculatedMetricRequest, CreateChannelGroupRequest, CreateConnectedSiteTagRequest, @@ -75,9 +76,11 @@ CreateDisplayVideo360AdvertiserLinkProposalRequest, CreateDisplayVideo360AdvertiserLinkRequest, CreateEventCreateRuleRequest, + CreateEventEditRuleRequest, CreateExpandedDataSetRequest, CreateFirebaseLinkRequest, CreateGoogleAdsLinkRequest, + CreateKeyEventRequest, CreateMeasurementProtocolSecretRequest, CreatePropertyRequest, CreateRollupPropertyRequest, @@ -86,11 +89,10 @@ CreateSearchAds360LinkRequest, CreateSKAdNetworkConversionValueSchemaRequest, CreateSubpropertyEventFilterRequest, - CreateSubpropertyRequest, - CreateSubpropertyResponse, DeleteAccessBindingRequest, DeleteAccountRequest, 
DeleteAdSenseLinkRequest, + DeleteBigQueryLinkRequest, DeleteCalculatedMetricRequest, DeleteChannelGroupRequest, DeleteConnectedSiteTagRequest, @@ -99,9 +101,11 @@ DeleteDisplayVideo360AdvertiserLinkProposalRequest, DeleteDisplayVideo360AdvertiserLinkRequest, DeleteEventCreateRuleRequest, + DeleteEventEditRuleRequest, DeleteExpandedDataSetRequest, DeleteFirebaseLinkRequest, DeleteGoogleAdsLinkRequest, + DeleteKeyEventRequest, DeleteMeasurementProtocolSecretRequest, DeletePropertyRequest, DeleteRollupPropertySourceLinkRequest, @@ -131,9 +135,11 @@ GetDisplayVideo360AdvertiserLinkRequest, GetEnhancedMeasurementSettingsRequest, GetEventCreateRuleRequest, + GetEventEditRuleRequest, GetExpandedDataSetRequest, GetGlobalSiteTagRequest, GetGoogleSignalsSettingsRequest, + GetKeyEventRequest, GetMeasurementProtocolSecretRequest, GetPropertyRequest, GetRollupPropertySourceLinkRequest, @@ -172,12 +178,16 @@ ListDisplayVideo360AdvertiserLinksResponse, ListEventCreateRulesRequest, ListEventCreateRulesResponse, + ListEventEditRulesRequest, + ListEventEditRulesResponse, ListExpandedDataSetsRequest, ListExpandedDataSetsResponse, ListFirebaseLinksRequest, ListFirebaseLinksResponse, ListGoogleAdsLinksRequest, ListGoogleAdsLinksResponse, + ListKeyEventsRequest, + ListKeyEventsResponse, ListMeasurementProtocolSecretsRequest, ListMeasurementProtocolSecretsResponse, ListPropertiesRequest, @@ -192,6 +202,9 @@ ListSubpropertyEventFiltersResponse, ProvisionAccountTicketRequest, ProvisionAccountTicketResponse, + ProvisionSubpropertyRequest, + ProvisionSubpropertyResponse, + ReorderEventEditRulesRequest, RunAccessReportRequest, RunAccessReportResponse, SearchChangeHistoryEventsRequest, @@ -202,6 +215,7 @@ UpdateAccountRequest, UpdateAttributionSettingsRequest, UpdateAudienceRequest, + UpdateBigQueryLinkRequest, UpdateCalculatedMetricRequest, UpdateChannelGroupRequest, UpdateConversionEventRequest, @@ -213,9 +227,11 @@ UpdateDisplayVideo360AdvertiserLinkRequest, 
UpdateEnhancedMeasurementSettingsRequest, UpdateEventCreateRuleRequest, + UpdateEventEditRuleRequest, UpdateExpandedDataSetRequest, UpdateGoogleAdsLinkRequest, UpdateGoogleSignalsSettingsRequest, + UpdateKeyEventRequest, UpdateMeasurementProtocolSecretRequest, UpdatePropertyRequest, UpdateSearchAds360LinkRequest, @@ -243,6 +259,7 @@ ) from google.analytics.admin_v1alpha.types.event_create_and_edit import ( EventCreateRule, + EventEditRule, MatchingCondition, ParameterMutation, ) @@ -286,6 +303,7 @@ GoogleSignalsSettings, GoogleSignalsState, IndustryCategory, + KeyEvent, LinkProposalInitiatingProduct, LinkProposalState, LinkProposalStatusDetails, @@ -347,6 +365,7 @@ "CreateAccessBindingRequest", "CreateAdSenseLinkRequest", "CreateAudienceRequest", + "CreateBigQueryLinkRequest", "CreateCalculatedMetricRequest", "CreateChannelGroupRequest", "CreateConnectedSiteTagRequest", @@ -358,9 +377,11 @@ "CreateDisplayVideo360AdvertiserLinkProposalRequest", "CreateDisplayVideo360AdvertiserLinkRequest", "CreateEventCreateRuleRequest", + "CreateEventEditRuleRequest", "CreateExpandedDataSetRequest", "CreateFirebaseLinkRequest", "CreateGoogleAdsLinkRequest", + "CreateKeyEventRequest", "CreateMeasurementProtocolSecretRequest", "CreatePropertyRequest", "CreateRollupPropertyRequest", @@ -369,11 +390,10 @@ "CreateSearchAds360LinkRequest", "CreateSKAdNetworkConversionValueSchemaRequest", "CreateSubpropertyEventFilterRequest", - "CreateSubpropertyRequest", - "CreateSubpropertyResponse", "DeleteAccessBindingRequest", "DeleteAccountRequest", "DeleteAdSenseLinkRequest", + "DeleteBigQueryLinkRequest", "DeleteCalculatedMetricRequest", "DeleteChannelGroupRequest", "DeleteConnectedSiteTagRequest", @@ -382,9 +402,11 @@ "DeleteDisplayVideo360AdvertiserLinkProposalRequest", "DeleteDisplayVideo360AdvertiserLinkRequest", "DeleteEventCreateRuleRequest", + "DeleteEventEditRuleRequest", "DeleteExpandedDataSetRequest", "DeleteFirebaseLinkRequest", "DeleteGoogleAdsLinkRequest", + "DeleteKeyEventRequest", 
"DeleteMeasurementProtocolSecretRequest", "DeletePropertyRequest", "DeleteRollupPropertySourceLinkRequest", @@ -414,9 +436,11 @@ "GetDisplayVideo360AdvertiserLinkRequest", "GetEnhancedMeasurementSettingsRequest", "GetEventCreateRuleRequest", + "GetEventEditRuleRequest", "GetExpandedDataSetRequest", "GetGlobalSiteTagRequest", "GetGoogleSignalsSettingsRequest", + "GetKeyEventRequest", "GetMeasurementProtocolSecretRequest", "GetPropertyRequest", "GetRollupPropertySourceLinkRequest", @@ -455,12 +479,16 @@ "ListDisplayVideo360AdvertiserLinksResponse", "ListEventCreateRulesRequest", "ListEventCreateRulesResponse", + "ListEventEditRulesRequest", + "ListEventEditRulesResponse", "ListExpandedDataSetsRequest", "ListExpandedDataSetsResponse", "ListFirebaseLinksRequest", "ListFirebaseLinksResponse", "ListGoogleAdsLinksRequest", "ListGoogleAdsLinksResponse", + "ListKeyEventsRequest", + "ListKeyEventsResponse", "ListMeasurementProtocolSecretsRequest", "ListMeasurementProtocolSecretsResponse", "ListPropertiesRequest", @@ -475,6 +503,9 @@ "ListSubpropertyEventFiltersResponse", "ProvisionAccountTicketRequest", "ProvisionAccountTicketResponse", + "ProvisionSubpropertyRequest", + "ProvisionSubpropertyResponse", + "ReorderEventEditRulesRequest", "RunAccessReportRequest", "RunAccessReportResponse", "SearchChangeHistoryEventsRequest", @@ -485,6 +516,7 @@ "UpdateAccountRequest", "UpdateAttributionSettingsRequest", "UpdateAudienceRequest", + "UpdateBigQueryLinkRequest", "UpdateCalculatedMetricRequest", "UpdateChannelGroupRequest", "UpdateConversionEventRequest", @@ -496,9 +528,11 @@ "UpdateDisplayVideo360AdvertiserLinkRequest", "UpdateEnhancedMeasurementSettingsRequest", "UpdateEventCreateRuleRequest", + "UpdateEventEditRuleRequest", "UpdateExpandedDataSetRequest", "UpdateGoogleAdsLinkRequest", "UpdateGoogleSignalsSettingsRequest", + "UpdateKeyEventRequest", "UpdateMeasurementProtocolSecretRequest", "UpdatePropertyRequest", "UpdateSearchAds360LinkRequest", @@ -520,6 +554,7 @@ 
"ChannelGroupFilterExpressionList", "GroupingRule", "EventCreateRule", + "EventEditRule", "MatchingCondition", "ParameterMutation", "ExpandedDataSet", @@ -552,6 +587,7 @@ "GlobalSiteTag", "GoogleAdsLink", "GoogleSignalsSettings", + "KeyEvent", "LinkProposalStatusDetails", "MeasurementProtocolSecret", "PostbackWindow", diff --git a/packages/google-analytics-admin/google/analytics/admin/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin/gapic_version.py index f56358e27bf1..558c8aab67c5 100644 --- a/packages/google-analytics-admin/google/analytics/admin/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.22.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/__init__.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/__init__.py index a3eb8f302be4..6231a2bcad0d 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/__init__.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/__init__.py @@ -62,6 +62,7 @@ CreateAccessBindingRequest, CreateAdSenseLinkRequest, CreateAudienceRequest, + CreateBigQueryLinkRequest, CreateCalculatedMetricRequest, CreateChannelGroupRequest, CreateConnectedSiteTagRequest, @@ -73,9 +74,11 @@ CreateDisplayVideo360AdvertiserLinkProposalRequest, CreateDisplayVideo360AdvertiserLinkRequest, CreateEventCreateRuleRequest, + CreateEventEditRuleRequest, CreateExpandedDataSetRequest, CreateFirebaseLinkRequest, CreateGoogleAdsLinkRequest, + CreateKeyEventRequest, CreateMeasurementProtocolSecretRequest, CreatePropertyRequest, CreateRollupPropertyRequest, @@ -84,11 +87,10 @@ CreateSearchAds360LinkRequest, CreateSKAdNetworkConversionValueSchemaRequest, CreateSubpropertyEventFilterRequest, - 
CreateSubpropertyRequest, - CreateSubpropertyResponse, DeleteAccessBindingRequest, DeleteAccountRequest, DeleteAdSenseLinkRequest, + DeleteBigQueryLinkRequest, DeleteCalculatedMetricRequest, DeleteChannelGroupRequest, DeleteConnectedSiteTagRequest, @@ -97,9 +99,11 @@ DeleteDisplayVideo360AdvertiserLinkProposalRequest, DeleteDisplayVideo360AdvertiserLinkRequest, DeleteEventCreateRuleRequest, + DeleteEventEditRuleRequest, DeleteExpandedDataSetRequest, DeleteFirebaseLinkRequest, DeleteGoogleAdsLinkRequest, + DeleteKeyEventRequest, DeleteMeasurementProtocolSecretRequest, DeletePropertyRequest, DeleteRollupPropertySourceLinkRequest, @@ -129,9 +133,11 @@ GetDisplayVideo360AdvertiserLinkRequest, GetEnhancedMeasurementSettingsRequest, GetEventCreateRuleRequest, + GetEventEditRuleRequest, GetExpandedDataSetRequest, GetGlobalSiteTagRequest, GetGoogleSignalsSettingsRequest, + GetKeyEventRequest, GetMeasurementProtocolSecretRequest, GetPropertyRequest, GetRollupPropertySourceLinkRequest, @@ -170,12 +176,16 @@ ListDisplayVideo360AdvertiserLinksResponse, ListEventCreateRulesRequest, ListEventCreateRulesResponse, + ListEventEditRulesRequest, + ListEventEditRulesResponse, ListExpandedDataSetsRequest, ListExpandedDataSetsResponse, ListFirebaseLinksRequest, ListFirebaseLinksResponse, ListGoogleAdsLinksRequest, ListGoogleAdsLinksResponse, + ListKeyEventsRequest, + ListKeyEventsResponse, ListMeasurementProtocolSecretsRequest, ListMeasurementProtocolSecretsResponse, ListPropertiesRequest, @@ -190,6 +200,9 @@ ListSubpropertyEventFiltersResponse, ProvisionAccountTicketRequest, ProvisionAccountTicketResponse, + ProvisionSubpropertyRequest, + ProvisionSubpropertyResponse, + ReorderEventEditRulesRequest, RunAccessReportRequest, RunAccessReportResponse, SearchChangeHistoryEventsRequest, @@ -200,6 +213,7 @@ UpdateAccountRequest, UpdateAttributionSettingsRequest, UpdateAudienceRequest, + UpdateBigQueryLinkRequest, UpdateCalculatedMetricRequest, UpdateChannelGroupRequest, 
UpdateConversionEventRequest, @@ -211,9 +225,11 @@ UpdateDisplayVideo360AdvertiserLinkRequest, UpdateEnhancedMeasurementSettingsRequest, UpdateEventCreateRuleRequest, + UpdateEventEditRuleRequest, UpdateExpandedDataSetRequest, UpdateGoogleAdsLinkRequest, UpdateGoogleSignalsSettingsRequest, + UpdateKeyEventRequest, UpdateMeasurementProtocolSecretRequest, UpdatePropertyRequest, UpdateSearchAds360LinkRequest, @@ -241,6 +257,7 @@ ) from .types.event_create_and_edit import ( EventCreateRule, + EventEditRule, MatchingCondition, ParameterMutation, ) @@ -284,6 +301,7 @@ GoogleSignalsSettings, GoogleSignalsState, IndustryCategory, + KeyEvent, LinkProposalInitiatingProduct, LinkProposalState, LinkProposalStatusDetails, @@ -374,6 +392,7 @@ "CreateAccessBindingRequest", "CreateAdSenseLinkRequest", "CreateAudienceRequest", + "CreateBigQueryLinkRequest", "CreateCalculatedMetricRequest", "CreateChannelGroupRequest", "CreateConnectedSiteTagRequest", @@ -385,9 +404,11 @@ "CreateDisplayVideo360AdvertiserLinkProposalRequest", "CreateDisplayVideo360AdvertiserLinkRequest", "CreateEventCreateRuleRequest", + "CreateEventEditRuleRequest", "CreateExpandedDataSetRequest", "CreateFirebaseLinkRequest", "CreateGoogleAdsLinkRequest", + "CreateKeyEventRequest", "CreateMeasurementProtocolSecretRequest", "CreatePropertyRequest", "CreateRollupPropertyRequest", @@ -396,8 +417,6 @@ "CreateSKAdNetworkConversionValueSchemaRequest", "CreateSearchAds360LinkRequest", "CreateSubpropertyEventFilterRequest", - "CreateSubpropertyRequest", - "CreateSubpropertyResponse", "CustomDimension", "CustomMetric", "DataRedactionSettings", @@ -407,6 +426,7 @@ "DeleteAccessBindingRequest", "DeleteAccountRequest", "DeleteAdSenseLinkRequest", + "DeleteBigQueryLinkRequest", "DeleteCalculatedMetricRequest", "DeleteChannelGroupRequest", "DeleteConnectedSiteTagRequest", @@ -415,9 +435,11 @@ "DeleteDisplayVideo360AdvertiserLinkProposalRequest", "DeleteDisplayVideo360AdvertiserLinkRequest", "DeleteEventCreateRuleRequest", + 
"DeleteEventEditRuleRequest", "DeleteExpandedDataSetRequest", "DeleteFirebaseLinkRequest", "DeleteGoogleAdsLinkRequest", + "DeleteKeyEventRequest", "DeleteMeasurementProtocolSecretRequest", "DeletePropertyRequest", "DeleteRollupPropertySourceLinkRequest", @@ -428,6 +450,7 @@ "DisplayVideo360AdvertiserLinkProposal", "EnhancedMeasurementSettings", "EventCreateRule", + "EventEditRule", "EventMapping", "ExpandedDataSet", "ExpandedDataSetFilter", @@ -457,9 +480,11 @@ "GetDisplayVideo360AdvertiserLinkRequest", "GetEnhancedMeasurementSettingsRequest", "GetEventCreateRuleRequest", + "GetEventEditRuleRequest", "GetExpandedDataSetRequest", "GetGlobalSiteTagRequest", "GetGoogleSignalsSettingsRequest", + "GetKeyEventRequest", "GetMeasurementProtocolSecretRequest", "GetPropertyRequest", "GetRollupPropertySourceLinkRequest", @@ -473,6 +498,7 @@ "GoogleSignalsState", "GroupingRule", "IndustryCategory", + "KeyEvent", "LinkProposalInitiatingProduct", "LinkProposalState", "LinkProposalStatusDetails", @@ -508,12 +534,16 @@ "ListDisplayVideo360AdvertiserLinksResponse", "ListEventCreateRulesRequest", "ListEventCreateRulesResponse", + "ListEventEditRulesRequest", + "ListEventEditRulesResponse", "ListExpandedDataSetsRequest", "ListExpandedDataSetsResponse", "ListFirebaseLinksRequest", "ListFirebaseLinksResponse", "ListGoogleAdsLinksRequest", "ListGoogleAdsLinksResponse", + "ListKeyEventsRequest", + "ListKeyEventsResponse", "ListMeasurementProtocolSecretsRequest", "ListMeasurementProtocolSecretsResponse", "ListPropertiesRequest", @@ -536,6 +566,9 @@ "PropertyType", "ProvisionAccountTicketRequest", "ProvisionAccountTicketResponse", + "ProvisionSubpropertyRequest", + "ProvisionSubpropertyResponse", + "ReorderEventEditRulesRequest", "RollupPropertySourceLink", "RunAccessReportRequest", "RunAccessReportResponse", @@ -555,6 +588,7 @@ "UpdateAccountRequest", "UpdateAttributionSettingsRequest", "UpdateAudienceRequest", + "UpdateBigQueryLinkRequest", "UpdateCalculatedMetricRequest", 
"UpdateChannelGroupRequest", "UpdateConversionEventRequest", @@ -566,9 +600,11 @@ "UpdateDisplayVideo360AdvertiserLinkRequest", "UpdateEnhancedMeasurementSettingsRequest", "UpdateEventCreateRuleRequest", + "UpdateEventEditRuleRequest", "UpdateExpandedDataSetRequest", "UpdateGoogleAdsLinkRequest", "UpdateGoogleSignalsSettingsRequest", + "UpdateKeyEventRequest", "UpdateMeasurementProtocolSecretRequest", "UpdatePropertyRequest", "UpdateSKAdNetworkConversionValueSchemaRequest", diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_metadata.json b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_metadata.json index a6360caf93cd..d379bf7a5952 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_metadata.json +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_metadata.json @@ -75,6 +75,11 @@ "create_audience" ] }, + "CreateBigQueryLink": { + "methods": [ + "create_big_query_link" + ] + }, "CreateCalculatedMetric": { "methods": [ "create_calculated_metric" @@ -125,6 +130,11 @@ "create_event_create_rule" ] }, + "CreateEventEditRule": { + "methods": [ + "create_event_edit_rule" + ] + }, "CreateExpandedDataSet": { "methods": [ "create_expanded_data_set" @@ -140,6 +150,11 @@ "create_google_ads_link" ] }, + "CreateKeyEvent": { + "methods": [ + "create_key_event" + ] + }, "CreateMeasurementProtocolSecret": { "methods": [ "create_measurement_protocol_secret" @@ -170,11 +185,6 @@ "create_search_ads360_link" ] }, - "CreateSubproperty": { - "methods": [ - "create_subproperty" - ] - }, "CreateSubpropertyEventFilter": { "methods": [ "create_subproperty_event_filter" @@ -195,6 +205,11 @@ "delete_ad_sense_link" ] }, + "DeleteBigQueryLink": { + "methods": [ + "delete_big_query_link" + ] + }, "DeleteCalculatedMetric": { "methods": [ "delete_calculated_metric" @@ -235,6 +250,11 @@ "delete_event_create_rule" ] }, + "DeleteEventEditRule": { + "methods": [ + "delete_event_edit_rule" + ] + }, 
"DeleteExpandedDataSet": { "methods": [ "delete_expanded_data_set" @@ -250,6 +270,11 @@ "delete_google_ads_link" ] }, + "DeleteKeyEvent": { + "methods": [ + "delete_key_event" + ] + }, "DeleteMeasurementProtocolSecret": { "methods": [ "delete_measurement_protocol_secret" @@ -385,6 +410,11 @@ "get_event_create_rule" ] }, + "GetEventEditRule": { + "methods": [ + "get_event_edit_rule" + ] + }, "GetExpandedDataSet": { "methods": [ "get_expanded_data_set" @@ -400,6 +430,11 @@ "get_google_signals_settings" ] }, + "GetKeyEvent": { + "methods": [ + "get_key_event" + ] + }, "GetMeasurementProtocolSecret": { "methods": [ "get_measurement_protocol_secret" @@ -510,6 +545,11 @@ "list_event_create_rules" ] }, + "ListEventEditRules": { + "methods": [ + "list_event_edit_rules" + ] + }, "ListExpandedDataSets": { "methods": [ "list_expanded_data_sets" @@ -525,6 +565,11 @@ "list_google_ads_links" ] }, + "ListKeyEvents": { + "methods": [ + "list_key_events" + ] + }, "ListMeasurementProtocolSecrets": { "methods": [ "list_measurement_protocol_secrets" @@ -560,6 +605,16 @@ "provision_account_ticket" ] }, + "ProvisionSubproperty": { + "methods": [ + "provision_subproperty" + ] + }, + "ReorderEventEditRules": { + "methods": [ + "reorder_event_edit_rules" + ] + }, "RunAccessReport": { "methods": [ "run_access_report" @@ -595,6 +650,11 @@ "update_audience" ] }, + "UpdateBigQueryLink": { + "methods": [ + "update_big_query_link" + ] + }, "UpdateCalculatedMetric": { "methods": [ "update_calculated_metric" @@ -650,6 +710,11 @@ "update_event_create_rule" ] }, + "UpdateEventEditRule": { + "methods": [ + "update_event_edit_rule" + ] + }, "UpdateExpandedDataSet": { "methods": [ "update_expanded_data_set" @@ -665,6 +730,11 @@ "update_google_signals_settings" ] }, + "UpdateKeyEvent": { + "methods": [ + "update_key_event" + ] + }, "UpdateMeasurementProtocolSecret": { "methods": [ "update_measurement_protocol_secret" @@ -760,6 +830,11 @@ "create_audience" ] }, + "CreateBigQueryLink": { + "methods": [ + 
"create_big_query_link" + ] + }, "CreateCalculatedMetric": { "methods": [ "create_calculated_metric" @@ -810,6 +885,11 @@ "create_event_create_rule" ] }, + "CreateEventEditRule": { + "methods": [ + "create_event_edit_rule" + ] + }, "CreateExpandedDataSet": { "methods": [ "create_expanded_data_set" @@ -825,6 +905,11 @@ "create_google_ads_link" ] }, + "CreateKeyEvent": { + "methods": [ + "create_key_event" + ] + }, "CreateMeasurementProtocolSecret": { "methods": [ "create_measurement_protocol_secret" @@ -855,11 +940,6 @@ "create_search_ads360_link" ] }, - "CreateSubproperty": { - "methods": [ - "create_subproperty" - ] - }, "CreateSubpropertyEventFilter": { "methods": [ "create_subproperty_event_filter" @@ -880,6 +960,11 @@ "delete_ad_sense_link" ] }, + "DeleteBigQueryLink": { + "methods": [ + "delete_big_query_link" + ] + }, "DeleteCalculatedMetric": { "methods": [ "delete_calculated_metric" @@ -920,6 +1005,11 @@ "delete_event_create_rule" ] }, + "DeleteEventEditRule": { + "methods": [ + "delete_event_edit_rule" + ] + }, "DeleteExpandedDataSet": { "methods": [ "delete_expanded_data_set" @@ -935,6 +1025,11 @@ "delete_google_ads_link" ] }, + "DeleteKeyEvent": { + "methods": [ + "delete_key_event" + ] + }, "DeleteMeasurementProtocolSecret": { "methods": [ "delete_measurement_protocol_secret" @@ -1070,6 +1165,11 @@ "get_event_create_rule" ] }, + "GetEventEditRule": { + "methods": [ + "get_event_edit_rule" + ] + }, "GetExpandedDataSet": { "methods": [ "get_expanded_data_set" @@ -1085,6 +1185,11 @@ "get_google_signals_settings" ] }, + "GetKeyEvent": { + "methods": [ + "get_key_event" + ] + }, "GetMeasurementProtocolSecret": { "methods": [ "get_measurement_protocol_secret" @@ -1195,6 +1300,11 @@ "list_event_create_rules" ] }, + "ListEventEditRules": { + "methods": [ + "list_event_edit_rules" + ] + }, "ListExpandedDataSets": { "methods": [ "list_expanded_data_sets" @@ -1210,6 +1320,11 @@ "list_google_ads_links" ] }, + "ListKeyEvents": { + "methods": [ + "list_key_events" + 
] + }, "ListMeasurementProtocolSecrets": { "methods": [ "list_measurement_protocol_secrets" @@ -1245,6 +1360,16 @@ "provision_account_ticket" ] }, + "ProvisionSubproperty": { + "methods": [ + "provision_subproperty" + ] + }, + "ReorderEventEditRules": { + "methods": [ + "reorder_event_edit_rules" + ] + }, "RunAccessReport": { "methods": [ "run_access_report" @@ -1280,6 +1405,11 @@ "update_audience" ] }, + "UpdateBigQueryLink": { + "methods": [ + "update_big_query_link" + ] + }, "UpdateCalculatedMetric": { "methods": [ "update_calculated_metric" @@ -1335,6 +1465,11 @@ "update_event_create_rule" ] }, + "UpdateEventEditRule": { + "methods": [ + "update_event_edit_rule" + ] + }, "UpdateExpandedDataSet": { "methods": [ "update_expanded_data_set" @@ -1350,6 +1485,11 @@ "update_google_signals_settings" ] }, + "UpdateKeyEvent": { + "methods": [ + "update_key_event" + ] + }, "UpdateMeasurementProtocolSecret": { "methods": [ "update_measurement_protocol_secret" @@ -1445,6 +1585,11 @@ "create_audience" ] }, + "CreateBigQueryLink": { + "methods": [ + "create_big_query_link" + ] + }, "CreateCalculatedMetric": { "methods": [ "create_calculated_metric" @@ -1495,6 +1640,11 @@ "create_event_create_rule" ] }, + "CreateEventEditRule": { + "methods": [ + "create_event_edit_rule" + ] + }, "CreateExpandedDataSet": { "methods": [ "create_expanded_data_set" @@ -1510,6 +1660,11 @@ "create_google_ads_link" ] }, + "CreateKeyEvent": { + "methods": [ + "create_key_event" + ] + }, "CreateMeasurementProtocolSecret": { "methods": [ "create_measurement_protocol_secret" @@ -1540,11 +1695,6 @@ "create_search_ads360_link" ] }, - "CreateSubproperty": { - "methods": [ - "create_subproperty" - ] - }, "CreateSubpropertyEventFilter": { "methods": [ "create_subproperty_event_filter" @@ -1565,6 +1715,11 @@ "delete_ad_sense_link" ] }, + "DeleteBigQueryLink": { + "methods": [ + "delete_big_query_link" + ] + }, "DeleteCalculatedMetric": { "methods": [ "delete_calculated_metric" @@ -1605,6 +1760,11 @@ 
"delete_event_create_rule" ] }, + "DeleteEventEditRule": { + "methods": [ + "delete_event_edit_rule" + ] + }, "DeleteExpandedDataSet": { "methods": [ "delete_expanded_data_set" @@ -1620,6 +1780,11 @@ "delete_google_ads_link" ] }, + "DeleteKeyEvent": { + "methods": [ + "delete_key_event" + ] + }, "DeleteMeasurementProtocolSecret": { "methods": [ "delete_measurement_protocol_secret" @@ -1755,6 +1920,11 @@ "get_event_create_rule" ] }, + "GetEventEditRule": { + "methods": [ + "get_event_edit_rule" + ] + }, "GetExpandedDataSet": { "methods": [ "get_expanded_data_set" @@ -1770,6 +1940,11 @@ "get_google_signals_settings" ] }, + "GetKeyEvent": { + "methods": [ + "get_key_event" + ] + }, "GetMeasurementProtocolSecret": { "methods": [ "get_measurement_protocol_secret" @@ -1880,6 +2055,11 @@ "list_event_create_rules" ] }, + "ListEventEditRules": { + "methods": [ + "list_event_edit_rules" + ] + }, "ListExpandedDataSets": { "methods": [ "list_expanded_data_sets" @@ -1895,6 +2075,11 @@ "list_google_ads_links" ] }, + "ListKeyEvents": { + "methods": [ + "list_key_events" + ] + }, "ListMeasurementProtocolSecrets": { "methods": [ "list_measurement_protocol_secrets" @@ -1930,6 +2115,16 @@ "provision_account_ticket" ] }, + "ProvisionSubproperty": { + "methods": [ + "provision_subproperty" + ] + }, + "ReorderEventEditRules": { + "methods": [ + "reorder_event_edit_rules" + ] + }, "RunAccessReport": { "methods": [ "run_access_report" @@ -1965,6 +2160,11 @@ "update_audience" ] }, + "UpdateBigQueryLink": { + "methods": [ + "update_big_query_link" + ] + }, "UpdateCalculatedMetric": { "methods": [ "update_calculated_metric" @@ -2020,6 +2220,11 @@ "update_event_create_rule" ] }, + "UpdateEventEditRule": { + "methods": [ + "update_event_edit_rule" + ] + }, "UpdateExpandedDataSet": { "methods": [ "update_expanded_data_set" @@ -2035,6 +2240,11 @@ "update_google_signals_settings" ] }, + "UpdateKeyEvent": { + "methods": [ + "update_key_event" + ] + }, "UpdateMeasurementProtocolSecret": { 
"methods": [ "update_measurement_protocol_secret" diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py index f56358e27bf1..558c8aab67c5 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.22.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py index 5375c8fec5f5..ef2d97ef9155 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -28,6 +27,7 @@ Type, Union, ) +import warnings from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -181,6 +181,12 @@ class AnalyticsAdminServiceAsyncClient: parse_event_create_rule_path = staticmethod( AnalyticsAdminServiceClient.parse_event_create_rule_path ) + event_edit_rule_path = staticmethod( + AnalyticsAdminServiceClient.event_edit_rule_path + ) + parse_event_edit_rule_path = staticmethod( + AnalyticsAdminServiceClient.parse_event_edit_rule_path + ) expanded_data_set_path = staticmethod( AnalyticsAdminServiceClient.expanded_data_set_path ) @@ -209,12 +215,20 @@ class AnalyticsAdminServiceAsyncClient: parse_google_signals_settings_path = staticmethod( AnalyticsAdminServiceClient.parse_google_signals_settings_path ) + key_event_path = staticmethod(AnalyticsAdminServiceClient.key_event_path) + parse_key_event_path = staticmethod( + AnalyticsAdminServiceClient.parse_key_event_path + ) measurement_protocol_secret_path = staticmethod( AnalyticsAdminServiceClient.measurement_protocol_secret_path ) parse_measurement_protocol_secret_path = staticmethod( AnalyticsAdminServiceClient.parse_measurement_protocol_secret_path ) + organization_path = staticmethod(AnalyticsAdminServiceClient.organization_path) + parse_organization_path = staticmethod( + AnalyticsAdminServiceClient.parse_organization_path + ) property_path = staticmethod(AnalyticsAdminServiceClient.property_path) parse_property_path = staticmethod(AnalyticsAdminServiceClient.parse_property_path) rollup_property_source_link_path = staticmethod( @@ -365,10 +379,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnalyticsAdminServiceClient).get_transport_class, - type(AnalyticsAdminServiceClient), - ) + get_transport_class = AnalyticsAdminServiceClient.get_transport_class def __init__( self, 
@@ -1330,8 +1341,9 @@ async def create_firebase_link( The request object. Request message for CreateFirebaseLink RPC parent (:class:`str`): - Required. Format: properties/{property_id} Example: - properties/1234 + Required. Format: properties/{property_id} + + Example: ``properties/1234`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1423,7 +1435,8 @@ async def delete_firebase_link( name (:class:`str`): Required. Format: properties/{property_id}/firebaseLinks/{firebase_link_id} - Example: properties/1234/firebaseLinks/5678 + + Example: ``properties/1234/firebaseLinks/5678`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1494,8 +1507,9 @@ async def list_firebase_links( The request object. Request message for ListFirebaseLinks RPC parent (:class:`str`): - Required. Format: properties/{property_id} Example: - properties/1234 + Required. Format: properties/{property_id} + + Example: ``properties/1234`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1593,7 +1607,9 @@ async def get_global_site_tag( site tags are singletons and do not have unique IDs. Format: properties/{property_id}/dataStreams/{stream_id}/globalSiteTag - Example: "properties/123/dataStreams/456/globalSiteTag" + + Example: + ``properties/123/dataStreams/456/globalSiteTag`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2033,11 +2049,10 @@ async def get_data_sharing_settings( The request object. Request message for GetDataSharingSettings RPC. name (:class:`str`): - Required. The name of the settings to - lookup. Format: + Required. The name of the settings to lookup. 
Format: accounts/{account}/dataSharingSettings - Example: - "accounts/1000/dataSharingSettings" + + Example: ``accounts/1000/dataSharingSettings`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -3385,8 +3400,8 @@ async def create_conversion_event( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.ConversionEvent: - r"""Creates a conversion event with the specified - attributes. + r"""Deprecated: Use ``CreateKeyEvent`` instead. Creates a conversion + event with the specified attributes. Args: request (Optional[Union[google.analytics.admin_v1alpha.types.CreateConversionEventRequest, dict]]): @@ -3420,6 +3435,11 @@ async def create_conversion_event( Analytics property. """ + warnings.warn( + "AnalyticsAdminServiceAsyncClient.create_conversion_event is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -3480,8 +3500,8 @@ async def update_conversion_event( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.ConversionEvent: - r"""Updates a conversion event with the specified - attributes. + r"""Deprecated: Use ``UpdateKeyEvent`` instead. Updates a conversion + event with the specified attributes. Args: request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateConversionEventRequest, dict]]): @@ -3516,6 +3536,11 @@ async def update_conversion_event( Analytics property. """ + warnings.warn( + "AnalyticsAdminServiceAsyncClient.update_conversion_event is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
@@ -3577,7 +3602,8 @@ async def get_conversion_event( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.ConversionEvent: - r"""Retrieve a single conversion event. + r"""Deprecated: Use ``GetKeyEvent`` instead. Retrieve a single + conversion event. Args: request (Optional[Union[google.analytics.admin_v1alpha.types.GetConversionEventRequest, dict]]): @@ -3604,6 +3630,11 @@ async def get_conversion_event( Analytics property. """ + warnings.warn( + "AnalyticsAdminServiceAsyncClient.get_conversion_event is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -3661,7 +3692,8 @@ async def delete_conversion_event( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a conversion event in a property. + r"""Deprecated: Use ``DeleteKeyEvent`` instead. Deletes a conversion + event in a property. Args: request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteConversionEventRequest, dict]]): @@ -3682,6 +3714,11 @@ async def delete_conversion_event( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ + warnings.warn( + "AnalyticsAdminServiceAsyncClient.delete_conversion_event is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -3736,8 +3773,9 @@ async def list_conversion_events( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListConversionEventsAsyncPager: - r"""Returns a list of conversion events in the specified - parent property. + r"""Deprecated: Use ``ListKeyEvents`` instead. 
Returns a list of + conversion events in the specified parent property. + Returns an empty list if no conversion events are found. Args: @@ -3767,6 +3805,11 @@ async def list_conversion_events( automatically. """ + warnings.warn( + "AnalyticsAdminServiceAsyncClient.list_conversion_events is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -3824,30 +3867,33 @@ async def list_conversion_events( # Done; return the response. return response - async def get_display_video360_advertiser_link( + async def create_key_event( self, - request: Optional[ - Union[analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.CreateKeyEventRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + key_event: Optional[resources.KeyEvent] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DisplayVideo360AdvertiserLink: - r"""Look up a single DisplayVideo360AdvertiserLink + ) -> resources.KeyEvent: + r"""Creates a Key Event. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetDisplayVideo360AdvertiserLinkRequest, dict]]): - The request object. Request message for - GetDisplayVideo360AdvertiserLink RPC. - name (:class:`str`): - Required. The name of the - DisplayVideo360AdvertiserLink to get. - Example format: - properties/1234/displayVideo360AdvertiserLink/5678 + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateKeyEventRequest, dict]]): + The request object. Request message for CreateKeyEvent + RPC + parent (:class:`str`): + Required. The resource name of the + parent property where this Key Event + will be created. 
Format: properties/123 - This corresponds to the ``name`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + key_event (:class:`google.analytics.admin_v1alpha.types.KeyEvent`): + Required. The Key Event to create. + This corresponds to the ``key_event`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -3857,15 +3903,15 @@ async def get_display_video360_advertiser_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + google.analytics.admin_v1alpha.types.KeyEvent: + A key event in a Google Analytics + property. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent, key_event]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3874,26 +3920,26 @@ async def get_display_video360_advertiser_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance( - request, analytics_admin.GetDisplayVideo360AdvertiserLinkRequest - ): - request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest(request) + if not isinstance(request, analytics_admin.CreateKeyEventRequest): + request = analytics_admin.CreateKeyEventRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if key_event is not None: + request.key_event = key_event # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_display_video360_advertiser_link + self._client._transport.create_key_event ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -3910,29 +3956,37 @@ async def get_display_video360_advertiser_link( # Done; return the response. return response - async def list_display_video360_advertiser_links( + async def update_key_event( self, - request: Optional[ - Union[analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.UpdateKeyEventRequest, dict]] = None, *, - parent: Optional[str] = None, + key_event: Optional[resources.KeyEvent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDisplayVideo360AdvertiserLinksAsyncPager: - r"""Lists all DisplayVideo360AdvertiserLinks on a - property. + ) -> resources.KeyEvent: + r"""Updates a Key Event. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListDisplayVideo360AdvertiserLinksRequest, dict]]): - The request object. Request message for - ListDisplayVideo360AdvertiserLinks RPC. - parent (:class:`str`): - Required. Example format: - properties/1234 + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateKeyEventRequest, dict]]): + The request object. 
Request message for UpdateKeyEvent + RPC + key_event (:class:`google.analytics.admin_v1alpha.types.KeyEvent`): + Required. The Key Event to update. The ``name`` field is + used to identify the settings to be updated. - This corresponds to the ``parent`` field + This corresponds to the ``key_event`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -3942,18 +3996,15 @@ async def list_display_video360_advertiser_links( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListDisplayVideo360AdvertiserLinksAsyncPager: - Response message for - ListDisplayVideo360AdvertiserLinks RPC. - Iterating over this object will yield - results and resolve additional pages - automatically. + google.analytics.admin_v1alpha.types.KeyEvent: + A key event in a Google Analytics + property. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([key_event, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3962,26 +4013,28 @@ async def list_display_video360_advertiser_links( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance( - request, analytics_admin.ListDisplayVideo360AdvertiserLinksRequest - ): - request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest(request) + if not isinstance(request, analytics_admin.UpdateKeyEventRequest): + request = analytics_admin.UpdateKeyEventRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if key_event is not None: + request.key_event = key_event + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_display_video360_advertiser_links + self._client._transport.update_key_event ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("key_event.name", request.key_event.name),) + ), ) # Validate the universe domain. @@ -3995,57 +4048,30 @@ async def list_display_video360_advertiser_links( metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDisplayVideo360AdvertiserLinksAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - # Done; return the response. 
return response - async def create_display_video360_advertiser_link( + async def get_key_event( self, - request: Optional[ - Union[analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.GetKeyEventRequest, dict]] = None, *, - parent: Optional[str] = None, - display_video_360_advertiser_link: Optional[ - resources.DisplayVideo360AdvertiserLink - ] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DisplayVideo360AdvertiserLink: - r"""Creates a DisplayVideo360AdvertiserLink. - This can only be utilized by users who have proper - authorization both on the Google Analytics property and - on the Display & Video 360 advertiser. Users who do not - have access to the Display & Video 360 advertiser should - instead seek to create a DisplayVideo360LinkProposal. + ) -> resources.KeyEvent: + r"""Retrieve a single Key Event. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateDisplayVideo360AdvertiserLinkRequest, dict]]): - The request object. Request message for - CreateDisplayVideo360AdvertiserLink RPC. - parent (:class:`str`): - Required. Example format: - properties/1234 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - display_video_360_advertiser_link (:class:`google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink`): - Required. The - DisplayVideo360AdvertiserLink to create. + request (Optional[Union[google.analytics.admin_v1alpha.types.GetKeyEventRequest, dict]]): + The request object. Request message for GetKeyEvent RPC + name (:class:`str`): + Required. The resource name of the Key Event to + retrieve. 
Format: + properties/{property}/keyEvents/{key_event} Example: + "properties/123/keyEvents/456" - This corresponds to the ``display_video_360_advertiser_link`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -4055,15 +4081,15 @@ async def create_display_video360_advertiser_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + google.analytics.admin_v1alpha.types.KeyEvent: + A key event in a Google Analytics + property. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, display_video_360_advertiser_link]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4072,32 +4098,24 @@ async def create_display_video360_advertiser_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance( - request, analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest - ): - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest( - request - ) + if not isinstance(request, analytics_admin.GetKeyEventRequest): + request = analytics_admin.GetKeyEventRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: - request.parent = parent - if display_video_360_advertiser_link is not None: - request.display_video_360_advertiser_link = ( - display_video_360_advertiser_link - ) + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_display_video360_advertiser_link + self._client._transport.get_key_event ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -4114,29 +4132,25 @@ async def create_display_video360_advertiser_link( # Done; return the response. return response - async def delete_display_video360_advertiser_link( + async def delete_key_event( self, - request: Optional[ - Union[analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.DeleteKeyEventRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a DisplayVideo360AdvertiserLink on a - property. + r"""Deletes a Key Event. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteDisplayVideo360AdvertiserLinkRequest, dict]]): - The request object. Request message for - DeleteDisplayVideo360AdvertiserLink RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteKeyEventRequest, dict]]): + The request object. Request message for DeleteKeyEvent + RPC name (:class:`str`): - Required. The name of the - DisplayVideo360AdvertiserLink to delete. - Example format: - properties/1234/displayVideo360AdvertiserLinks/5678 + Required. 
The resource name of the Key Event to delete. + Format: properties/{property}/keyEvents/{key_event} + Example: "properties/123/keyEvents/456" This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -4159,12 +4173,8 @@ async def delete_display_video360_advertiser_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance( - request, analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest - ): - request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest( - request - ) + if not isinstance(request, analytics_admin.DeleteKeyEventRequest): + request = analytics_admin.DeleteKeyEventRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -4174,7 +4184,7 @@ async def delete_display_video360_advertiser_link( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_display_video360_advertiser_link + self._client._transport.delete_key_event ] # Certain fields should be provided within the metadata header; @@ -4194,41 +4204,28 @@ async def delete_display_video360_advertiser_link( metadata=metadata, ) - async def update_display_video360_advertiser_link( + async def list_key_events( self, - request: Optional[ - Union[analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.ListKeyEventsRequest, dict]] = None, *, - display_video_360_advertiser_link: Optional[ - resources.DisplayVideo360AdvertiserLink - ] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DisplayVideo360AdvertiserLink: - r"""Updates a DisplayVideo360AdvertiserLink on a - property. + ) -> pagers.ListKeyEventsAsyncPager: + r"""Returns a list of Key Events in the specified parent + property. Returns an empty list if no Key Events are + found. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateDisplayVideo360AdvertiserLinkRequest, dict]]): - The request object. Request message for - UpdateDisplayVideo360AdvertiserLink RPC. - display_video_360_advertiser_link (:class:`google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink`): - The DisplayVideo360AdvertiserLink to - update - - This corresponds to the ``display_video_360_advertiser_link`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to be updated. Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. 
+ request (Optional[Union[google.analytics.admin_v1alpha.types.ListKeyEventsRequest, dict]]): + The request object. Request message for ListKeyEvents RPC + parent (:class:`str`): + Required. The resource name of the + parent property. Example: + 'properties/123' - This corresponds to the ``update_mask`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -4238,15 +4235,18 @@ async def update_display_video360_advertiser_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListKeyEventsAsyncPager: + Response message for ListKeyEvents + RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([display_video_360_advertiser_link, update_mask]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4255,39 +4255,24 @@ async def update_display_video360_advertiser_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance( - request, analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest - ): - request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest( - request - ) + if not isinstance(request, analytics_admin.ListKeyEventsRequest): + request = analytics_admin.ListKeyEventsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if display_video_360_advertiser_link is not None: - request.display_video_360_advertiser_link = ( - display_video_360_advertiser_link - ) - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_display_video360_advertiser_link + self._client._transport.list_key_events ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ( - "display_video_360_advertiser_link.name", - request.display_video_360_advertiser_link.name, - ), - ) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -4301,33 +4286,42 @@ async def update_display_video360_advertiser_link( metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListKeyEventsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. 
return response - async def get_display_video360_advertiser_link_proposal( + async def get_display_video360_advertiser_link( self, request: Optional[ - Union[analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, dict] + Union[analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, dict] ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DisplayVideo360AdvertiserLinkProposal: - r"""Lookup for a single - DisplayVideo360AdvertiserLinkProposal. + ) -> resources.DisplayVideo360AdvertiserLink: + r"""Look up a single DisplayVideo360AdvertiserLink Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetDisplayVideo360AdvertiserLinkProposalRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.GetDisplayVideo360AdvertiserLinkRequest, dict]]): The request object. Request message for - GetDisplayVideo360AdvertiserLinkProposal - RPC. + GetDisplayVideo360AdvertiserLink RPC. name (:class:`str`): Required. The name of the - DisplayVideo360AdvertiserLinkProposal to - get. Example format: - properties/1234/displayVideo360AdvertiserLinkProposals/5678 + DisplayVideo360AdvertiserLink to get. + Example format: + properties/1234/displayVideo360AdvertiserLink/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -4339,17 +4333,9 @@ async def get_display_video360_advertiser_link_proposal( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: - A proposal for a link between a GA4 - property and a Display & Video 360 - advertiser. - - A proposal is converted to a - DisplayVideo360AdvertiserLink once - approved. Google Analytics admins - approve inbound proposals while Display - & Video 360 admins approve outbound - proposals. 
+ google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: + A link between a GA4 property and a + Display & Video 360 advertiser. """ # Create or coerce a protobuf request object. @@ -4365,11 +4351,9 @@ async def get_display_video360_advertiser_link_proposal( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. if not isinstance( - request, analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest + request, analytics_admin.GetDisplayVideo360AdvertiserLinkRequest ): - request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest( - request - ) + request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -4379,7 +4363,7 @@ async def get_display_video360_advertiser_link_proposal( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_display_video360_advertiser_link_proposal + self._client._transport.get_display_video360_advertiser_link ] # Certain fields should be provided within the metadata header; @@ -4402,27 +4386,24 @@ async def get_display_video360_advertiser_link_proposal( # Done; return the response. 
return response - async def list_display_video360_advertiser_link_proposals( + async def list_display_video360_advertiser_links( self, request: Optional[ - Union[ - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, dict - ] + Union[analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, dict] ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDisplayVideo360AdvertiserLinkProposalsAsyncPager: - r"""Lists DisplayVideo360AdvertiserLinkProposals on a + ) -> pagers.ListDisplayVideo360AdvertiserLinksAsyncPager: + r"""Lists all DisplayVideo360AdvertiserLinks on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListDisplayVideo360AdvertiserLinkProposalsRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.ListDisplayVideo360AdvertiserLinksRequest, dict]]): The request object. Request message for - ListDisplayVideo360AdvertiserLinkProposals - RPC. + ListDisplayVideo360AdvertiserLinks RPC. parent (:class:`str`): Required. Example format: properties/1234 @@ -4437,12 +4418,12 @@ async def list_display_video360_advertiser_link_proposals( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListDisplayVideo360AdvertiserLinkProposalsAsyncPager: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListDisplayVideo360AdvertiserLinksAsyncPager: Response message for - ListDisplayVideo360AdvertiserLinkProposals - RPC. Iterating over this object will - yield results and resolve additional - pages automatically. + ListDisplayVideo360AdvertiserLinks RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. 
@@ -4458,11 +4439,9 @@ async def list_display_video360_advertiser_link_proposals( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. if not isinstance( - request, analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest + request, analytics_admin.ListDisplayVideo360AdvertiserLinksRequest ): - request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest( - request - ) + request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -4472,7 +4451,7 @@ async def list_display_video360_advertiser_link_proposals( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_display_video360_advertiser_link_proposals + self._client._transport.list_display_video360_advertiser_links ] # Certain fields should be provided within the metadata header; @@ -4494,7 +4473,7 @@ async def list_display_video360_advertiser_link_proposals( # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListDisplayVideo360AdvertiserLinkProposalsAsyncPager( + response = pagers.ListDisplayVideo360AdvertiserLinksAsyncPager( method=rpc, request=request, response=response, @@ -4506,29 +4485,31 @@ async def list_display_video360_advertiser_link_proposals( # Done; return the response. 
return response - async def create_display_video360_advertiser_link_proposal( + async def create_display_video360_advertiser_link( self, request: Optional[ - Union[ - analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, dict - ] + Union[analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, dict] ] = None, *, parent: Optional[str] = None, - display_video_360_advertiser_link_proposal: Optional[ - resources.DisplayVideo360AdvertiserLinkProposal + display_video_360_advertiser_link: Optional[ + resources.DisplayVideo360AdvertiserLink ] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DisplayVideo360AdvertiserLinkProposal: - r"""Creates a DisplayVideo360AdvertiserLinkProposal. + ) -> resources.DisplayVideo360AdvertiserLink: + r"""Creates a DisplayVideo360AdvertiserLink. + This can only be utilized by users who have proper + authorization both on the Google Analytics property and + on the Display & Video 360 advertiser. Users who do not + have access to the Display & Video 360 advertiser should + instead seek to create a DisplayVideo360LinkProposal. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateDisplayVideo360AdvertiserLinkProposalRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateDisplayVideo360AdvertiserLinkRequest, dict]]): The request object. Request message for - CreateDisplayVideo360AdvertiserLinkProposal - RPC. + CreateDisplayVideo360AdvertiserLink RPC. parent (:class:`str`): Required. Example format: properties/1234 @@ -4536,12 +4517,11 @@ async def create_display_video360_advertiser_link_proposal( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- display_video_360_advertiser_link_proposal (:class:`google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal`): + display_video_360_advertiser_link (:class:`google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink`): Required. The - DisplayVideo360AdvertiserLinkProposal to - create. + DisplayVideo360AdvertiserLink to create. - This corresponds to the ``display_video_360_advertiser_link_proposal`` field + This corresponds to the ``display_video_360_advertiser_link`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -4551,23 +4531,15 @@ async def create_display_video360_advertiser_link_proposal( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: - A proposal for a link between a GA4 - property and a Display & Video 360 - advertiser. - - A proposal is converted to a - DisplayVideo360AdvertiserLink once - approved. Google Analytics admins - approve inbound proposals while Display - & Video 360 admins approve outbound - proposals. + google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: + A link between a GA4 property and a + Display & Video 360 advertiser. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, display_video_360_advertiser_link_proposal]) + has_flattened_params = any([parent, display_video_360_advertiser_link]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4577,27 +4549,25 @@ async def create_display_video360_advertiser_link_proposal( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
if not isinstance( - request, analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest + request, analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest ): - request = ( - analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest( - request - ) + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest( + request ) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if display_video_360_advertiser_link_proposal is not None: - request.display_video_360_advertiser_link_proposal = ( - display_video_360_advertiser_link_proposal + if display_video_360_advertiser_link is not None: + request.display_video_360_advertiser_link = ( + display_video_360_advertiser_link ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_display_video360_advertiser_link_proposal + self._client._transport.create_display_video360_advertiser_link ] # Certain fields should be provided within the metadata header; @@ -4620,12 +4590,10 @@ async def create_display_video360_advertiser_link_proposal( # Done; return the response. return response - async def delete_display_video360_advertiser_link_proposal( + async def delete_display_video360_advertiser_link( self, request: Optional[ - Union[ - analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, dict - ] + Union[analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, dict] ] = None, *, name: Optional[str] = None, @@ -4633,19 +4601,18 @@ async def delete_display_video360_advertiser_link_proposal( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a DisplayVideo360AdvertiserLinkProposal on a - property. This can only be used on cancelled proposals. + r"""Deletes a DisplayVideo360AdvertiserLink on a + property. 
Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteDisplayVideo360AdvertiserLinkProposalRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteDisplayVideo360AdvertiserLinkRequest, dict]]): The request object. Request message for - DeleteDisplayVideo360AdvertiserLinkProposal - RPC. + DeleteDisplayVideo360AdvertiserLink RPC. name (:class:`str`): Required. The name of the - DisplayVideo360AdvertiserLinkProposal to - delete. Example format: - properties/1234/displayVideo360AdvertiserLinkProposals/5678 + DisplayVideo360AdvertiserLink to delete. + Example format: + properties/1234/displayVideo360AdvertiserLinks/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -4669,12 +4636,10 @@ async def delete_display_video360_advertiser_link_proposal( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. if not isinstance( - request, analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest + request, analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest ): - request = ( - analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest( - request - ) + request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest( + request ) # If we have keyword arguments corresponding to fields on the @@ -4685,7 +4650,7 @@ async def delete_display_video360_advertiser_link_proposal( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_display_video360_advertiser_link_proposal + self._client._transport.delete_display_video360_advertiser_link ] # Certain fields should be provided within the metadata header; @@ -4705,29 +4670,43 @@ async def delete_display_video360_advertiser_link_proposal( metadata=metadata, ) - async def approve_display_video360_advertiser_link_proposal( + async def update_display_video360_advertiser_link( self, request: Optional[ - Union[ - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, - dict, - ] + Union[analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, dict] ] = None, *, + display_video_360_advertiser_link: Optional[ + resources.DisplayVideo360AdvertiserLink + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse: - r"""Approves a DisplayVideo360AdvertiserLinkProposal. - The DisplayVideo360AdvertiserLinkProposal will be - deleted and a new DisplayVideo360AdvertiserLink will be - created. + ) -> resources.DisplayVideo360AdvertiserLink: + r"""Updates a DisplayVideo360AdvertiserLink on a + property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ApproveDisplayVideo360AdvertiserLinkProposalRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateDisplayVideo360AdvertiserLinkRequest, dict]]): The request object. Request message for - ApproveDisplayVideo360AdvertiserLinkProposal - RPC. + UpdateDisplayVideo360AdvertiserLink RPC. 
+ display_video_360_advertiser_link (:class:`google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink`): + The DisplayVideo360AdvertiserLink to + update + + This corresponds to the ``display_video_360_advertiser_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to be updated. Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4735,34 +4714,56 @@ async def approve_display_video360_advertiser_link_proposal( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.ApproveDisplayVideo360AdvertiserLinkProposalResponse: - Response message for - ApproveDisplayVideo360AdvertiserLinkProposal - RPC. + google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: + A link between a GA4 property and a + Display & Video 360 advertiser. """ # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([display_video_360_advertiser_link, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
if not isinstance( - request, analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest + request, analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest ): - request = ( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest( - request - ) + request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if display_video_360_advertiser_link is not None: + request.display_video_360_advertiser_link = ( + display_video_360_advertiser_link ) + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.approve_display_video360_advertiser_link_proposal + self._client._transport.update_display_video360_advertiser_link ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "display_video_360_advertiser_link.name", + request.display_video_360_advertiser_link.name, + ), + ) + ), ) # Validate the universe domain. @@ -4779,32 +4780,34 @@ async def approve_display_video360_advertiser_link_proposal( # Done; return the response. 
return response - async def cancel_display_video360_advertiser_link_proposal( + async def get_display_video360_advertiser_link_proposal( self, request: Optional[ - Union[ - analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, dict - ] + Union[analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, dict] ] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.DisplayVideo360AdvertiserLinkProposal: - r"""Cancels a DisplayVideo360AdvertiserLinkProposal. - Cancelling can mean either: - - - Declining a proposal initiated from Display & Video - 360 - - Withdrawing a proposal initiated from Google Analytics - After being cancelled, a proposal will eventually be - deleted automatically. + r"""Lookup for a single + DisplayVideo360AdvertiserLinkProposal. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CancelDisplayVideo360AdvertiserLinkProposalRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.GetDisplayVideo360AdvertiserLinkProposalRequest, dict]]): The request object. Request message for - CancelDisplayVideo360AdvertiserLinkProposal + GetDisplayVideo360AdvertiserLinkProposal RPC. + name (:class:`str`): + Required. The name of the + DisplayVideo360AdvertiserLinkProposal to + get. Example format: + properties/1234/displayVideo360AdvertiserLinkProposals/5678 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4826,21 +4829,33 @@ async def cancel_display_video360_advertiser_link_proposal( """ # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. if not isinstance( - request, analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest + request, analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest ): - request = ( - analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest( - request - ) + request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest( + request ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.cancel_display_video360_advertiser_link_proposal + self._client._transport.get_display_video360_advertiser_link_proposal ] # Certain fields should be provided within the metadata header; @@ -4863,24 +4878,27 @@ async def cancel_display_video360_advertiser_link_proposal( # Done; return the response. 
return response - async def create_custom_dimension( + async def list_display_video360_advertiser_link_proposals( self, request: Optional[ - Union[analytics_admin.CreateCustomDimensionRequest, dict] + Union[ + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, dict + ] ] = None, *, parent: Optional[str] = None, - custom_dimension: Optional[resources.CustomDimension] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.CustomDimension: - r"""Creates a CustomDimension. + ) -> pagers.ListDisplayVideo360AdvertiserLinkProposalsAsyncPager: + r"""Lists DisplayVideo360AdvertiserLinkProposals on a + property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateCustomDimensionRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.ListDisplayVideo360AdvertiserLinkProposalsRequest, dict]]): The request object. Request message for - CreateCustomDimension RPC. + ListDisplayVideo360AdvertiserLinkProposals + RPC. parent (:class:`str`): Required. Example format: properties/1234 @@ -4888,13 +4906,6 @@ async def create_custom_dimension( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - custom_dimension (:class:`google.analytics.admin_v1alpha.types.CustomDimension`): - Required. The CustomDimension to - create. - - This corresponds to the ``custom_dimension`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4902,13 +4913,18 @@ async def create_custom_dimension( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CustomDimension: - A definition for a CustomDimension. 
+ google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListDisplayVideo360AdvertiserLinkProposalsAsyncPager: + Response message for + ListDisplayVideo360AdvertiserLinkProposals + RPC. Iterating over this object will + yield results and resolve additional + pages automatically. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, custom_dimension]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4917,20 +4933,22 @@ async def create_custom_dimension( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateCustomDimensionRequest): - request = analytics_admin.CreateCustomDimensionRequest(request) + if not isinstance( + request, analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest + ): + request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest( + request + ) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if custom_dimension is not None: - request.custom_dimension = custom_dimension # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_custom_dimension + self._client._transport.list_display_video360_advertiser_link_proposals ] # Certain fields should be provided within the metadata header; @@ -4950,39 +4968,56 @@ async def create_custom_dimension( metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListDisplayVideo360AdvertiserLinkProposalsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. return response - async def update_custom_dimension( + async def create_display_video360_advertiser_link_proposal( self, request: Optional[ - Union[analytics_admin.UpdateCustomDimensionRequest, dict] + Union[ + analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, dict + ] ] = None, *, - custom_dimension: Optional[resources.CustomDimension] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, + display_video_360_advertiser_link_proposal: Optional[ + resources.DisplayVideo360AdvertiserLinkProposal + ] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.CustomDimension: - r"""Updates a CustomDimension on a property. + ) -> resources.DisplayVideo360AdvertiserLinkProposal: + r"""Creates a DisplayVideo360AdvertiserLinkProposal. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateCustomDimensionRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateDisplayVideo360AdvertiserLinkProposalRequest, dict]]): The request object. Request message for - UpdateCustomDimension RPC. - custom_dimension (:class:`google.analytics.admin_v1alpha.types.CustomDimension`): - The CustomDimension to update - This corresponds to the ``custom_dimension`` field + CreateDisplayVideo360AdvertiserLinkProposal + RPC. + parent (:class:`str`): + Required. Example format: + properties/1234 + + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to be updated. 
Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. + display_video_360_advertiser_link_proposal (:class:`google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal`): + Required. The + DisplayVideo360AdvertiserLinkProposal to + create. - This corresponds to the ``update_mask`` field + This corresponds to the ``display_video_360_advertiser_link_proposal`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -4992,13 +5027,23 @@ async def update_custom_dimension( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CustomDimension: - A definition for a CustomDimension. + google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: + A proposal for a link between a GA4 + property and a Display & Video 360 + advertiser. + + A proposal is converted to a + DisplayVideo360AdvertiserLink once + approved. Google Analytics admins + approve inbound proposals while Display + & Video 360 admins approve outbound + proposals. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([custom_dimension, update_mask]) + has_flattened_params = any([parent, display_video_360_advertiser_link_proposal]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5007,28 +5052,34 @@ async def update_custom_dimension( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.UpdateCustomDimensionRequest): - request = analytics_admin.UpdateCustomDimensionRequest(request) + if not isinstance( + request, analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest + ): + request = ( + analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest( + request + ) + ) # If we have keyword arguments corresponding to fields on the # request, apply these. - if custom_dimension is not None: - request.custom_dimension = custom_dimension - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent + if display_video_360_advertiser_link_proposal is not None: + request.display_video_360_advertiser_link_proposal = ( + display_video_360_advertiser_link_proposal + ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_custom_dimension + self._client._transport.create_display_video360_advertiser_link_proposal ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("custom_dimension.name", request.custom_dimension.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -5045,28 +5096,34 @@ async def update_custom_dimension( # Done; return the response. 
return response - async def list_custom_dimensions( + async def delete_display_video360_advertiser_link_proposal( self, request: Optional[ - Union[analytics_admin.ListCustomDimensionsRequest, dict] + Union[ + analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, dict + ] ] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListCustomDimensionsAsyncPager: - r"""Lists CustomDimensions on a property. + ) -> None: + r"""Deletes a DisplayVideo360AdvertiserLinkProposal on a + property. This can only be used on cancelled proposals. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListCustomDimensionsRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteDisplayVideo360AdvertiserLinkProposalRequest, dict]]): The request object. Request message for - ListCustomDimensions RPC. - parent (:class:`str`): - Required. Example format: - properties/1234 + DeleteDisplayVideo360AdvertiserLinkProposal + RPC. + name (:class:`str`): + Required. The name of the + DisplayVideo360AdvertiserLinkProposal to + delete. Example format: + properties/1234/displayVideo360AdvertiserLinkProposals/5678 - This corresponds to the ``parent`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -5074,20 +5131,11 @@ async def list_custom_dimensions( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListCustomDimensionsAsyncPager: - Response message for - ListCustomDimensions RPC. 
- Iterating over this object will yield - results and resolve additional pages - automatically. - """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5096,107 +5144,95 @@ async def list_custom_dimensions( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListCustomDimensionsRequest): - request = analytics_admin.ListCustomDimensionsRequest(request) + if not isinstance( + request, analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest + ): + request = ( + analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest( + request + ) + ) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_custom_dimensions + self._client._transport.delete_display_video360_advertiser_link_proposal ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. 
- response = await rpc( + await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListCustomDimensionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def archive_custom_dimension( + async def approve_display_video360_advertiser_link_proposal( self, request: Optional[ - Union[analytics_admin.ArchiveCustomDimensionRequest, dict] + Union[ + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, + dict, + ] ] = None, *, - name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Archives a CustomDimension on a property. + ) -> analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse: + r"""Approves a DisplayVideo360AdvertiserLinkProposal. + The DisplayVideo360AdvertiserLinkProposal will be + deleted and a new DisplayVideo360AdvertiserLink will be + created. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ArchiveCustomDimensionRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.ApproveDisplayVideo360AdvertiserLinkProposalRequest, dict]]): The request object. Request message for - ArchiveCustomDimension RPC. - name (:class:`str`): - Required. The name of the - CustomDimension to archive. Example - format: - properties/1234/customDimensions/5678 - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. + ApproveDisplayVideo360AdvertiserLinkProposal + RPC. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.ApproveDisplayVideo360AdvertiserLinkProposalResponse: + Response message for + ApproveDisplayVideo360AdvertiserLinkProposal + RPC. + """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ArchiveCustomDimensionRequest): - request = analytics_admin.ArchiveCustomDimensionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name + if not isinstance( + request, analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest + ): + request = ( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest( + request + ) + ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.archive_custom_dimension + self._client._transport.approve_display_video360_advertiser_link_proposal ] # Certain fields should be provided within the metadata header; @@ -5209,38 +5245,42 @@ async def archive_custom_dimension( self._client._validate_universe_domain() # Send the request. - await rpc( + response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - async def get_custom_dimension( + # Done; return the response. 
+ return response + + async def cancel_display_video360_advertiser_link_proposal( self, request: Optional[ - Union[analytics_admin.GetCustomDimensionRequest, dict] + Union[ + analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, dict + ] ] = None, *, - name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.CustomDimension: - r"""Lookup for a single CustomDimension. + ) -> resources.DisplayVideo360AdvertiserLinkProposal: + r"""Cancels a DisplayVideo360AdvertiserLinkProposal. + Cancelling can mean either: + + - Declining a proposal initiated from Display & Video + 360 + - Withdrawing a proposal initiated from Google Analytics + After being cancelled, a proposal will eventually be + deleted automatically. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetCustomDimensionRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.CancelDisplayVideo360AdvertiserLinkProposalRequest, dict]]): The request object. Request message for - GetCustomDimension RPC. - name (:class:`str`): - Required. The name of the - CustomDimension to get. Example format: - properties/1234/customDimensions/5678 - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. + CancelDisplayVideo360AdvertiserLinkProposal + RPC. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5248,33 +5288,35 @@ async def get_custom_dimension( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CustomDimension: - A definition for a CustomDimension. + google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: + A proposal for a link between a GA4 + property and a Display & Video 360 + advertiser. 
+ + A proposal is converted to a + DisplayVideo360AdvertiserLink once + approved. Google Analytics admins + approve inbound proposals while Display + & Video 360 admins approve outbound + proposals. + """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetCustomDimensionRequest): - request = analytics_admin.GetCustomDimensionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name + if not isinstance( + request, analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest + ): + request = ( + analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest( + request + ) + ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_custom_dimension + self._client._transport.cancel_display_video360_advertiser_link_proposal ] # Certain fields should be provided within the metadata header; @@ -5297,24 +5339,24 @@ async def get_custom_dimension( # Done; return the response. 
return response - async def create_custom_metric( + async def create_custom_dimension( self, request: Optional[ - Union[analytics_admin.CreateCustomMetricRequest, dict] + Union[analytics_admin.CreateCustomDimensionRequest, dict] ] = None, *, parent: Optional[str] = None, - custom_metric: Optional[resources.CustomMetric] = None, + custom_dimension: Optional[resources.CustomDimension] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.CustomMetric: - r"""Creates a CustomMetric. + ) -> resources.CustomDimension: + r"""Creates a CustomDimension. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateCustomMetricRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateCustomDimensionRequest, dict]]): The request object. Request message for - CreateCustomMetric RPC. + CreateCustomDimension RPC. parent (:class:`str`): Required. Example format: properties/1234 @@ -5322,9 +5364,11 @@ async def create_custom_metric( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - custom_metric (:class:`google.analytics.admin_v1alpha.types.CustomMetric`): - Required. The CustomMetric to create. - This corresponds to the ``custom_metric`` field + custom_dimension (:class:`google.analytics.admin_v1alpha.types.CustomDimension`): + Required. The CustomDimension to + create. + + This corresponds to the ``custom_dimension`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -5334,13 +5378,13 @@ async def create_custom_metric( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CustomMetric: - A definition for a custom metric. 
+ google.analytics.admin_v1alpha.types.CustomDimension: + A definition for a CustomDimension. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, custom_metric]) + has_flattened_params = any([parent, custom_dimension]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5349,20 +5393,20 @@ async def create_custom_metric( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateCustomMetricRequest): - request = analytics_admin.CreateCustomMetricRequest(request) + if not isinstance(request, analytics_admin.CreateCustomDimensionRequest): + request = analytics_admin.CreateCustomDimensionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if custom_metric is not None: - request.custom_metric = custom_metric + if custom_dimension is not None: + request.custom_dimension = custom_dimension # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_custom_metric + self._client._transport.create_custom_dimension ] # Certain fields should be provided within the metadata header; @@ -5385,27 +5429,27 @@ async def create_custom_metric( # Done; return the response. 
return response - async def update_custom_metric( + async def update_custom_dimension( self, request: Optional[ - Union[analytics_admin.UpdateCustomMetricRequest, dict] + Union[analytics_admin.UpdateCustomDimensionRequest, dict] ] = None, *, - custom_metric: Optional[resources.CustomMetric] = None, + custom_dimension: Optional[resources.CustomDimension] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.CustomMetric: - r"""Updates a CustomMetric on a property. + ) -> resources.CustomDimension: + r"""Updates a CustomDimension on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateCustomMetricRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateCustomDimensionRequest, dict]]): The request object. Request message for - UpdateCustomMetric RPC. - custom_metric (:class:`google.analytics.admin_v1alpha.types.CustomMetric`): - The CustomMetric to update - This corresponds to the ``custom_metric`` field + UpdateCustomDimension RPC. + custom_dimension (:class:`google.analytics.admin_v1alpha.types.CustomDimension`): + The CustomDimension to update + This corresponds to the ``custom_dimension`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): @@ -5424,13 +5468,13 @@ async def update_custom_metric( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CustomMetric: - A definition for a custom metric. + google.analytics.admin_v1alpha.types.CustomDimension: + A definition for a CustomDimension. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([custom_metric, update_mask]) + has_flattened_params = any([custom_dimension, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5439,27 +5483,27 @@ async def update_custom_metric( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateCustomMetricRequest): - request = analytics_admin.UpdateCustomMetricRequest(request) + if not isinstance(request, analytics_admin.UpdateCustomDimensionRequest): + request = analytics_admin.UpdateCustomDimensionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if custom_metric is not None: - request.custom_metric = custom_metric + if custom_dimension is not None: + request.custom_dimension = custom_dimension if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_custom_metric + self._client._transport.update_custom_dimension ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("custom_metric.name", request.custom_metric.name),) + (("custom_dimension.name", request.custom_dimension.name),) ), ) @@ -5477,21 +5521,23 @@ async def update_custom_metric( # Done; return the response. 
return response - async def list_custom_metrics( + async def list_custom_dimensions( self, - request: Optional[Union[analytics_admin.ListCustomMetricsRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.ListCustomDimensionsRequest, dict] + ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListCustomMetricsAsyncPager: - r"""Lists CustomMetrics on a property. + ) -> pagers.ListCustomDimensionsAsyncPager: + r"""Lists CustomDimensions on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListCustomMetricsRequest, dict]]): - The request object. Request message for ListCustomMetrics - RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.ListCustomDimensionsRequest, dict]]): + The request object. Request message for + ListCustomDimensions RPC. parent (:class:`str`): Required. Example format: properties/1234 @@ -5506,9 +5552,9 @@ async def list_custom_metrics( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListCustomMetricsAsyncPager: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListCustomDimensionsAsyncPager: Response message for - ListCustomMetrics RPC. + ListCustomDimensions RPC. Iterating over this object will yield results and resolve additional pages automatically. @@ -5526,8 +5572,8 @@ async def list_custom_metrics( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.ListCustomMetricsRequest): - request = analytics_admin.ListCustomMetricsRequest(request) + if not isinstance(request, analytics_admin.ListCustomDimensionsRequest): + request = analytics_admin.ListCustomDimensionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -5537,7 +5583,7 @@ async def list_custom_metrics( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_custom_metrics + self._client._transport.list_custom_dimensions ] # Certain fields should be provided within the metadata header; @@ -5559,7 +5605,7 @@ async def list_custom_metrics( # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListCustomMetricsAsyncPager( + response = pagers.ListCustomDimensionsAsyncPager( method=rpc, request=request, response=response, @@ -5571,10 +5617,10 @@ async def list_custom_metrics( # Done; return the response. return response - async def archive_custom_metric( + async def archive_custom_dimension( self, request: Optional[ - Union[analytics_admin.ArchiveCustomMetricRequest, dict] + Union[analytics_admin.ArchiveCustomDimensionRequest, dict] ] = None, *, name: Optional[str] = None, @@ -5582,16 +5628,17 @@ async def archive_custom_metric( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Archives a CustomMetric on a property. + r"""Archives a CustomDimension on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ArchiveCustomMetricRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.ArchiveCustomDimensionRequest, dict]]): The request object. Request message for - ArchiveCustomMetric RPC. + ArchiveCustomDimension RPC. name (:class:`str`): Required. 
The name of the - CustomMetric to archive. Example format: - properties/1234/customMetrics/5678 + CustomDimension to archive. Example + format: + properties/1234/customDimensions/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -5614,8 +5661,8 @@ async def archive_custom_metric( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ArchiveCustomMetricRequest): - request = analytics_admin.ArchiveCustomMetricRequest(request) + if not isinstance(request, analytics_admin.ArchiveCustomDimensionRequest): + request = analytics_admin.ArchiveCustomDimensionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -5625,7 +5672,7 @@ async def archive_custom_metric( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.archive_custom_metric + self._client._transport.archive_custom_dimension ] # Certain fields should be provided within the metadata header; @@ -5645,25 +5692,27 @@ async def archive_custom_metric( metadata=metadata, ) - async def get_custom_metric( + async def get_custom_dimension( self, - request: Optional[Union[analytics_admin.GetCustomMetricRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.GetCustomDimensionRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.CustomMetric: - r"""Lookup for a single CustomMetric. + ) -> resources.CustomDimension: + r"""Lookup for a single CustomDimension. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetCustomMetricRequest, dict]]): - The request object. 
Request message for GetCustomMetric - RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.GetCustomDimensionRequest, dict]]): + The request object. Request message for + GetCustomDimension RPC. name (:class:`str`): Required. The name of the - CustomMetric to get. Example format: - properties/1234/customMetrics/5678 + CustomDimension to get. Example format: + properties/1234/customDimensions/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -5675,8 +5724,8 @@ async def get_custom_metric( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CustomMetric: - A definition for a custom metric. + google.analytics.admin_v1alpha.types.CustomDimension: + A definition for a CustomDimension. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -5690,8 +5739,8 @@ async def get_custom_metric( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetCustomMetricRequest): - request = analytics_admin.GetCustomMetricRequest(request) + if not isinstance(request, analytics_admin.GetCustomDimensionRequest): + request = analytics_admin.GetCustomDimensionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -5701,7 +5750,7 @@ async def get_custom_metric( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_custom_metric + self._client._transport.get_custom_dimension ] # Certain fields should be provided within the metadata header; @@ -5724,33 +5773,34 @@ async def get_custom_metric( # Done; return the response. 
return response - async def get_data_retention_settings( + async def create_custom_metric( self, request: Optional[ - Union[analytics_admin.GetDataRetentionSettingsRequest, dict] + Union[analytics_admin.CreateCustomMetricRequest, dict] ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + custom_metric: Optional[resources.CustomMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DataRetentionSettings: - r"""Returns the singleton data retention settings for - this property. + ) -> resources.CustomMetric: + r"""Creates a CustomMetric. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetDataRetentionSettingsRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateCustomMetricRequest, dict]]): The request object. Request message for - GetDataRetentionSettings RPC. - name (:class:`str`): - Required. The name of the settings to - lookup. Format: - - properties/{property}/dataRetentionSettings - Example: - "properties/1000/dataRetentionSettings" + CreateCustomMetric RPC. + parent (:class:`str`): + Required. Example format: + properties/1234 - This corresponds to the ``name`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + custom_metric (:class:`google.analytics.admin_v1alpha.types.CustomMetric`): + Required. The CustomMetric to create. + This corresponds to the ``custom_metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -5760,15 +5810,13 @@ async def get_data_retention_settings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DataRetentionSettings: - Settings values for data retention. - This is a singleton resource. 
- + google.analytics.admin_v1alpha.types.CustomMetric: + A definition for a custom metric. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent, custom_metric]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5777,24 +5825,26 @@ async def get_data_retention_settings( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetDataRetentionSettingsRequest): - request = analytics_admin.GetDataRetentionSettingsRequest(request) + if not isinstance(request, analytics_admin.CreateCustomMetricRequest): + request = analytics_admin.CreateCustomMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if custom_metric is not None: + request.custom_metric = custom_metric # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_data_retention_settings + self._client._transport.create_custom_metric ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -5811,35 +5861,31 @@ async def get_data_retention_settings( # Done; return the response. 
return response - async def update_data_retention_settings( + async def update_custom_metric( self, request: Optional[ - Union[analytics_admin.UpdateDataRetentionSettingsRequest, dict] + Union[analytics_admin.UpdateCustomMetricRequest, dict] ] = None, *, - data_retention_settings: Optional[resources.DataRetentionSettings] = None, + custom_metric: Optional[resources.CustomMetric] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DataRetentionSettings: - r"""Updates the singleton data retention settings for - this property. + ) -> resources.CustomMetric: + r"""Updates a CustomMetric on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateDataRetentionSettingsRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateCustomMetricRequest, dict]]): The request object. Request message for - UpdateDataRetentionSettings RPC. - data_retention_settings (:class:`google.analytics.admin_v1alpha.types.DataRetentionSettings`): - Required. The settings to update. The ``name`` field is - used to identify the settings to be updated. - - This corresponds to the ``data_retention_settings`` field + UpdateCustomMetric RPC. + custom_metric (:class:`google.analytics.admin_v1alpha.types.CustomMetric`): + The CustomMetric to update + This corresponds to the ``custom_metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to be updated. Field names - must be in snake case (e.g., "field_to_update"). Omitted + Required. The list of fields to be updated. Omitted fields will not be updated. To replace the entire entity, use one path with the string "*" to match all fields. 
@@ -5854,15 +5900,13 @@ async def update_data_retention_settings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DataRetentionSettings: - Settings values for data retention. - This is a singleton resource. - + google.analytics.admin_v1alpha.types.CustomMetric: + A definition for a custom metric. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_retention_settings, update_mask]) + has_flattened_params = any([custom_metric, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5871,32 +5915,27 @@ async def update_data_retention_settings( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateDataRetentionSettingsRequest): - request = analytics_admin.UpdateDataRetentionSettingsRequest(request) + if not isinstance(request, analytics_admin.UpdateCustomMetricRequest): + request = analytics_admin.UpdateCustomMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if data_retention_settings is not None: - request.data_retention_settings = data_retention_settings + if custom_metric is not None: + request.custom_metric = custom_metric if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_data_retention_settings + self._client._transport.update_custom_metric ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - ( - ( - "data_retention_settings.name", - request.data_retention_settings.name, - ), - ) + (("custom_metric.name", request.custom_metric.name),) ), ) @@ -5914,21 +5953,20 @@ async def update_data_retention_settings( # Done; return the response. return response - async def create_data_stream( + async def list_custom_metrics( self, - request: Optional[Union[analytics_admin.CreateDataStreamRequest, dict]] = None, + request: Optional[Union[analytics_admin.ListCustomMetricsRequest, dict]] = None, *, parent: Optional[str] = None, - data_stream: Optional[resources.DataStream] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DataStream: - r"""Creates a DataStream. + ) -> pagers.ListCustomMetricsAsyncPager: + r"""Lists CustomMetrics on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateDataStreamRequest, dict]]): - The request object. Request message for CreateDataStream + request (Optional[Union[google.analytics.admin_v1alpha.types.ListCustomMetricsRequest, dict]]): + The request object. Request message for ListCustomMetrics RPC. parent (:class:`str`): Required. Example format: @@ -5937,11 +5975,6 @@ async def create_data_stream( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - data_stream (:class:`google.analytics.admin_v1alpha.types.DataStream`): - Required. The DataStream to create. - This corresponds to the ``data_stream`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5949,15 +5982,18 @@ async def create_data_stream( sent along with the request as metadata. 
Returns: - google.analytics.admin_v1alpha.types.DataStream: - A resource message representing a - data stream. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListCustomMetricsAsyncPager: + Response message for + ListCustomMetrics RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_stream]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5966,20 +6002,18 @@ async def create_data_stream( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateDataStreamRequest): - request = analytics_admin.CreateDataStreamRequest(request) + if not isinstance(request, analytics_admin.ListCustomMetricsRequest): + request = analytics_admin.ListCustomMetricsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if data_stream is not None: - request.data_stream = data_stream # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_data_stream + self._client._transport.list_custom_metrics ] # Certain fields should be provided within the metadata header; @@ -5999,28 +6033,41 @@ async def create_data_stream( metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListCustomMetricsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. return response - async def delete_data_stream( + async def archive_custom_metric( self, - request: Optional[Union[analytics_admin.DeleteDataStreamRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.ArchiveCustomMetricRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a DataStream on a property. + r"""Archives a CustomMetric on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteDataStreamRequest, dict]]): - The request object. Request message for DeleteDataStream - RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.ArchiveCustomMetricRequest, dict]]): + The request object. Request message for + ArchiveCustomMetric RPC. name (:class:`str`): - Required. The name of the DataStream - to delete. Example format: - properties/1234/dataStreams/5678 + Required. The name of the + CustomMetric to archive. Example format: + properties/1234/customMetrics/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -6043,8 +6090,8 @@ async def delete_data_stream( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteDataStreamRequest): - request = analytics_admin.DeleteDataStreamRequest(request) + if not isinstance(request, analytics_admin.ArchiveCustomMetricRequest): + request = analytics_admin.ArchiveCustomMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -6054,7 +6101,7 @@ async def delete_data_stream( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_data_stream + self._client._transport.archive_custom_metric ] # Certain fields should be provided within the metadata header; @@ -6074,34 +6121,27 @@ async def delete_data_stream( metadata=metadata, ) - async def update_data_stream( + async def get_custom_metric( self, - request: Optional[Union[analytics_admin.UpdateDataStreamRequest, dict]] = None, + request: Optional[Union[analytics_admin.GetCustomMetricRequest, dict]] = None, *, - data_stream: Optional[resources.DataStream] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DataStream: - r"""Updates a DataStream on a property. + ) -> resources.CustomMetric: + r"""Lookup for a single CustomMetric. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateDataStreamRequest, dict]]): - The request object. Request message for UpdateDataStream + request (Optional[Union[google.analytics.admin_v1alpha.types.GetCustomMetricRequest, dict]]): + The request object. Request message for GetCustomMetric RPC. - data_stream (:class:`google.analytics.admin_v1alpha.types.DataStream`): - The DataStream to update - This corresponds to the ``data_stream`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to be updated. Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. + name (:class:`str`): + Required. The name of the + CustomMetric to get. 
Example format: + properties/1234/customMetrics/5678 - This corresponds to the ``update_mask`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -6111,15 +6151,13 @@ async def update_data_stream( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DataStream: - A resource message representing a - data stream. - + google.analytics.admin_v1alpha.types.CustomMetric: + A definition for a custom metric. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_stream, update_mask]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6128,28 +6166,24 @@ async def update_data_stream( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateDataStreamRequest): - request = analytics_admin.UpdateDataStreamRequest(request) + if not isinstance(request, analytics_admin.GetCustomMetricRequest): + request = analytics_admin.GetCustomMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if data_stream is not None: - request.data_stream = data_stream - if update_mask is not None: - request.update_mask = update_mask + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_data_stream + self._client._transport.get_custom_metric ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("data_stream.name", request.data_stream.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -6166,26 +6200,33 @@ async def update_data_stream( # Done; return the response. return response - async def list_data_streams( + async def get_data_retention_settings( self, - request: Optional[Union[analytics_admin.ListDataStreamsRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.GetDataRetentionSettingsRequest, dict] + ] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDataStreamsAsyncPager: - r"""Lists DataStreams on a property. + ) -> resources.DataRetentionSettings: + r"""Returns the singleton data retention settings for + this property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListDataStreamsRequest, dict]]): - The request object. Request message for ListDataStreams - RPC. - parent (:class:`str`): - Required. Example format: - properties/1234 + request (Optional[Union[google.analytics.admin_v1alpha.types.GetDataRetentionSettingsRequest, dict]]): + The request object. Request message for + GetDataRetentionSettings RPC. + name (:class:`str`): + Required. The name of the settings to + lookup. Format: - This corresponds to the ``parent`` field + properties/{property}/dataRetentionSettings + Example: + "properties/1000/dataRetentionSettings" + + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -6195,18 +6236,15 @@ async def list_data_streams( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListDataStreamsAsyncPager: - Response message for ListDataStreams - RPC. - Iterating over this object will yield - results and resolve additional pages - automatically. + google.analytics.admin_v1alpha.types.DataRetentionSettings: + Settings values for data retention. + This is a singleton resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6215,24 +6253,24 @@ async def list_data_streams( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListDataStreamsRequest): - request = analytics_admin.ListDataStreamsRequest(request) + if not isinstance(request, analytics_admin.GetDataRetentionSettingsRequest): + request = analytics_admin.GetDataRetentionSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_data_streams + self._client._transport.get_data_retention_settings ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -6246,41 +6284,43 @@ async def list_data_streams( metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDataStreamsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - # Done; return the response. return response - async def get_data_stream( + async def update_data_retention_settings( self, - request: Optional[Union[analytics_admin.GetDataStreamRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.UpdateDataRetentionSettingsRequest, dict] + ] = None, *, - name: Optional[str] = None, + data_retention_settings: Optional[resources.DataRetentionSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DataStream: - r"""Lookup for a single DataStream. + ) -> resources.DataRetentionSettings: + r"""Updates the singleton data retention settings for + this property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetDataStreamRequest, dict]]): - The request object. Request message for GetDataStream - RPC. - name (:class:`str`): - Required. The name of the DataStream - to get. Example format: - properties/1234/dataStreams/5678 + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateDataRetentionSettingsRequest, dict]]): + The request object. Request message for + UpdateDataRetentionSettings RPC. + data_retention_settings (:class:`google.analytics.admin_v1alpha.types.DataRetentionSettings`): + Required. The settings to update. 
The ``name`` field is + used to identify the settings to be updated. - This corresponds to the ``name`` field + This corresponds to the ``data_retention_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -6290,15 +6330,15 @@ async def get_data_stream( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DataStream: - A resource message representing a - data stream. + google.analytics.admin_v1alpha.types.DataRetentionSettings: + Settings values for data retention. + This is a singleton resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([data_retention_settings, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6307,24 +6347,33 @@ async def get_data_stream( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.GetDataStreamRequest): - request = analytics_admin.GetDataStreamRequest(request) + if not isinstance(request, analytics_admin.UpdateDataRetentionSettingsRequest): + request = analytics_admin.UpdateDataRetentionSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if data_retention_settings is not None: + request.data_retention_settings = data_retention_settings + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_data_stream + self._client._transport.update_data_retention_settings ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "data_retention_settings.name", + request.data_retention_settings.name, + ), + ) + ), ) # Validate the universe domain. @@ -6341,28 +6390,32 @@ async def get_data_stream( # Done; return the response. return response - async def get_audience( + async def create_data_stream( self, - request: Optional[Union[analytics_admin.GetAudienceRequest, dict]] = None, + request: Optional[Union[analytics_admin.CreateDataStreamRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + data_stream: Optional[resources.DataStream] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> audience.Audience: - r"""Lookup for a single Audience. - Audiences created before 2020 may not be supported. - Default audiences will not show filter definitions. + ) -> resources.DataStream: + r"""Creates a DataStream. 
Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetAudienceRequest, dict]]): - The request object. Request message for GetAudience RPC. - name (:class:`str`): - Required. The name of the Audience to - get. Example format: - properties/1234/audiences/5678 + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateDataStreamRequest, dict]]): + The request object. Request message for CreateDataStream + RPC. + parent (:class:`str`): + Required. Example format: + properties/1234 - This corresponds to the ``name`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_stream (:class:`google.analytics.admin_v1alpha.types.DataStream`): + Required. The DataStream to create. + This corresponds to the ``data_stream`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -6372,15 +6425,15 @@ async def get_audience( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.Audience: - A resource message representing a GA4 - Audience. + google.analytics.admin_v1alpha.types.DataStream: + A resource message representing a + data stream. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent, data_stream]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6389,24 +6442,26 @@ async def get_audience( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.GetAudienceRequest): - request = analytics_admin.GetAudienceRequest(request) + if not isinstance(request, analytics_admin.CreateDataStreamRequest): + request = analytics_admin.CreateDataStreamRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if data_stream is not None: + request.data_stream = data_stream # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_audience + self._client._transport.create_data_stream ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -6423,28 +6478,27 @@ async def get_audience( # Done; return the response. return response - async def list_audiences( + async def delete_data_stream( self, - request: Optional[Union[analytics_admin.ListAudiencesRequest, dict]] = None, + request: Optional[Union[analytics_admin.DeleteDataStreamRequest, dict]] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAudiencesAsyncPager: - r"""Lists Audiences on a property. - Audiences created before 2020 may not be supported. - Default audiences will not show filter definitions. + ) -> None: + r"""Deletes a DataStream on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListAudiencesRequest, dict]]): - The request object. 
Request message for ListAudiences + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteDataStreamRequest, dict]]): + The request object. Request message for DeleteDataStream RPC. - parent (:class:`str`): - Required. Example format: - properties/1234 + name (:class:`str`): + Required. The name of the DataStream + to delete. Example format: + properties/1234/dataStreams/5678 - This corresponds to the ``parent`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -6452,20 +6506,11 @@ async def list_audiences( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListAudiencesAsyncPager: - Response message for ListAudiences - RPC. - Iterating over this object will yield - results and resolve additional pages - automatically. - """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6474,77 +6519,65 @@ async def list_audiences( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListAudiencesRequest): - request = analytics_admin.ListAudiencesRequest(request) + if not isinstance(request, analytics_admin.DeleteDataStreamRequest): + request = analytics_admin.DeleteDataStreamRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_audiences + self._client._transport.delete_data_stream ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - response = await rpc( + await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAudiencesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_audience( + async def update_data_stream( self, - request: Optional[Union[analytics_admin.CreateAudienceRequest, dict]] = None, + request: Optional[Union[analytics_admin.UpdateDataStreamRequest, dict]] = None, *, - parent: Optional[str] = None, - audience: Optional[gaa_audience.Audience] = None, + data_stream: Optional[resources.DataStream] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_audience.Audience: - r"""Creates an Audience. + ) -> resources.DataStream: + r"""Updates a DataStream on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateAudienceRequest, dict]]): - The request object. 
Request message for CreateAudience + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateDataStreamRequest, dict]]): + The request object. Request message for UpdateDataStream RPC. - parent (:class:`str`): - Required. Example format: - properties/1234 - - This corresponds to the ``parent`` field + data_stream (:class:`google.analytics.admin_v1alpha.types.DataStream`): + The DataStream to update + This corresponds to the ``data_stream`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - audience (:class:`google.analytics.admin_v1alpha.types.Audience`): - Required. The audience to create. - This corresponds to the ``audience`` field + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to be updated. Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -6554,15 +6587,15 @@ async def create_audience( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.Audience: - A resource message representing a GA4 - Audience. + google.analytics.admin_v1alpha.types.DataStream: + A resource message representing a + data stream. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, audience]) + has_flattened_params = any([data_stream, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6571,26 +6604,28 @@ async def create_audience( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateAudienceRequest): - request = analytics_admin.CreateAudienceRequest(request) + if not isinstance(request, analytics_admin.UpdateDataStreamRequest): + request = analytics_admin.UpdateDataStreamRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent - if audience is not None: - request.audience = audience + if data_stream is not None: + request.data_stream = data_stream + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_audience + self._client._transport.update_data_stream ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("data_stream.name", request.data_stream.name),) + ), ) # Validate the universe domain. @@ -6607,38 +6642,26 @@ async def create_audience( # Done; return the response. 
return response - async def update_audience( + async def list_data_streams( self, - request: Optional[Union[analytics_admin.UpdateAudienceRequest, dict]] = None, + request: Optional[Union[analytics_admin.ListDataStreamsRequest, dict]] = None, *, - audience: Optional[gaa_audience.Audience] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_audience.Audience: - r"""Updates an Audience on a property. + ) -> pagers.ListDataStreamsAsyncPager: + r"""Lists DataStreams on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateAudienceRequest, dict]]): - The request object. Request message for UpdateAudience + request (Optional[Union[google.analytics.admin_v1alpha.types.ListDataStreamsRequest, dict]]): + The request object. Request message for ListDataStreams RPC. - audience (:class:`google.analytics.admin_v1alpha.types.Audience`): - Required. The audience to update. The audience's - ``name`` field is used to identify the audience to be - updated. - - This corresponds to the ``audience`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to be updated. Field names - must be in snake case (e.g., "field_to_update"). Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. + parent (:class:`str`): + Required. Example format: + properties/1234 - This corresponds to the ``update_mask`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -6648,15 +6671,18 @@ async def update_audience( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.Audience: - A resource message representing a GA4 - Audience. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListDataStreamsAsyncPager: + Response message for ListDataStreams + RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([audience, update_mask]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6665,28 +6691,24 @@ async def update_audience( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateAudienceRequest): - request = analytics_admin.UpdateAudienceRequest(request) + if not isinstance(request, analytics_admin.ListDataStreamsRequest): + request = analytics_admin.ListDataStreamsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if audience is not None: - request.audience = audience - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_audience + self._client._transport.list_data_streams ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("audience.name", request.audience.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -6700,39 +6722,79 @@ async def update_audience( metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataStreamsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. return response - async def archive_audience( + async def get_data_stream( self, - request: Optional[Union[analytics_admin.ArchiveAudienceRequest, dict]] = None, + request: Optional[Union[analytics_admin.GetDataStreamRequest, dict]] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Archives an Audience on a property. + ) -> resources.DataStream: + r"""Lookup for a single DataStream. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ArchiveAudienceRequest, dict]]): - The request object. Request message for ArchiveAudience + request (Optional[Union[google.analytics.admin_v1alpha.types.GetDataStreamRequest, dict]]): + The request object. Request message for GetDataStream RPC. + name (:class:`str`): + Required. The name of the DataStream + to get. Example format: + properties/1234/dataStreams/5678 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
+ + Returns: + google.analytics.admin_v1alpha.types.DataStream: + A resource message representing a + data stream. + """ # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ArchiveAudienceRequest): - request = analytics_admin.ArchiveAudienceRequest(request) + if not isinstance(request, analytics_admin.GetDataStreamRequest): + request = analytics_admin.GetDataStreamRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.archive_audience + self._client._transport.get_data_stream ] # Certain fields should be provided within the metadata header; @@ -6745,34 +6807,36 @@ async def archive_audience( self._client._validate_universe_domain() # Send the request. - await rpc( + response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - async def get_search_ads360_link( + # Done; return the response. 
+ return response + + async def get_audience( self, - request: Optional[ - Union[analytics_admin.GetSearchAds360LinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.GetAudienceRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.SearchAds360Link: - r"""Look up a single SearchAds360Link + ) -> audience.Audience: + r"""Lookup for a single Audience. + Audiences created before 2020 may not be supported. + Default audiences will not show filter definitions. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetSearchAds360LinkRequest, dict]]): - The request object. Request message for - GetSearchAds360Link RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.GetAudienceRequest, dict]]): + The request object. Request message for GetAudience RPC. name (:class:`str`): - Required. The name of the - SearchAds360Link to get. Example format: - properties/1234/SearchAds360Link/5678 + Required. The name of the Audience to + get. Example format: + properties/1234/audiences/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -6784,9 +6848,9 @@ async def get_search_ads360_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. + google.analytics.admin_v1alpha.types.Audience: + A resource message representing a GA4 + Audience. """ # Create or coerce a protobuf request object. @@ -6801,8 +6865,8 @@ async def get_search_ads360_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.GetSearchAds360LinkRequest): - request = analytics_admin.GetSearchAds360LinkRequest(request) + if not isinstance(request, analytics_admin.GetAudienceRequest): + request = analytics_admin.GetAudienceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -6812,7 +6876,7 @@ async def get_search_ads360_link( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_search_ads360_link + self._client._transport.get_audience ] # Certain fields should be provided within the metadata header; @@ -6835,23 +6899,23 @@ async def get_search_ads360_link( # Done; return the response. return response - async def list_search_ads360_links( + async def list_audiences( self, - request: Optional[ - Union[analytics_admin.ListSearchAds360LinksRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.ListAudiencesRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSearchAds360LinksAsyncPager: - r"""Lists all SearchAds360Links on a property. + ) -> pagers.ListAudiencesAsyncPager: + r"""Lists Audiences on a property. + Audiences created before 2020 may not be supported. + Default audiences will not show filter definitions. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListSearchAds360LinksRequest, dict]]): - The request object. Request message for - ListSearchAds360Links RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.ListAudiencesRequest, dict]]): + The request object. Request message for ListAudiences + RPC. parent (:class:`str`): Required. 
Example format: properties/1234 @@ -6866,9 +6930,9 @@ async def list_search_ads360_links( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListSearchAds360LinksAsyncPager: - Response message for - ListSearchAds360Links RPC. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListAudiencesAsyncPager: + Response message for ListAudiences + RPC. Iterating over this object will yield results and resolve additional pages automatically. @@ -6886,8 +6950,8 @@ async def list_search_ads360_links( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListSearchAds360LinksRequest): - request = analytics_admin.ListSearchAds360LinksRequest(request) + if not isinstance(request, analytics_admin.ListAudiencesRequest): + request = analytics_admin.ListAudiencesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -6897,7 +6961,7 @@ async def list_search_ads360_links( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_search_ads360_links + self._client._transport.list_audiences ] # Certain fields should be provided within the metadata header; @@ -6919,7 +6983,7 @@ async def list_search_ads360_links( # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListSearchAds360LinksAsyncPager( + response = pagers.ListAudiencesAsyncPager( method=rpc, request=request, response=response, @@ -6931,24 +6995,22 @@ async def list_search_ads360_links( # Done; return the response. 
return response - async def create_search_ads360_link( + async def create_audience( self, - request: Optional[ - Union[analytics_admin.CreateSearchAds360LinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.CreateAudienceRequest, dict]] = None, *, parent: Optional[str] = None, - search_ads_360_link: Optional[resources.SearchAds360Link] = None, + audience: Optional[gaa_audience.Audience] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.SearchAds360Link: - r"""Creates a SearchAds360Link. + ) -> gaa_audience.Audience: + r"""Creates an Audience. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateSearchAds360LinkRequest, dict]]): - The request object. Request message for - CreateSearchAds360Link RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateAudienceRequest, dict]]): + The request object. Request message for CreateAudience + RPC. parent (:class:`str`): Required. Example format: properties/1234 @@ -6956,11 +7018,9 @@ async def create_search_ads360_link( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - search_ads_360_link (:class:`google.analytics.admin_v1alpha.types.SearchAds360Link`): - Required. The SearchAds360Link to - create. - - This corresponds to the ``search_ads_360_link`` field + audience (:class:`google.analytics.admin_v1alpha.types.Audience`): + Required. The audience to create. + This corresponds to the ``audience`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -6970,15 +7030,15 @@ async def create_search_ads360_link( sent along with the request as metadata. 
Returns: - google.analytics.admin_v1alpha.types.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. + google.analytics.admin_v1alpha.types.Audience: + A resource message representing a GA4 + Audience. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, search_ads_360_link]) + has_flattened_params = any([parent, audience]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6987,20 +7047,20 @@ async def create_search_ads360_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateSearchAds360LinkRequest): - request = analytics_admin.CreateSearchAds360LinkRequest(request) + if not isinstance(request, analytics_admin.CreateAudienceRequest): + request = analytics_admin.CreateAudienceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if search_ads_360_link is not None: - request.search_ads_360_link = search_ads_360_link + if audience is not None: + request.audience = audience # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_search_ads360_link + self._client._transport.create_audience ] # Certain fields should be provided within the metadata header; @@ -7023,30 +7083,38 @@ async def create_search_ads360_link( # Done; return the response. 
return response - async def delete_search_ads360_link( + async def update_audience( self, - request: Optional[ - Union[analytics_admin.DeleteSearchAds360LinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.UpdateAudienceRequest, dict]] = None, *, - name: Optional[str] = None, + audience: Optional[gaa_audience.Audience] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a SearchAds360Link on a property. + ) -> gaa_audience.Audience: + r"""Updates an Audience on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteSearchAds360LinkRequest, dict]]): - The request object. Request message for - DeleteSearchAds360Link RPC. - name (:class:`str`): - Required. The name of the - SearchAds360Link to delete. Example - format: - properties/1234/SearchAds360Links/5678 + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateAudienceRequest, dict]]): + The request object. Request message for UpdateAudience + RPC. + audience (:class:`google.analytics.admin_v1alpha.types.Audience`): + Required. The audience to update. The audience's + ``name`` field is used to identify the audience to be + updated. - This corresponds to the ``name`` field + This corresponds to the ``audience`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -7054,11 +7122,17 @@ async def delete_search_ads360_link( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.Audience: + A resource message representing a GA4 + Audience. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([audience, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -7067,152 +7141,114 @@ async def delete_search_ads360_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteSearchAds360LinkRequest): - request = analytics_admin.DeleteSearchAds360LinkRequest(request) + if not isinstance(request, analytics_admin.UpdateAudienceRequest): + request = analytics_admin.UpdateAudienceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if audience is not None: + request.audience = audience + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_search_ads360_link + self._client._transport.update_audience ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + (("audience.name", request.audience.name),) + ), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc( + response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - async def update_search_ads360_link( + # Done; return the response. + return response + + async def archive_audience( self, - request: Optional[ - Union[analytics_admin.UpdateSearchAds360LinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.ArchiveAudienceRequest, dict]] = None, *, - search_ads_360_link: Optional[resources.SearchAds360Link] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.SearchAds360Link: - r"""Updates a SearchAds360Link on a property. + ) -> None: + r"""Archives an Audience on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateSearchAds360LinkRequest, dict]]): - The request object. Request message for - UpdateSearchAds360Link RPC. - search_ads_360_link (:class:`google.analytics.admin_v1alpha.types.SearchAds360Link`): - The SearchAds360Link to update - This corresponds to the ``search_ads_360_link`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to be updated. Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
+ request (Optional[Union[google.analytics.admin_v1alpha.types.ArchiveAudienceRequest, dict]]): + The request object. Request message for ArchiveAudience + RPC. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.types.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. - """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([search_ads_360_link, update_mask]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateSearchAds360LinkRequest): - request = analytics_admin.UpdateSearchAds360LinkRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if search_ads_360_link is not None: - request.search_ads_360_link = search_ads_360_link - if update_mask is not None: - request.update_mask = update_mask + if not isinstance(request, analytics_admin.ArchiveAudienceRequest): + request = analytics_admin.ArchiveAudienceRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_search_ads360_link + self._client._transport.archive_audience ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("search_ads_360_link.name", request.search_ads_360_link.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - response = await rpc( + await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. - return response - - async def get_attribution_settings( + async def get_search_ads360_link( self, request: Optional[ - Union[analytics_admin.GetAttributionSettingsRequest, dict] + Union[analytics_admin.GetSearchAds360LinkRequest, dict] ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AttributionSettings: - r"""Lookup for a AttributionSettings singleton. + ) -> resources.SearchAds360Link: + r"""Look up a single SearchAds360Link Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetAttributionSettingsRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.GetSearchAds360LinkRequest, dict]]): The request object. Request message for - GetAttributionSettings RPC. + GetSearchAds360Link RPC. name (:class:`str`): - Required. The name of the attribution - settings to retrieve. Format: - properties/{property}/attributionSettings + Required. The name of the + SearchAds360Link to get. Example format: + properties/1234/SearchAds360Link/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -7224,10 +7260,9 @@ async def get_attribution_settings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.AttributionSettings: - The attribution settings used for a - given property. This is a singleton - resource. 
+ google.analytics.admin_v1alpha.types.SearchAds360Link: + A link between a GA4 property and a + Search Ads 360 entity. """ # Create or coerce a protobuf request object. @@ -7242,8 +7277,8 @@ async def get_attribution_settings( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetAttributionSettingsRequest): - request = analytics_admin.GetAttributionSettingsRequest(request) + if not isinstance(request, analytics_admin.GetSearchAds360LinkRequest): + request = analytics_admin.GetSearchAds360LinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -7253,7 +7288,7 @@ async def get_attribution_settings( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_attribution_settings + self._client._transport.get_search_ads360_link ] # Certain fields should be provided within the metadata header; @@ -7276,40 +7311,28 @@ async def get_attribution_settings( # Done; return the response. return response - async def update_attribution_settings( + async def list_search_ads360_links( self, request: Optional[ - Union[analytics_admin.UpdateAttributionSettingsRequest, dict] + Union[analytics_admin.ListSearchAds360LinksRequest, dict] ] = None, *, - attribution_settings: Optional[resources.AttributionSettings] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AttributionSettings: - r"""Updates attribution settings on a property. + ) -> pagers.ListSearchAds360LinksAsyncPager: + r"""Lists all SearchAds360Links on a property. 
Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateAttributionSettingsRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.ListSearchAds360LinksRequest, dict]]): The request object. Request message for - UpdateAttributionSettings RPC - attribution_settings (:class:`google.analytics.admin_v1alpha.types.AttributionSettings`): - Required. The attribution settings to update. The - ``name`` field is used to identify the settings to be - updated. - - This corresponds to the ``attribution_settings`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to be updated. Field names - must be in snake case (e.g., "field_to_update"). Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. + ListSearchAds360Links RPC. + parent (:class:`str`): + Required. Example format: + properties/1234 - This corresponds to the ``update_mask`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -7319,16 +7342,18 @@ async def update_attribution_settings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.AttributionSettings: - The attribution settings used for a - given property. This is a singleton - resource. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListSearchAds360LinksAsyncPager: + Response message for + ListSearchAds360Links RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([attribution_settings, update_mask]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -7337,28 +7362,24 @@ async def update_attribution_settings( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateAttributionSettingsRequest): - request = analytics_admin.UpdateAttributionSettingsRequest(request) + if not isinstance(request, analytics_admin.ListSearchAds360LinksRequest): + request = analytics_admin.ListSearchAds360LinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if attribution_settings is not None: - request.attribution_settings = attribution_settings - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_attribution_settings + self._client._transport.list_search_ads360_links ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("attribution_settings.name", request.attribution_settings.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -7372,74 +7393,12 @@ async def update_attribution_settings( metadata=metadata, ) - # Done; return the response. 
- return response - - async def run_access_report( - self, - request: Optional[Union[analytics_admin.RunAccessReportRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.RunAccessReportResponse: - r"""Returns a customized report of data access records. The report - provides records of each time a user reads Google Analytics - reporting data. Access records are retained for up to 2 years. - - Data Access Reports can be requested for a property. Reports may - be requested for any property, but dimensions that aren't - related to quota can only be requested on Google Analytics 360 - properties. This method is only available to Administrators. - - These data access records include GA4 UI Reporting, GA4 UI - Explorations, GA4 Data API, and other products like Firebase & - Admob that can retrieve data from Google Analytics through a - linkage. These records don't include property configuration - changes like adding a stream or changing a property's time zone. - For configuration change history, see - `searchChangeHistoryEvents `__. - - Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.RunAccessReportRequest, dict]]): - The request object. The request for a Data Access Record - Report. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.types.RunAccessReportResponse: - The customized Data Access Record - Report response. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.RunAccessReportRequest): - request = analytics_admin.RunAccessReportRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.run_access_report - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("entity", request.entity),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSearchAds360LinksAsyncPager( + method=rpc, + request=request, + response=response, retry=retry, timeout=timeout, metadata=metadata, @@ -7448,38 +7407,36 @@ async def run_access_report( # Done; return the response. return response - async def create_access_binding( + async def create_search_ads360_link( self, request: Optional[ - Union[analytics_admin.CreateAccessBindingRequest, dict] + Union[analytics_admin.CreateSearchAds360LinkRequest, dict] ] = None, *, parent: Optional[str] = None, - access_binding: Optional[resources.AccessBinding] = None, + search_ads_360_link: Optional[resources.SearchAds360Link] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AccessBinding: - r"""Creates an access binding on an account or property. + ) -> resources.SearchAds360Link: + r"""Creates a SearchAds360Link. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateAccessBindingRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateSearchAds360LinkRequest, dict]]): The request object. 
Request message for - CreateAccessBinding RPC. + CreateSearchAds360Link RPC. parent (:class:`str`): - Required. Formats: - - - accounts/{account} - - properties/{property} + Required. Example format: + properties/1234 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - access_binding (:class:`google.analytics.admin_v1alpha.types.AccessBinding`): - Required. The access binding to + search_ads_360_link (:class:`google.analytics.admin_v1alpha.types.SearchAds360Link`): + Required. The SearchAds360Link to create. - This corresponds to the ``access_binding`` field + This corresponds to the ``search_ads_360_link`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -7489,15 +7446,15 @@ async def create_access_binding( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.AccessBinding: - A binding of a user to a set of - roles. + google.analytics.admin_v1alpha.types.SearchAds360Link: + A link between a GA4 property and a + Search Ads 360 entity. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, access_binding]) + has_flattened_params = any([parent, search_ads_360_link]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -7506,20 +7463,20 @@ async def create_access_binding( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.CreateAccessBindingRequest): - request = analytics_admin.CreateAccessBindingRequest(request) + if not isinstance(request, analytics_admin.CreateSearchAds360LinkRequest): + request = analytics_admin.CreateSearchAds360LinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if access_binding is not None: - request.access_binding = access_binding + if search_ads_360_link is not None: + request.search_ads_360_link = search_ads_360_link # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_access_binding + self._client._transport.create_search_ads360_link ] # Certain fields should be provided within the metadata header; @@ -7542,29 +7499,28 @@ async def create_access_binding( # Done; return the response. return response - async def get_access_binding( + async def delete_search_ads360_link( self, - request: Optional[Union[analytics_admin.GetAccessBindingRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.DeleteSearchAds360LinkRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AccessBinding: - r"""Gets information about an access binding. + ) -> None: + r"""Deletes a SearchAds360Link on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetAccessBindingRequest, dict]]): - The request object. Request message for GetAccessBinding - RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteSearchAds360LinkRequest, dict]]): + The request object. Request message for + DeleteSearchAds360Link RPC. name (:class:`str`): - Required. The name of the access - binding to retrieve. 
Formats: - - - - accounts/{account}/accessBindings/{accessBinding} - - - properties/{property}/accessBindings/{accessBinding} + Required. The name of the + SearchAds360Link to delete. Example + format: + properties/1234/SearchAds360Links/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -7574,12 +7530,6 @@ async def get_access_binding( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.types.AccessBinding: - A binding of a user to a set of - roles. - """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -7593,8 +7543,8 @@ async def get_access_binding( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetAccessBindingRequest): - request = analytics_admin.GetAccessBindingRequest(request) + if not isinstance(request, analytics_admin.DeleteSearchAds360LinkRequest): + request = analytics_admin.DeleteSearchAds360LinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -7604,7 +7554,7 @@ async def get_access_binding( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_access_binding + self._client._transport.delete_search_ads360_link ] # Certain fields should be provided within the metadata header; @@ -7617,38 +7567,43 @@ async def get_access_binding( self._client._validate_universe_domain() # Send the request. - response = await rpc( + await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. 
- return response - - async def update_access_binding( + async def update_search_ads360_link( self, request: Optional[ - Union[analytics_admin.UpdateAccessBindingRequest, dict] + Union[analytics_admin.UpdateSearchAds360LinkRequest, dict] ] = None, *, - access_binding: Optional[resources.AccessBinding] = None, + search_ads_360_link: Optional[resources.SearchAds360Link] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AccessBinding: - r"""Updates an access binding on an account or property. + ) -> resources.SearchAds360Link: + r"""Updates a SearchAds360Link on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateAccessBindingRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateSearchAds360LinkRequest, dict]]): The request object. Request message for - UpdateAccessBinding RPC. - access_binding (:class:`google.analytics.admin_v1alpha.types.AccessBinding`): - Required. The access binding to - update. + UpdateSearchAds360Link RPC. + search_ads_360_link (:class:`google.analytics.admin_v1alpha.types.SearchAds360Link`): + The SearchAds360Link to update + This corresponds to the ``search_ads_360_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to be updated. Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. - This corresponds to the ``access_binding`` field + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -7658,15 +7613,15 @@ async def update_access_binding( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.AccessBinding: - A binding of a user to a set of - roles. - + google.analytics.admin_v1alpha.types.SearchAds360Link: + A link between a GA4 property and a + Search Ads 360 entity. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([access_binding]) + has_flattened_params = any([search_ads_360_link, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -7675,25 +7630,27 @@ async def update_access_binding( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateAccessBindingRequest): - request = analytics_admin.UpdateAccessBindingRequest(request) + if not isinstance(request, analytics_admin.UpdateSearchAds360LinkRequest): + request = analytics_admin.UpdateSearchAds360LinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if access_binding is not None: - request.access_binding = access_binding + if search_ads_360_link is not None: + request.search_ads_360_link = search_ads_360_link + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_access_binding + self._client._transport.update_search_ads360_link ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("access_binding.name", request.access_binding.name),) + (("search_ads_360_link.name", request.search_ads_360_link.name),) ), ) @@ -7711,30 +7668,27 @@ async def update_access_binding( # Done; return the response. return response - async def delete_access_binding( + async def get_attribution_settings( self, request: Optional[ - Union[analytics_admin.DeleteAccessBindingRequest, dict] + Union[analytics_admin.GetAttributionSettingsRequest, dict] ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an access binding on an account or property. + ) -> resources.AttributionSettings: + r"""Lookup for a AttributionSettings singleton. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteAccessBindingRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.GetAttributionSettingsRequest, dict]]): The request object. Request message for - DeleteAccessBinding RPC. + GetAttributionSettings RPC. name (:class:`str`): - Required. Formats: - - - - accounts/{account}/accessBindings/{accessBinding} - - - properties/{property}/accessBindings/{accessBinding} + Required. The name of the attribution + settings to retrieve. Format: + properties/{property}/attributionSettings This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -7744,6 +7698,13 @@ async def delete_access_binding( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.AttributionSettings: + The attribution settings used for a + given property. This is a singleton + resource. + """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have @@ -7757,8 +7718,8 @@ async def delete_access_binding( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteAccessBindingRequest): - request = analytics_admin.DeleteAccessBindingRequest(request) + if not isinstance(request, analytics_admin.GetAttributionSettingsRequest): + request = analytics_admin.GetAttributionSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -7768,7 +7729,7 @@ async def delete_access_binding( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_access_binding + self._client._transport.get_attribution_settings ] # Certain fields should be provided within the metadata header; @@ -7781,37 +7742,50 @@ async def delete_access_binding( self._client._validate_universe_domain() # Send the request. - await rpc( + response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - async def list_access_bindings( + # Done; return the response. + return response + + async def update_attribution_settings( self, request: Optional[ - Union[analytics_admin.ListAccessBindingsRequest, dict] + Union[analytics_admin.UpdateAttributionSettingsRequest, dict] ] = None, *, - parent: Optional[str] = None, + attribution_settings: Optional[resources.AttributionSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAccessBindingsAsyncPager: - r"""Lists all access bindings on an account or property. + ) -> resources.AttributionSettings: + r"""Updates attribution settings on a property. 
Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListAccessBindingsRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateAttributionSettingsRequest, dict]]): The request object. Request message for - ListAccessBindings RPC. - parent (:class:`str`): - Required. Formats: + UpdateAttributionSettings RPC + attribution_settings (:class:`google.analytics.admin_v1alpha.types.AttributionSettings`): + Required. The attribution settings to update. The + ``name`` field is used to identify the settings to be + updated. - - accounts/{account} - - properties/{property} + This corresponds to the ``attribution_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. - This corresponds to the ``parent`` field + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -7821,18 +7795,16 @@ async def list_access_bindings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListAccessBindingsAsyncPager: - Response message for - ListAccessBindings RPC. - Iterating over this object will yield - results and resolve additional pages - automatically. + google.analytics.admin_v1alpha.types.AttributionSettings: + The attribution settings used for a + given property. This is a singleton + resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + has_flattened_params = any([attribution_settings, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -7841,24 +7813,28 @@ async def list_access_bindings( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListAccessBindingsRequest): - request = analytics_admin.ListAccessBindingsRequest(request) + if not isinstance(request, analytics_admin.UpdateAttributionSettingsRequest): + request = analytics_admin.UpdateAttributionSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if attribution_settings is not None: + request.attribution_settings = attribution_settings + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_access_bindings + self._client._transport.update_attribution_settings ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("attribution_settings.name", request.attribution_settings.name),) + ), ) # Validate the universe domain. @@ -7872,41 +7848,38 @@ async def list_access_bindings( metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAccessBindingsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - # Done; return the response. 
return response - async def batch_create_access_bindings( + async def run_access_report( self, - request: Optional[ - Union[analytics_admin.BatchCreateAccessBindingsRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.RunAccessReportRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.BatchCreateAccessBindingsResponse: - r"""Creates information about multiple access bindings to - an account or property. + ) -> analytics_admin.RunAccessReportResponse: + r"""Returns a customized report of data access records. The report + provides records of each time a user reads Google Analytics + reporting data. Access records are retained for up to 2 years. - This method is transactional. If any AccessBinding - cannot be created, none of the AccessBindings will be - created. + Data Access Reports can be requested for a property. Reports may + be requested for any property, but dimensions that aren't + related to quota can only be requested on Google Analytics 360 + properties. This method is only available to Administrators. + + These data access records include GA4 UI Reporting, GA4 UI + Explorations, GA4 Data API, and other products like Firebase & + Admob that can retrieve data from Google Analytics through a + linkage. These records don't include property configuration + changes like adding a stream or changing a property's time zone. + For configuration change history, see + `searchChangeHistoryEvents `__. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.BatchCreateAccessBindingsRequest, dict]]): - The request object. Request message for - BatchCreateAccessBindings RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.RunAccessReportRequest, dict]]): + The request object. The request for a Data Access Record + Report. 
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -7914,27 +7887,27 @@ async def batch_create_access_bindings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.BatchCreateAccessBindingsResponse: - Response message for - BatchCreateAccessBindings RPC. + google.analytics.admin_v1alpha.types.RunAccessReportResponse: + The customized Data Access Record + Report response. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.BatchCreateAccessBindingsRequest): - request = analytics_admin.BatchCreateAccessBindingsRequest(request) + if not isinstance(request, analytics_admin.RunAccessReportRequest): + request = analytics_admin.RunAccessReportRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.batch_create_access_bindings + self._client._transport.run_access_report ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("entity", request.entity),)), ) # Validate the universe domain. @@ -7951,23 +7924,40 @@ async def batch_create_access_bindings( # Done; return the response. 
return response - async def batch_get_access_bindings( + async def create_access_binding( self, request: Optional[ - Union[analytics_admin.BatchGetAccessBindingsRequest, dict] + Union[analytics_admin.CreateAccessBindingRequest, dict] ] = None, *, + parent: Optional[str] = None, + access_binding: Optional[resources.AccessBinding] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.BatchGetAccessBindingsResponse: - r"""Gets information about multiple access bindings to an - account or property. + ) -> resources.AccessBinding: + r"""Creates an access binding on an account or property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.BatchGetAccessBindingsRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateAccessBindingRequest, dict]]): The request object. Request message for - BatchGetAccessBindings RPC. + CreateAccessBinding RPC. + parent (:class:`str`): + Required. Formats: + + - accounts/{account} + - properties/{property} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + access_binding (:class:`google.analytics.admin_v1alpha.types.AccessBinding`): + Required. The access binding to + create. + + This corresponds to the ``access_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -7975,21 +7965,37 @@ async def batch_get_access_bindings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.BatchGetAccessBindingsResponse: - Response message for - BatchGetAccessBindings RPC. + google.analytics.admin_v1alpha.types.AccessBinding: + A binding of a user to a set of + roles. 
""" # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, access_binding]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.BatchGetAccessBindingsRequest): - request = analytics_admin.BatchGetAccessBindingsRequest(request) + if not isinstance(request, analytics_admin.CreateAccessBindingRequest): + request = analytics_admin.CreateAccessBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if access_binding is not None: + request.access_binding = access_binding # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.batch_get_access_bindings + self._client._transport.create_access_binding ] # Certain fields should be provided within the metadata header; @@ -8012,51 +8018,75 @@ async def batch_get_access_bindings( # Done; return the response. 
return response - async def batch_update_access_bindings( + async def get_access_binding( self, - request: Optional[ - Union[analytics_admin.BatchUpdateAccessBindingsRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.GetAccessBindingRequest, dict]] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.BatchUpdateAccessBindingsResponse: - r"""Updates information about multiple access bindings to - an account or property. + ) -> resources.AccessBinding: + r"""Gets information about an access binding. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.BatchUpdateAccessBindingsRequest, dict]]): - The request object. Request message for - BatchUpdateAccessBindings RPC. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. + request (Optional[Union[google.analytics.admin_v1alpha.types.GetAccessBindingRequest, dict]]): + The request object. Request message for GetAccessBinding + RPC. + name (:class:`str`): + Required. The name of the access + binding to retrieve. Formats: + + - + accounts/{account}/accessBindings/{accessBinding} + - + properties/{property}/accessBindings/{accessBinding} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.BatchUpdateAccessBindingsResponse: - Response message for - BatchUpdateAccessBindings RPC. + google.analytics.admin_v1alpha.types.AccessBinding: + A binding of a user to a set of + roles. 
""" # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.BatchUpdateAccessBindingsRequest): - request = analytics_admin.BatchUpdateAccessBindingsRequest(request) + if not isinstance(request, analytics_admin.GetAccessBindingRequest): + request = analytics_admin.GetAccessBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.batch_update_access_bindings + self._client._transport.get_access_binding ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -8073,79 +8103,114 @@ async def batch_update_access_bindings( # Done; return the response. 
return response - async def batch_delete_access_bindings( + async def update_access_binding( self, request: Optional[ - Union[analytics_admin.BatchDeleteAccessBindingsRequest, dict] + Union[analytics_admin.UpdateAccessBindingRequest, dict] ] = None, *, + access_binding: Optional[resources.AccessBinding] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes information about multiple users' links to an - account or property. + ) -> resources.AccessBinding: + r"""Updates an access binding on an account or property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.BatchDeleteAccessBindingsRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateAccessBindingRequest, dict]]): The request object. Request message for - BatchDeleteAccessBindings RPC. + UpdateAccessBinding RPC. + access_binding (:class:`google.analytics.admin_v1alpha.types.AccessBinding`): + Required. The access binding to + update. + + This corresponds to the ``access_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.AccessBinding: + A binding of a user to a set of + roles. + """ # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([access_binding]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.BatchDeleteAccessBindingsRequest): - request = analytics_admin.BatchDeleteAccessBindingsRequest(request) + if not isinstance(request, analytics_admin.UpdateAccessBindingRequest): + request = analytics_admin.UpdateAccessBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if access_binding is not None: + request.access_binding = access_binding # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.batch_delete_access_bindings + self._client._transport.update_access_binding ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("access_binding.name", request.access_binding.name),) + ), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc( + response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - async def get_expanded_data_set( + # Done; return the response. + return response + + async def delete_access_binding( self, request: Optional[ - Union[analytics_admin.GetExpandedDataSetRequest, dict] + Union[analytics_admin.DeleteAccessBindingRequest, dict] ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> expanded_data_set.ExpandedDataSet: - r"""Lookup for a single ExpandedDataSet. + ) -> None: + r"""Deletes an access binding on an account or property. 
Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetExpandedDataSetRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteAccessBindingRequest, dict]]): The request object. Request message for - GetExpandedDataSet RPC. + DeleteAccessBinding RPC. name (:class:`str`): - Required. The name of the - ExpandedDataSet to get. Example format: - properties/1234/expandedDataSets/5678 + Required. Formats: + + - + accounts/{account}/accessBindings/{accessBinding} + - + properties/{property}/accessBindings/{accessBinding} This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -8155,12 +8220,6 @@ async def get_expanded_data_set( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.types.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. - """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -8174,8 +8233,8 @@ async def get_expanded_data_set( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetExpandedDataSetRequest): - request = analytics_admin.GetExpandedDataSetRequest(request) + if not isinstance(request, analytics_admin.DeleteAccessBindingRequest): + request = analytics_admin.DeleteAccessBindingRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -8185,7 +8244,7 @@ async def get_expanded_data_set( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_expanded_data_set + self._client._transport.delete_access_binding ] # Certain fields should be provided within the metadata header; @@ -8198,36 +8257,35 @@ async def get_expanded_data_set( self._client._validate_universe_domain() # Send the request. - response = await rpc( + await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. - return response - - async def list_expanded_data_sets( + async def list_access_bindings( self, request: Optional[ - Union[analytics_admin.ListExpandedDataSetsRequest, dict] + Union[analytics_admin.ListAccessBindingsRequest, dict] ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListExpandedDataSetsAsyncPager: - r"""Lists ExpandedDataSets on a property. + ) -> pagers.ListAccessBindingsAsyncPager: + r"""Lists all access bindings on an account or property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListExpandedDataSetsRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.ListAccessBindingsRequest, dict]]): The request object. Request message for - ListExpandedDataSets RPC. + ListAccessBindings RPC. parent (:class:`str`): - Required. Example format: - properties/1234 + Required. Formats: + + - accounts/{account} + - properties/{property} This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -8239,9 +8297,9 @@ async def list_expanded_data_sets( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListExpandedDataSetsAsyncPager: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListAccessBindingsAsyncPager: Response message for - ListExpandedDataSets RPC. 
+ ListAccessBindings RPC. Iterating over this object will yield results and resolve additional pages automatically. @@ -8259,8 +8317,8 @@ async def list_expanded_data_sets( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListExpandedDataSetsRequest): - request = analytics_admin.ListExpandedDataSetsRequest(request) + if not isinstance(request, analytics_admin.ListAccessBindingsRequest): + request = analytics_admin.ListAccessBindingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -8270,7 +8328,7 @@ async def list_expanded_data_sets( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_expanded_data_sets + self._client._transport.list_access_bindings ] # Certain fields should be provided within the metadata header; @@ -8292,7 +8350,7 @@ async def list_expanded_data_sets( # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListExpandedDataSetsAsyncPager( + response = pagers.ListAccessBindingsAsyncPager( method=rpc, request=request, response=response, @@ -8304,38 +8362,27 @@ async def list_expanded_data_sets( # Done; return the response. 
return response - async def create_expanded_data_set( + async def batch_create_access_bindings( self, request: Optional[ - Union[analytics_admin.CreateExpandedDataSetRequest, dict] + Union[analytics_admin.BatchCreateAccessBindingsRequest, dict] ] = None, *, - parent: Optional[str] = None, - expanded_data_set: Optional[gaa_expanded_data_set.ExpandedDataSet] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_expanded_data_set.ExpandedDataSet: - r"""Creates a ExpandedDataSet. + ) -> analytics_admin.BatchCreateAccessBindingsResponse: + r"""Creates information about multiple access bindings to + an account or property. + + This method is transactional. If any AccessBinding + cannot be created, none of the AccessBindings will be + created. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateExpandedDataSetRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.BatchCreateAccessBindingsRequest, dict]]): The request object. Request message for - CreateExpandedDataSet RPC. - parent (:class:`str`): - Required. Example format: - properties/1234 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - expanded_data_set (:class:`google.analytics.admin_v1alpha.types.ExpandedDataSet`): - Required. The ExpandedDataSet to - create. - - This corresponds to the ``expanded_data_set`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. + BatchCreateAccessBindings RPC. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -8343,37 +8390,21 @@ async def create_expanded_data_set( sent along with the request as metadata. 
Returns: - google.analytics.admin_v1alpha.types.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. + google.analytics.admin_v1alpha.types.BatchCreateAccessBindingsResponse: + Response message for + BatchCreateAccessBindings RPC. """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, expanded_data_set]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateExpandedDataSetRequest): - request = analytics_admin.CreateExpandedDataSetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if expanded_data_set is not None: - request.expanded_data_set = expanded_data_set + if not isinstance(request, analytics_admin.BatchCreateAccessBindingsRequest): + request = analytics_admin.BatchCreateAccessBindingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_expanded_data_set + self._client._transport.batch_create_access_bindings ] # Certain fields should be provided within the metadata header; @@ -8396,42 +8427,84 @@ async def create_expanded_data_set( # Done; return the response. 
return response - async def update_expanded_data_set( + async def batch_get_access_bindings( self, request: Optional[ - Union[analytics_admin.UpdateExpandedDataSetRequest, dict] + Union[analytics_admin.BatchGetAccessBindingsRequest, dict] ] = None, *, - expanded_data_set: Optional[gaa_expanded_data_set.ExpandedDataSet] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_expanded_data_set.ExpandedDataSet: - r"""Updates a ExpandedDataSet on a property. + ) -> analytics_admin.BatchGetAccessBindingsResponse: + r"""Gets information about multiple access bindings to an + account or property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateExpandedDataSetRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.BatchGetAccessBindingsRequest, dict]]): The request object. Request message for - UpdateExpandedDataSet RPC. - expanded_data_set (:class:`google.analytics.admin_v1alpha.types.ExpandedDataSet`): - Required. The ExpandedDataSet to update. The resource's - ``name`` field is used to identify the ExpandedDataSet - to be updated. + BatchGetAccessBindings RPC. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - This corresponds to the ``expanded_data_set`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to be updated. Field names - must be in snake case (e.g., "field_to_update"). Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. 
+ Returns: + google.analytics.admin_v1alpha.types.BatchGetAccessBindingsResponse: + Response message for + BatchGetAccessBindings RPC. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.BatchGetAccessBindingsRequest): + request = analytics_admin.BatchGetAccessBindingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_access_bindings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_update_access_bindings( + self, + request: Optional[ + Union[analytics_admin.BatchUpdateAccessBindingsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_admin.BatchUpdateAccessBindingsResponse: + r"""Updates information about multiple access bindings to + an account or property. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.BatchUpdateAccessBindingsRequest, dict]]): + The request object. Request message for + BatchUpdateAccessBindings RPC. 
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -8439,45 +8512,27 @@ async def update_expanded_data_set( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. + google.analytics.admin_v1alpha.types.BatchUpdateAccessBindingsResponse: + Response message for + BatchUpdateAccessBindings RPC. """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([expanded_data_set, update_mask]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateExpandedDataSetRequest): - request = analytics_admin.UpdateExpandedDataSetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if expanded_data_set is not None: - request.expanded_data_set = expanded_data_set - if update_mask is not None: - request.update_mask = update_mask + if not isinstance(request, analytics_admin.BatchUpdateAccessBindingsRequest): + request = analytics_admin.BatchUpdateAccessBindingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_expanded_data_set + self._client._transport.batch_update_access_bindings ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("expanded_data_set.name", request.expanded_data_set.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -8494,30 +8549,23 @@ async def update_expanded_data_set( # Done; return the response. return response - async def delete_expanded_data_set( + async def batch_delete_access_bindings( self, request: Optional[ - Union[analytics_admin.DeleteExpandedDataSetRequest, dict] + Union[analytics_admin.BatchDeleteAccessBindingsRequest, dict] ] = None, *, - name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a ExpandedDataSet on a property. + r"""Deletes information about multiple users' links to an + account or property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteExpandedDataSetRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.BatchDeleteAccessBindingsRequest, dict]]): The request object. Request message for - DeleteExpandedDataSet RPC. - name (:class:`str`): - Required. Example format: - properties/1234/expandedDataSets/5678 - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. + BatchDeleteAccessBindings RPC. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -8525,35 +8573,21 @@ async def delete_expanded_data_set( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteExpandedDataSetRequest): - request = analytics_admin.DeleteExpandedDataSetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name + if not isinstance(request, analytics_admin.BatchDeleteAccessBindingsRequest): + request = analytics_admin.BatchDeleteAccessBindingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_expanded_data_set + self._client._transport.batch_delete_access_bindings ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -8567,25 +8601,27 @@ async def delete_expanded_data_set( metadata=metadata, ) - async def get_channel_group( + async def get_expanded_data_set( self, - request: Optional[Union[analytics_admin.GetChannelGroupRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.GetExpandedDataSetRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> channel_group.ChannelGroup: - r"""Lookup for a single ChannelGroup. + ) -> expanded_data_set.ExpandedDataSet: + r"""Lookup for a single ExpandedDataSet. 
Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetChannelGroupRequest, dict]]): - The request object. Request message for GetChannelGroup - RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.GetExpandedDataSetRequest, dict]]): + The request object. Request message for + GetExpandedDataSet RPC. name (:class:`str`): - Required. The ChannelGroup to get. - Example format: - properties/1234/channelGroups/5678 + Required. The name of the + ExpandedDataSet to get. Example format: + properties/1234/expandedDataSets/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -8597,9 +8633,9 @@ async def get_channel_group( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.ChannelGroup: - A resource message representing a - Channel Group. + google.analytics.admin_v1alpha.types.ExpandedDataSet: + A resource message representing a GA4 + ExpandedDataSet. """ # Create or coerce a protobuf request object. @@ -8614,8 +8650,8 @@ async def get_channel_group( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetChannelGroupRequest): - request = analytics_admin.GetChannelGroupRequest(request) + if not isinstance(request, analytics_admin.GetExpandedDataSetRequest): + request = analytics_admin.GetExpandedDataSetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -8625,7 +8661,7 @@ async def get_channel_group( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_channel_group + self._client._transport.get_expanded_data_set ] # Certain fields should be provided within the metadata header; @@ -8648,24 +8684,25 @@ async def get_channel_group( # Done; return the response. 
return response - async def list_channel_groups( + async def list_expanded_data_sets( self, - request: Optional[Union[analytics_admin.ListChannelGroupsRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.ListExpandedDataSetsRequest, dict] + ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListChannelGroupsAsyncPager: - r"""Lists ChannelGroups on a property. + ) -> pagers.ListExpandedDataSetsAsyncPager: + r"""Lists ExpandedDataSets on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListChannelGroupsRequest, dict]]): - The request object. Request message for ListChannelGroups - RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.ListExpandedDataSetsRequest, dict]]): + The request object. Request message for + ListExpandedDataSets RPC. parent (:class:`str`): - Required. The property for which to - list ChannelGroups. Example format: + Required. Example format: properties/1234 This corresponds to the ``parent`` field @@ -8678,9 +8715,9 @@ async def list_channel_groups( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListChannelGroupsAsyncPager: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListExpandedDataSetsAsyncPager: Response message for - ListChannelGroups RPC. + ListExpandedDataSets RPC. Iterating over this object will yield results and resolve additional pages automatically. @@ -8698,8 +8735,8 @@ async def list_channel_groups( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.ListChannelGroupsRequest): - request = analytics_admin.ListChannelGroupsRequest(request) + if not isinstance(request, analytics_admin.ListExpandedDataSetsRequest): + request = analytics_admin.ListExpandedDataSetsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -8709,7 +8746,7 @@ async def list_channel_groups( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_channel_groups + self._client._transport.list_expanded_data_sets ] # Certain fields should be provided within the metadata header; @@ -8731,7 +8768,7 @@ async def list_channel_groups( # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListChannelGroupsAsyncPager( + response = pagers.ListExpandedDataSetsAsyncPager( method=rpc, request=request, response=response, @@ -8743,35 +8780,36 @@ async def list_channel_groups( # Done; return the response. return response - async def create_channel_group( + async def create_expanded_data_set( self, request: Optional[ - Union[analytics_admin.CreateChannelGroupRequest, dict] + Union[analytics_admin.CreateExpandedDataSetRequest, dict] ] = None, *, parent: Optional[str] = None, - channel_group: Optional[gaa_channel_group.ChannelGroup] = None, + expanded_data_set: Optional[gaa_expanded_data_set.ExpandedDataSet] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_channel_group.ChannelGroup: - r"""Creates a ChannelGroup. + ) -> gaa_expanded_data_set.ExpandedDataSet: + r"""Creates a ExpandedDataSet. 
Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateChannelGroupRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateExpandedDataSetRequest, dict]]): The request object. Request message for - CreateChannelGroup RPC. + CreateExpandedDataSet RPC. parent (:class:`str`): - Required. The property for which to - create a ChannelGroup. Example format: + Required. Example format: properties/1234 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - channel_group (:class:`google.analytics.admin_v1alpha.types.ChannelGroup`): - Required. The ChannelGroup to create. - This corresponds to the ``channel_group`` field + expanded_data_set (:class:`google.analytics.admin_v1alpha.types.ExpandedDataSet`): + Required. The ExpandedDataSet to + create. + + This corresponds to the ``expanded_data_set`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -8781,15 +8819,15 @@ async def create_channel_group( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.ChannelGroup: - A resource message representing a - Channel Group. + google.analytics.admin_v1alpha.types.ExpandedDataSet: + A resource message representing a GA4 + ExpandedDataSet. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, channel_group]) + has_flattened_params = any([parent, expanded_data_set]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -8798,20 +8836,20 @@ async def create_channel_group( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateChannelGroupRequest): - request = analytics_admin.CreateChannelGroupRequest(request) + if not isinstance(request, analytics_admin.CreateExpandedDataSetRequest): + request = analytics_admin.CreateExpandedDataSetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if channel_group is not None: - request.channel_group = channel_group + if expanded_data_set is not None: + request.expanded_data_set = expanded_data_set # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_channel_group + self._client._transport.create_expanded_data_set ] # Certain fields should be provided within the metadata header; @@ -8834,30 +8872,30 @@ async def create_channel_group( # Done; return the response. 
return response - async def update_channel_group( + async def update_expanded_data_set( self, request: Optional[ - Union[analytics_admin.UpdateChannelGroupRequest, dict] + Union[analytics_admin.UpdateExpandedDataSetRequest, dict] ] = None, *, - channel_group: Optional[gaa_channel_group.ChannelGroup] = None, + expanded_data_set: Optional[gaa_expanded_data_set.ExpandedDataSet] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_channel_group.ChannelGroup: - r"""Updates a ChannelGroup. + ) -> gaa_expanded_data_set.ExpandedDataSet: + r"""Updates a ExpandedDataSet on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateChannelGroupRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateExpandedDataSetRequest, dict]]): The request object. Request message for - UpdateChannelGroup RPC. - channel_group (:class:`google.analytics.admin_v1alpha.types.ChannelGroup`): - Required. The ChannelGroup to update. The resource's - ``name`` field is used to identify the ChannelGroup to - be updated. + UpdateExpandedDataSet RPC. + expanded_data_set (:class:`google.analytics.admin_v1alpha.types.ExpandedDataSet`): + Required. The ExpandedDataSet to update. The resource's + ``name`` field is used to identify the ExpandedDataSet + to be updated. - This corresponds to the ``channel_group`` field + This corresponds to the ``expanded_data_set`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): @@ -8877,15 +8915,15 @@ async def update_channel_group( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.ChannelGroup: - A resource message representing a - Channel Group. 
+ google.analytics.admin_v1alpha.types.ExpandedDataSet: + A resource message representing a GA4 + ExpandedDataSet. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([channel_group, update_mask]) + has_flattened_params = any([expanded_data_set, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -8894,27 +8932,27 @@ async def update_channel_group( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateChannelGroupRequest): - request = analytics_admin.UpdateChannelGroupRequest(request) + if not isinstance(request, analytics_admin.UpdateExpandedDataSetRequest): + request = analytics_admin.UpdateExpandedDataSetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if channel_group is not None: - request.channel_group = channel_group + if expanded_data_set is not None: + request.expanded_data_set = expanded_data_set if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_channel_group + self._client._transport.update_expanded_data_set ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("channel_group.name", request.channel_group.name),) + (("expanded_data_set.name", request.expanded_data_set.name),) ), ) @@ -8932,10 +8970,10 @@ async def update_channel_group( # Done; return the response. 
return response - async def delete_channel_group( + async def delete_expanded_data_set( self, request: Optional[ - Union[analytics_admin.DeleteChannelGroupRequest, dict] + Union[analytics_admin.DeleteExpandedDataSetRequest, dict] ] = None, *, name: Optional[str] = None, @@ -8943,16 +8981,15 @@ async def delete_channel_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a ChannelGroup on a property. + r"""Deletes a ExpandedDataSet on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteChannelGroupRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteExpandedDataSetRequest, dict]]): The request object. Request message for - DeleteChannelGroup RPC. + DeleteExpandedDataSet RPC. name (:class:`str`): - Required. The ChannelGroup to delete. - Example format: - properties/1234/channelGroups/5678 + Required. Example format: + properties/1234/expandedDataSets/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -8975,8 +9012,8 @@ async def delete_channel_group( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteChannelGroupRequest): - request = analytics_admin.DeleteChannelGroupRequest(request) + if not isinstance(request, analytics_admin.DeleteExpandedDataSetRequest): + request = analytics_admin.DeleteExpandedDataSetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -8986,7 +9023,7 @@ async def delete_channel_group( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_channel_group + self._client._transport.delete_expanded_data_set ] # Certain fields should be provided within the metadata header; @@ -9006,148 +9043,25 @@ async def delete_channel_group( metadata=metadata, ) - async def set_automated_ga4_configuration_opt_out( - self, - request: Optional[ - Union[analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, dict] - ] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse: - r"""Sets the opt out status for the automated GA4 setup - process for a UA property. - Note: this has no effect on GA4 property. - - Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.SetAutomatedGa4ConfigurationOptOutRequest, dict]]): - The request object. Request for setting the opt out - status for the automated GA4 setup - process. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.types.SetAutomatedGa4ConfigurationOptOutResponse: - Response message for setting the opt - out status for the automated GA4 setup - process. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance( - request, analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest - ): - request = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[ - self._client._transport.set_automated_ga4_configuration_opt_out - ] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def fetch_automated_ga4_configuration_opt_out( - self, - request: Optional[ - Union[analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, dict] - ] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse: - r"""Fetches the opt out status for the automated GA4 - setup process for a UA property. - Note: this has no effect on GA4 property. - - Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.FetchAutomatedGa4ConfigurationOptOutRequest, dict]]): - The request object. Request for fetching the opt out - status for the automated GA4 setup - process. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.types.FetchAutomatedGa4ConfigurationOptOutResponse: - Response message for fetching the opt - out status for the automated GA4 setup - process. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance( - request, analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest - ): - request = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest( - request - ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.fetch_automated_ga4_configuration_opt_out - ] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_big_query_link( + async def get_channel_group( self, - request: Optional[Union[analytics_admin.GetBigQueryLinkRequest, dict]] = None, + request: Optional[Union[analytics_admin.GetChannelGroupRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.BigQueryLink: - r"""Lookup for a single BigQuery Link. + ) -> channel_group.ChannelGroup: + r"""Lookup for a single ChannelGroup. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetBigQueryLinkRequest, dict]]): - The request object. Request message for GetBigQueryLink + request (Optional[Union[google.analytics.admin_v1alpha.types.GetChannelGroupRequest, dict]]): + The request object. Request message for GetChannelGroup RPC. name (:class:`str`): - Required. The name of the BigQuery link to lookup. - Format: - properties/{property_id}/bigQueryLinks/{bigquery_link_id} - Example: properties/123/bigQueryLinks/456 + Required. The ChannelGroup to get. 
+ Example format: + properties/1234/channelGroups/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -9159,9 +9073,9 @@ async def get_big_query_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.BigQueryLink: - A link between a GA4 Property and - BigQuery project. + google.analytics.admin_v1alpha.types.ChannelGroup: + A resource message representing a + Channel Group. """ # Create or coerce a protobuf request object. @@ -9176,8 +9090,8 @@ async def get_big_query_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetBigQueryLinkRequest): - request = analytics_admin.GetBigQueryLinkRequest(request) + if not isinstance(request, analytics_admin.GetChannelGroupRequest): + request = analytics_admin.GetChannelGroupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -9187,7 +9101,7 @@ async def get_big_query_link( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_big_query_link + self._client._transport.get_channel_group ] # Certain fields should be provided within the metadata header; @@ -9210,24 +9124,24 @@ async def get_big_query_link( # Done; return the response. 
return response - async def list_big_query_links( + async def list_channel_groups( self, - request: Optional[Union[analytics_admin.ListBigQueryLinksRequest, dict]] = None, + request: Optional[Union[analytics_admin.ListChannelGroupsRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBigQueryLinksAsyncPager: - r"""Lists BigQuery Links on a property. + ) -> pagers.ListChannelGroupsAsyncPager: + r"""Lists ChannelGroups on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListBigQueryLinksRequest, dict]]): - The request object. Request message for ListBigQueryLinks + request (Optional[Union[google.analytics.admin_v1alpha.types.ListChannelGroupsRequest, dict]]): + The request object. Request message for ListChannelGroups RPC. parent (:class:`str`): - Required. The name of the property to list BigQuery - links under. Format: properties/{property_id} Example: + Required. The property for which to + list ChannelGroups. Example format: properties/1234 This corresponds to the ``parent`` field @@ -9240,9 +9154,9 @@ async def list_big_query_links( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListBigQueryLinksAsyncPager: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListChannelGroupsAsyncPager: Response message for - ListBigQueryLinks RPC + ListChannelGroups RPC. Iterating over this object will yield results and resolve additional pages automatically. @@ -9260,8 +9174,8 @@ async def list_big_query_links( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.ListBigQueryLinksRequest): - request = analytics_admin.ListBigQueryLinksRequest(request) + if not isinstance(request, analytics_admin.ListChannelGroupsRequest): + request = analytics_admin.ListChannelGroupsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -9271,7 +9185,7 @@ async def list_big_query_links( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_big_query_links + self._client._transport.list_channel_groups ] # Certain fields should be provided within the metadata header; @@ -9293,7 +9207,7 @@ async def list_big_query_links( # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListBigQueryLinksAsyncPager( + response = pagers.ListChannelGroupsAsyncPager( method=rpc, request=request, response=response, @@ -9305,32 +9219,35 @@ async def list_big_query_links( # Done; return the response. return response - async def get_enhanced_measurement_settings( + async def create_channel_group( self, request: Optional[ - Union[analytics_admin.GetEnhancedMeasurementSettingsRequest, dict] + Union[analytics_admin.CreateChannelGroupRequest, dict] ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + channel_group: Optional[gaa_channel_group.ChannelGroup] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.EnhancedMeasurementSettings: - r"""Returns the enhanced measurement settings for this - data stream. Note that the stream must enable enhanced - measurement for these settings to take effect. + ) -> gaa_channel_group.ChannelGroup: + r"""Creates a ChannelGroup. 
Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetEnhancedMeasurementSettingsRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateChannelGroupRequest, dict]]): The request object. Request message for - GetEnhancedMeasurementSettings RPC. - name (:class:`str`): - Required. The name of the settings to lookup. Format: - properties/{property}/dataStreams/{data_stream}/enhancedMeasurementSettings - Example: - "properties/1000/dataStreams/2000/enhancedMeasurementSettings" + CreateChannelGroup RPC. + parent (:class:`str`): + Required. The property for which to + create a ChannelGroup. Example format: + properties/1234 - This corresponds to the ``name`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_group (:class:`google.analytics.admin_v1alpha.types.ChannelGroup`): + Required. The ChannelGroup to create. + This corresponds to the ``channel_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -9340,17 +9257,15 @@ async def get_enhanced_measurement_settings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.EnhancedMeasurementSettings: - Singleton resource under a web - DataStream, configuring measurement of - additional site interactions and - content. + google.analytics.admin_v1alpha.types.ChannelGroup: + A resource message representing a + Channel Group. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + has_flattened_params = any([parent, channel_group]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -9359,26 +9274,26 @@ async def get_enhanced_measurement_settings( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance( - request, analytics_admin.GetEnhancedMeasurementSettingsRequest - ): - request = analytics_admin.GetEnhancedMeasurementSettingsRequest(request) + if not isinstance(request, analytics_admin.CreateChannelGroupRequest): + request = analytics_admin.CreateChannelGroupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if channel_group is not None: + request.channel_group = channel_group # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_enhanced_measurement_settings + self._client._transport.create_channel_group ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -9395,33 +9310,30 @@ async def get_enhanced_measurement_settings( # Done; return the response. 
return response - async def update_enhanced_measurement_settings( + async def update_channel_group( self, request: Optional[ - Union[analytics_admin.UpdateEnhancedMeasurementSettingsRequest, dict] + Union[analytics_admin.UpdateChannelGroupRequest, dict] ] = None, *, - enhanced_measurement_settings: Optional[ - resources.EnhancedMeasurementSettings - ] = None, + channel_group: Optional[gaa_channel_group.ChannelGroup] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.EnhancedMeasurementSettings: - r"""Updates the enhanced measurement settings for this - data stream. Note that the stream must enable enhanced - measurement for these settings to take effect. + ) -> gaa_channel_group.ChannelGroup: + r"""Updates a ChannelGroup. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateEnhancedMeasurementSettingsRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateChannelGroupRequest, dict]]): The request object. Request message for - UpdateEnhancedMeasurementSettings RPC. - enhanced_measurement_settings (:class:`google.analytics.admin_v1alpha.types.EnhancedMeasurementSettings`): - Required. The settings to update. The ``name`` field is - used to identify the settings to be updated. + UpdateChannelGroup RPC. + channel_group (:class:`google.analytics.admin_v1alpha.types.ChannelGroup`): + Required. The ChannelGroup to update. The resource's + ``name`` field is used to identify the ChannelGroup to + be updated. - This corresponds to the ``enhanced_measurement_settings`` field + This corresponds to the ``channel_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): @@ -9441,17 +9353,15 @@ async def update_enhanced_measurement_settings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.EnhancedMeasurementSettings: - Singleton resource under a web - DataStream, configuring measurement of - additional site interactions and - content. + google.analytics.admin_v1alpha.types.ChannelGroup: + A resource message representing a + Channel Group. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([enhanced_measurement_settings, update_mask]) + has_flattened_params = any([channel_group, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -9460,34 +9370,27 @@ async def update_enhanced_measurement_settings( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance( - request, analytics_admin.UpdateEnhancedMeasurementSettingsRequest - ): - request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest(request) + if not isinstance(request, analytics_admin.UpdateChannelGroupRequest): + request = analytics_admin.UpdateChannelGroupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if enhanced_measurement_settings is not None: - request.enhanced_measurement_settings = enhanced_measurement_settings + if channel_group is not None: + request.channel_group = channel_group if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_enhanced_measurement_settings + self._client._transport.update_channel_group ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - ( - ( - "enhanced_measurement_settings.name", - request.enhanced_measurement_settings.name, - ), - ) + (("channel_group.name", request.channel_group.name),) ), ) @@ -9505,129 +9408,159 @@ async def update_enhanced_measurement_settings( # Done; return the response. return response - async def create_connected_site_tag( + async def delete_channel_group( self, request: Optional[ - Union[analytics_admin.CreateConnectedSiteTagRequest, dict] + Union[analytics_admin.DeleteChannelGroupRequest, dict] ] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.CreateConnectedSiteTagResponse: - r"""Creates a connected site tag for a Universal - Analytics property. You can create a maximum of 20 - connected site tags per property. Note: This API cannot - be used on GA4 properties. + ) -> None: + r"""Deletes a ChannelGroup on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateConnectedSiteTagRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteChannelGroupRequest, dict]]): The request object. Request message for - CreateConnectedSiteTag RPC. + DeleteChannelGroup RPC. + name (:class:`str`): + Required. The ChannelGroup to delete. + Example format: + properties/1234/channelGroups/5678 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.types.CreateConnectedSiteTagResponse: - Response message for - CreateConnectedSiteTag RPC. - """ # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateConnectedSiteTagRequest): - request = analytics_admin.CreateConnectedSiteTagRequest(request) + if not isinstance(request, analytics_admin.DeleteChannelGroupRequest): + request = analytics_admin.DeleteChannelGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_connected_site_tag + self._client._transport.delete_channel_group ] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - response = await rpc( + await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. 
- return response - - async def delete_connected_site_tag( + async def set_automated_ga4_configuration_opt_out( self, request: Optional[ - Union[analytics_admin.DeleteConnectedSiteTagRequest, dict] + Union[analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, dict] ] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a connected site tag for a Universal - Analytics property. Note: this has no effect on GA4 - properties. + ) -> analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse: + r"""Sets the opt out status for the automated GA4 setup + process for a UA property. + Note: this has no effect on GA4 property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteConnectedSiteTagRequest, dict]]): - The request object. Request message for - DeleteConnectedSiteTag RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.SetAutomatedGa4ConfigurationOptOutRequest, dict]]): + The request object. Request for setting the opt out + status for the automated GA4 setup + process. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.SetAutomatedGa4ConfigurationOptOutResponse: + Response message for setting the opt + out status for the automated GA4 setup + process. + """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.DeleteConnectedSiteTagRequest): - request = analytics_admin.DeleteConnectedSiteTagRequest(request) + if not isinstance( + request, analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest + ): + request = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_connected_site_tag + self._client._transport.set_automated_ga4_configuration_opt_out ] # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc( + response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - async def list_connected_site_tags( + # Done; return the response. + return response + + async def fetch_automated_ga4_configuration_opt_out( self, request: Optional[ - Union[analytics_admin.ListConnectedSiteTagsRequest, dict] + Union[analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, dict] ] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.ListConnectedSiteTagsResponse: - r"""Lists the connected site tags for a Universal - Analytics property. A maximum of 20 connected site tags - will be returned. Note: this has no effect on GA4 - property. + ) -> analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse: + r"""Fetches the opt out status for the automated GA4 + setup process for a UA property. + Note: this has no effect on GA4 property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListConnectedSiteTagsRequest, dict]]): - The request object. Request message for - ListConnectedSiteTags RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.FetchAutomatedGa4ConfigurationOptOutRequest, dict]]): + The request object. 
Request for fetching the opt out + status for the automated GA4 setup + process. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -9635,21 +9568,26 @@ async def list_connected_site_tags( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.ListConnectedSiteTagsResponse: - Response message for - ListConnectedSiteTags RPC. + google.analytics.admin_v1alpha.types.FetchAutomatedGa4ConfigurationOptOutResponse: + Response message for fetching the opt + out status for the automated GA4 setup + process. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListConnectedSiteTagsRequest): - request = analytics_admin.ListConnectedSiteTagsRequest(request) + if not isinstance( + request, analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest + ): + request = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest( + request + ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_connected_site_tags + self._client._transport.fetch_automated_ga4_configuration_opt_out ] # Validate the universe domain. @@ -9666,24 +9604,36 @@ async def list_connected_site_tags( # Done; return the response. 
return response - async def fetch_connected_ga4_property( + async def create_big_query_link( self, request: Optional[ - Union[analytics_admin.FetchConnectedGa4PropertyRequest, dict] + Union[analytics_admin.CreateBigQueryLinkRequest, dict] ] = None, *, + parent: Optional[str] = None, + bigquery_link: Optional[resources.BigQueryLink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.FetchConnectedGa4PropertyResponse: - r"""Given a specified UA property, looks up the GA4 - property connected to it. Note: this cannot be used with - GA4 properties. + ) -> resources.BigQueryLink: + r"""Creates a BigQueryLink. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.FetchConnectedGa4PropertyRequest, dict]]): - The request object. Request for looking up GA4 property - connected to a UA property. + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateBigQueryLinkRequest, dict]]): + The request object. Request message for + CreateBigQueryLink RPC. + parent (:class:`str`): + Required. Example format: + properties/1234 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + bigquery_link (:class:`google.analytics.admin_v1alpha.types.BigQueryLink`): + Required. The BigQueryLink to create. + This corresponds to the ``bigquery_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -9691,23 +9641,45 @@ async def fetch_connected_ga4_property( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.FetchConnectedGa4PropertyResponse: - Response for looking up GA4 property - connected to a UA property. 
- + google.analytics.admin_v1alpha.types.BigQueryLink: + A link between a GA4 Property and + BigQuery project. + """ # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, bigquery_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.FetchConnectedGa4PropertyRequest): - request = analytics_admin.FetchConnectedGa4PropertyRequest(request) + if not isinstance(request, analytics_admin.CreateBigQueryLinkRequest): + request = analytics_admin.CreateBigQueryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if bigquery_link is not None: + request.bigquery_link = bigquery_link # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.fetch_connected_ga4_property + self._client._transport.create_big_query_link ] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + # Validate the universe domain. self._client._validate_universe_domain() @@ -9722,27 +9694,26 @@ async def fetch_connected_ga4_property( # Done; return the response. 
return response - async def get_ad_sense_link( + async def get_big_query_link( self, - request: Optional[Union[analytics_admin.GetAdSenseLinkRequest, dict]] = None, + request: Optional[Union[analytics_admin.GetBigQueryLinkRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AdSenseLink: - r"""Looks up a single AdSenseLink. + ) -> resources.BigQueryLink: + r"""Lookup for a single BigQuery Link. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetAdSenseLinkRequest, dict]]): - The request object. Request message to be passed to - GetAdSenseLink method. + request (Optional[Union[google.analytics.admin_v1alpha.types.GetBigQueryLinkRequest, dict]]): + The request object. Request message for GetBigQueryLink + RPC. name (:class:`str`): - Required. Unique identifier for the - AdSense Link requested. Format: - properties/{propertyId}/adSenseLinks/{linkId} - Example: - properties/1234/adSenseLinks/5678 + Required. The name of the BigQuery link to lookup. + Format: + properties/{property_id}/bigQueryLinks/{bigquery_link_id} + Example: properties/123/bigQueryLinks/456 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -9754,9 +9725,9 @@ async def get_ad_sense_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.AdSenseLink: - A link between a GA4 Property and an - AdSense for Content ad client. + google.analytics.admin_v1alpha.types.BigQueryLink: + A link between a GA4 Property and + BigQuery project. """ # Create or coerce a protobuf request object. @@ -9771,8 +9742,8 @@ async def get_ad_sense_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.GetAdSenseLinkRequest): - request = analytics_admin.GetAdSenseLinkRequest(request) + if not isinstance(request, analytics_admin.GetBigQueryLinkRequest): + request = analytics_admin.GetBigQueryLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -9782,7 +9753,7 @@ async def get_ad_sense_link( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_ad_sense_link + self._client._transport.get_big_query_link ] # Certain fields should be provided within the metadata header; @@ -9805,36 +9776,29 @@ async def get_ad_sense_link( # Done; return the response. return response - async def create_ad_sense_link( + async def list_big_query_links( self, - request: Optional[Union[analytics_admin.CreateAdSenseLinkRequest, dict]] = None, + request: Optional[Union[analytics_admin.ListBigQueryLinksRequest, dict]] = None, *, parent: Optional[str] = None, - adsense_link: Optional[resources.AdSenseLink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AdSenseLink: - r"""Creates an AdSenseLink. + ) -> pagers.ListBigQueryLinksAsyncPager: + r"""Lists BigQuery Links on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateAdSenseLinkRequest, dict]]): - The request object. Request message to be passed to - CreateAdSenseLink method. + request (Optional[Union[google.analytics.admin_v1alpha.types.ListBigQueryLinksRequest, dict]]): + The request object. Request message for ListBigQueryLinks + RPC. parent (:class:`str`): - Required. The property for which to - create an AdSense Link. Format: - properties/{propertyId} Example: + Required. The name of the property to list BigQuery + links under. 
Format: properties/{property_id} Example: properties/1234 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - adsense_link (:class:`google.analytics.admin_v1alpha.types.AdSenseLink`): - Required. The AdSense Link to create - This corresponds to the ``adsense_link`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -9842,15 +9806,18 @@ async def create_ad_sense_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.AdSenseLink: - A link between a GA4 Property and an - AdSense for Content ad client. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListBigQueryLinksAsyncPager: + Response message for + ListBigQueryLinks RPC + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, adsense_link]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -9859,20 +9826,18 @@ async def create_ad_sense_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateAdSenseLinkRequest): - request = analytics_admin.CreateAdSenseLinkRequest(request) + if not isinstance(request, analytics_admin.ListBigQueryLinksRequest): + request = analytics_admin.ListBigQueryLinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
if parent is not None: request.parent = parent - if adsense_link is not None: - request.adsense_link = adsense_link # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_ad_sense_link + self._client._transport.list_big_query_links ] # Certain fields should be provided within the metadata header; @@ -9892,44 +9857,1310 @@ async def create_ad_sense_link( metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBigQueryLinksAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. return response - async def delete_ad_sense_link( + async def delete_big_query_link( self, - request: Optional[Union[analytics_admin.DeleteAdSenseLinkRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.DeleteBigQueryLinkRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes an AdSenseLink. + r"""Deletes a BigQueryLink on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteAdSenseLinkRequest, dict]]): - The request object. Request message to be passed to - DeleteAdSenseLink method. + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteBigQueryLinkRequest, dict]]): + The request object. Request message for + DeleteBigQueryLink RPC. name (:class:`str`): - Required. Unique identifier for the - AdSense Link to be deleted. Format: - properties/{propertyId}/adSenseLinks/{linkId} - Example: - properties/1234/adSenseLinks/5678 + Required. The BigQueryLink to delete. 
+ Example format: + properties/1234/bigQueryLinks/5678 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.DeleteBigQueryLinkRequest): + request = analytics_admin.DeleteBigQueryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_big_query_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_big_query_link( + self, + request: Optional[ + Union[analytics_admin.UpdateBigQueryLinkRequest, dict] + ] = None, + *, + bigquery_link: Optional[resources.BigQueryLink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.BigQueryLink: + r"""Updates a BigQueryLink. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateBigQueryLinkRequest, dict]]): + The request object. Request message for + UpdateBigQueryLink RPC. + bigquery_link (:class:`google.analytics.admin_v1alpha.types.BigQueryLink`): + Required. The settings to update. The ``name`` field is + used to identify the settings to be updated. + + This corresponds to the ``bigquery_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.BigQueryLink: + A link between a GA4 Property and + BigQuery project. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([bigquery_link, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.UpdateBigQueryLinkRequest): + request = analytics_admin.UpdateBigQueryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if bigquery_link is not None: + request.bigquery_link = bigquery_link + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_big_query_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("bigquery_link.name", request.bigquery_link.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_enhanced_measurement_settings( + self, + request: Optional[ + Union[analytics_admin.GetEnhancedMeasurementSettingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.EnhancedMeasurementSettings: + r"""Returns the enhanced measurement settings for this + data stream. Note that the stream must enable enhanced + measurement for these settings to take effect. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.GetEnhancedMeasurementSettingsRequest, dict]]): + The request object. Request message for + GetEnhancedMeasurementSettings RPC. + name (:class:`str`): + Required. The name of the settings to lookup. Format: + properties/{property}/dataStreams/{data_stream}/enhancedMeasurementSettings + Example: + "properties/1000/dataStreams/2000/enhancedMeasurementSettings" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.EnhancedMeasurementSettings: + Singleton resource under a web + DataStream, configuring measurement of + additional site interactions and + content. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, analytics_admin.GetEnhancedMeasurementSettingsRequest + ): + request = analytics_admin.GetEnhancedMeasurementSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_enhanced_measurement_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_enhanced_measurement_settings( + self, + request: Optional[ + Union[analytics_admin.UpdateEnhancedMeasurementSettingsRequest, dict] + ] = None, + *, + enhanced_measurement_settings: Optional[ + resources.EnhancedMeasurementSettings + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.EnhancedMeasurementSettings: + r"""Updates the enhanced measurement settings for this + data stream. Note that the stream must enable enhanced + measurement for these settings to take effect. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateEnhancedMeasurementSettingsRequest, dict]]): + The request object. Request message for + UpdateEnhancedMeasurementSettings RPC. 
+ enhanced_measurement_settings (:class:`google.analytics.admin_v1alpha.types.EnhancedMeasurementSettings`): + Required. The settings to update. The ``name`` field is + used to identify the settings to be updated. + + This corresponds to the ``enhanced_measurement_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.EnhancedMeasurementSettings: + Singleton resource under a web + DataStream, configuring measurement of + additional site interactions and + content. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([enhanced_measurement_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, analytics_admin.UpdateEnhancedMeasurementSettingsRequest + ): + request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if enhanced_measurement_settings is not None: + request.enhanced_measurement_settings = enhanced_measurement_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_enhanced_measurement_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "enhanced_measurement_settings.name", + request.enhanced_measurement_settings.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_connected_site_tag( + self, + request: Optional[ + Union[analytics_admin.CreateConnectedSiteTagRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_admin.CreateConnectedSiteTagResponse: + r"""Creates a connected site tag for a Universal + Analytics property. You can create a maximum of 20 + connected site tags per property. Note: This API cannot + be used on GA4 properties. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateConnectedSiteTagRequest, dict]]): + The request object. Request message for + CreateConnectedSiteTag RPC. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.CreateConnectedSiteTagResponse: + Response message for + CreateConnectedSiteTag RPC. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.CreateConnectedSiteTagRequest): + request = analytics_admin.CreateConnectedSiteTagRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_connected_site_tag + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_connected_site_tag( + self, + request: Optional[ + Union[analytics_admin.DeleteConnectedSiteTagRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a connected site tag for a Universal + Analytics property. Note: this has no effect on GA4 + properties. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteConnectedSiteTagRequest, dict]]): + The request object. Request message for + DeleteConnectedSiteTag RPC. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.DeleteConnectedSiteTagRequest): + request = analytics_admin.DeleteConnectedSiteTagRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_connected_site_tag + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_connected_site_tags( + self, + request: Optional[ + Union[analytics_admin.ListConnectedSiteTagsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_admin.ListConnectedSiteTagsResponse: + r"""Lists the connected site tags for a Universal + Analytics property. A maximum of 20 connected site tags + will be returned. Note: this has no effect on GA4 + property. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.ListConnectedSiteTagsRequest, dict]]): + The request object. Request message for + ListConnectedSiteTags RPC. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.ListConnectedSiteTagsResponse: + Response message for + ListConnectedSiteTags RPC. + + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.ListConnectedSiteTagsRequest): + request = analytics_admin.ListConnectedSiteTagsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_connected_site_tags + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_connected_ga4_property( + self, + request: Optional[ + Union[analytics_admin.FetchConnectedGa4PropertyRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_admin.FetchConnectedGa4PropertyResponse: + r"""Given a specified UA property, looks up the GA4 + property connected to it. Note: this cannot be used with + GA4 properties. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.FetchConnectedGa4PropertyRequest, dict]]): + The request object. Request for looking up GA4 property + connected to a UA property. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.FetchConnectedGa4PropertyResponse: + Response for looking up GA4 property + connected to a UA property. + + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.FetchConnectedGa4PropertyRequest): + request = analytics_admin.FetchConnectedGa4PropertyRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_connected_ga4_property + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_ad_sense_link( + self, + request: Optional[Union[analytics_admin.GetAdSenseLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.AdSenseLink: + r"""Looks up a single AdSenseLink. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.GetAdSenseLinkRequest, dict]]): + The request object. Request message to be passed to + GetAdSenseLink method. + name (:class:`str`): + Required. Unique identifier for the + AdSense Link requested. Format: + properties/{propertyId}/adSenseLinks/{linkId} + Example: + properties/1234/adSenseLinks/5678 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.analytics.admin_v1alpha.types.AdSenseLink: + A link between a GA4 Property and an + AdSense for Content ad client. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.GetAdSenseLinkRequest): + request = analytics_admin.GetAdSenseLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_ad_sense_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_ad_sense_link( + self, + request: Optional[Union[analytics_admin.CreateAdSenseLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + adsense_link: Optional[resources.AdSenseLink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.AdSenseLink: + r"""Creates an AdSenseLink. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateAdSenseLinkRequest, dict]]): + The request object. Request message to be passed to + CreateAdSenseLink method. + parent (:class:`str`): + Required. The property for which to + create an AdSense Link. Format: + properties/{propertyId} Example: + properties/1234 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + adsense_link (:class:`google.analytics.admin_v1alpha.types.AdSenseLink`): + Required. The AdSense Link to create + This corresponds to the ``adsense_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.AdSenseLink: + A link between a GA4 Property and an + AdSense for Content ad client. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, adsense_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.CreateAdSenseLinkRequest): + request = analytics_admin.CreateAdSenseLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if adsense_link is not None: + request.adsense_link = adsense_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_ad_sense_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_ad_sense_link( + self, + request: Optional[Union[analytics_admin.DeleteAdSenseLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an AdSenseLink. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteAdSenseLinkRequest, dict]]): + The request object. Request message to be passed to + DeleteAdSenseLink method. + name (:class:`str`): + Required. Unique identifier for the + AdSense Link to be deleted. 
Format: + properties/{propertyId}/adSenseLinks/{linkId} + Example: + properties/1234/adSenseLinks/5678 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.DeleteAdSenseLinkRequest): + request = analytics_admin.DeleteAdSenseLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_ad_sense_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_ad_sense_links( + self, + request: Optional[Union[analytics_admin.ListAdSenseLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdSenseLinksAsyncPager: + r"""Lists AdSenseLinks on a property. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.ListAdSenseLinksRequest, dict]]): + The request object. Request message to be passed to + ListAdSenseLinks method. + parent (:class:`str`): + Required. Resource name of the parent + property. Format: + properties/{propertyId} + Example: properties/1234 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListAdSenseLinksAsyncPager: + Response message for ListAdSenseLinks + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, analytics_admin.ListAdSenseLinksRequest): + request = analytics_admin.ListAdSenseLinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_ad_sense_links + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAdSenseLinksAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_event_create_rule( + self, + request: Optional[ + Union[analytics_admin.GetEventCreateRuleRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_create_and_edit.EventCreateRule: + r"""Lookup for a single EventCreateRule. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.GetEventCreateRuleRequest, dict]]): + The request object. Request message for + GetEventCreateRule RPC. + name (:class:`str`): + Required. The name of the + EventCreateRule to get. 
Example format: + properties/123/dataStreams/456/eventCreateRules/789 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.EventCreateRule: + An Event Create Rule defines + conditions that will trigger the + creation of an entirely new event based + upon matched criteria of a source event. + Additional mutations of the parameters + from the source event can be defined. + + Unlike Event Edit rules, Event Creation + Rules have no defined order. They will + all be run independently. + + Event Edit and Event Create rules can't + be used to modify an event created from + an Event Create rule. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.GetEventCreateRuleRequest): + request = analytics_admin.GetEventCreateRuleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_event_create_rule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_event_create_rules( + self, + request: Optional[ + Union[analytics_admin.ListEventCreateRulesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEventCreateRulesAsyncPager: + r"""Lists EventCreateRules on a web data stream. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.ListEventCreateRulesRequest, dict]]): + The request object. Request message for + ListEventCreateRules RPC. + parent (:class:`str`): + Required. Example format: + properties/123/dataStreams/456 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListEventCreateRulesAsyncPager: + Response message for + ListEventCreateRules RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.ListEventCreateRulesRequest): + request = analytics_admin.ListEventCreateRulesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_event_create_rules + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEventCreateRulesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_event_create_rule( + self, + request: Optional[ + Union[analytics_admin.CreateEventCreateRuleRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + event_create_rule: Optional[event_create_and_edit.EventCreateRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_create_and_edit.EventCreateRule: + r"""Creates an EventCreateRule. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateEventCreateRuleRequest, dict]]): + The request object. Request message for + CreateEventCreateRule RPC. + parent (:class:`str`): + Required. Example format: + properties/123/dataStreams/456 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + event_create_rule (:class:`google.analytics.admin_v1alpha.types.EventCreateRule`): + Required. The EventCreateRule to + create. + + This corresponds to the ``event_create_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.EventCreateRule: + An Event Create Rule defines + conditions that will trigger the + creation of an entirely new event based + upon matched criteria of a source event. + Additional mutations of the parameters + from the source event can be defined. + + Unlike Event Edit rules, Event Creation + Rules have no defined order. They will + all be run independently. + + Event Edit and Event Create rules can't + be used to modify an event created from + an Event Create rule. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, event_create_rule]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.CreateEventCreateRuleRequest): + request = analytics_admin.CreateEventCreateRuleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if event_create_rule is not None: + request.event_create_rule = event_create_rule + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_event_create_rule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_event_create_rule( + self, + request: Optional[ + Union[analytics_admin.UpdateEventCreateRuleRequest, dict] + ] = None, + *, + event_create_rule: Optional[event_create_and_edit.EventCreateRule] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_create_and_edit.EventCreateRule: + r"""Updates an EventCreateRule. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateEventCreateRuleRequest, dict]]): + The request object. Request message for + UpdateEventCreateRule RPC. + event_create_rule (:class:`google.analytics.admin_v1alpha.types.EventCreateRule`): + Required. The EventCreateRule to update. The resource's + ``name`` field is used to identify the EventCreateRule + to be updated. + + This corresponds to the ``event_create_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.EventCreateRule: + An Event Create Rule defines + conditions that will trigger the + creation of an entirely new event based + upon matched criteria of a source event. 
+ Additional mutations of the parameters + from the source event can be defined. + + Unlike Event Edit rules, Event Creation + Rules have no defined order. They will + all be run independently. + + Event Edit and Event Create rules can't + be used to modify an event created from + an Event Create rule. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([event_create_rule, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -9938,59 +11169,67 @@ async def delete_ad_sense_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteAdSenseLinkRequest): - request = analytics_admin.DeleteAdSenseLinkRequest(request) + if not isinstance(request, analytics_admin.UpdateEventCreateRuleRequest): + request = analytics_admin.UpdateEventCreateRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if event_create_rule is not None: + request.event_create_rule = event_create_rule + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_ad_sense_link + self._client._transport.update_event_create_rule ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + (("event_create_rule.name", request.event_create_rule.name),) + ), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc( + response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - async def list_ad_sense_links( + # Done; return the response. + return response + + async def delete_event_create_rule( self, - request: Optional[Union[analytics_admin.ListAdSenseLinksRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.DeleteEventCreateRuleRequest, dict] + ] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAdSenseLinksAsyncPager: - r"""Lists AdSenseLinks on a property. + ) -> None: + r"""Deletes an EventCreateRule. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListAdSenseLinksRequest, dict]]): - The request object. Request message to be passed to - ListAdSenseLinks method. - parent (:class:`str`): - Required. Resource name of the parent - property. Format: - properties/{propertyId} - Example: properties/1234 + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteEventCreateRuleRequest, dict]]): + The request object. Request message for + DeleteEventCreateRule RPC. + name (:class:`str`): + Required. 
Example format: - This corresponds to the ``parent`` field + properties/123/dataStreams/456/eventCreateRules/789 + + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -9998,20 +11237,11 @@ async def list_ad_sense_links( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListAdSenseLinksAsyncPager: - Response message for ListAdSenseLinks - method. - Iterating over this object will yield - results and resolve additional pages - automatically. - """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -10020,72 +11250,56 @@ async def list_ad_sense_links( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListAdSenseLinksRequest): - request = analytics_admin.ListAdSenseLinksRequest(request) + if not isinstance(request, analytics_admin.DeleteEventCreateRuleRequest): + request = analytics_admin.DeleteEventCreateRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_ad_sense_links + self._client._transport.delete_event_create_rule ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - response = await rpc( + await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAdSenseLinksAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_event_create_rule( + async def get_event_edit_rule( self, - request: Optional[ - Union[analytics_admin.GetEventCreateRuleRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.GetEventEditRuleRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> event_create_and_edit.EventCreateRule: - r"""Lookup for a single EventCreateRule. + ) -> event_create_and_edit.EventEditRule: + r"""Lookup for a single EventEditRule. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.GetEventCreateRuleRequest, dict]]): - The request object. Request message for - GetEventCreateRule RPC. + request (Optional[Union[google.analytics.admin_v1alpha.types.GetEventEditRuleRequest, dict]]): + The request object. Request message for GetEventEditRule + RPC. name (:class:`str`): Required. The name of the - EventCreateRule to get. 
Example format: - properties/123/dataStreams/456/eventCreateRules/789 + EventEditRule to get. Example format: + properties/123/dataStreams/456/eventEditRules/789 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -10097,21 +11311,21 @@ async def get_event_create_rule( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.EventCreateRule: - An Event Create Rule defines - conditions that will trigger the - creation of an entirely new event based - upon matched criteria of a source event. - Additional mutations of the parameters - from the source event can be defined. + google.analytics.admin_v1alpha.types.EventEditRule: + An Event Edit Rule defines conditions + that will trigger the creation of an + entirely new event based upon matched + criteria of a source event. Additional + mutations of the parameters from the + source event can be defined. - Unlike Event Edit rules, Event Creation - Rules have no defined order. They will - all be run independently. + Unlike Event Create rules, Event Edit + Rules are applied in their defined + order. - Event Edit and Event Create rules can't - be used to modify an event created from - an Event Create rule. + Event Edit rules can't be used to modify + an event created from an Event Create + rule. """ # Create or coerce a protobuf request object. @@ -10126,8 +11340,8 @@ async def get_event_create_rule( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetEventCreateRuleRequest): - request = analytics_admin.GetEventCreateRuleRequest(request) + if not isinstance(request, analytics_admin.GetEventEditRuleRequest): + request = analytics_admin.GetEventEditRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -10137,7 +11351,7 @@ async def get_event_create_rule( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_event_create_rule + self._client._transport.get_event_edit_rule ] # Certain fields should be provided within the metadata header; @@ -10160,23 +11374,23 @@ async def get_event_create_rule( # Done; return the response. return response - async def list_event_create_rules( + async def list_event_edit_rules( self, request: Optional[ - Union[analytics_admin.ListEventCreateRulesRequest, dict] + Union[analytics_admin.ListEventEditRulesRequest, dict] ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEventCreateRulesAsyncPager: - r"""Lists EventCreateRules on a web data stream. + ) -> pagers.ListEventEditRulesAsyncPager: + r"""Lists EventEditRules on a web data stream. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.ListEventCreateRulesRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.ListEventEditRulesRequest, dict]]): The request object. Request message for - ListEventCreateRules RPC. + ListEventEditRules RPC. parent (:class:`str`): Required. Example format: properties/123/dataStreams/456 @@ -10191,9 +11405,9 @@ async def list_event_create_rules( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListEventCreateRulesAsyncPager: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListEventEditRulesAsyncPager: Response message for - ListEventCreateRules RPC. + ListEventEditRules RPC. Iterating over this object will yield results and resolve additional pages automatically. 
@@ -10211,8 +11425,8 @@ async def list_event_create_rules( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListEventCreateRulesRequest): - request = analytics_admin.ListEventCreateRulesRequest(request) + if not isinstance(request, analytics_admin.ListEventEditRulesRequest): + request = analytics_admin.ListEventEditRulesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -10222,7 +11436,7 @@ async def list_event_create_rules( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_event_create_rules + self._client._transport.list_event_edit_rules ] # Certain fields should be provided within the metadata header; @@ -10244,7 +11458,7 @@ async def list_event_create_rules( # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListEventCreateRulesAsyncPager( + response = pagers.ListEventEditRulesAsyncPager( method=rpc, request=request, response=response, @@ -10256,24 +11470,24 @@ async def list_event_create_rules( # Done; return the response. return response - async def create_event_create_rule( + async def create_event_edit_rule( self, request: Optional[ - Union[analytics_admin.CreateEventCreateRuleRequest, dict] + Union[analytics_admin.CreateEventEditRuleRequest, dict] ] = None, *, parent: Optional[str] = None, - event_create_rule: Optional[event_create_and_edit.EventCreateRule] = None, + event_edit_rule: Optional[event_create_and_edit.EventEditRule] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> event_create_and_edit.EventCreateRule: - r"""Creates an EventCreateRule. 
+ ) -> event_create_and_edit.EventEditRule: + r"""Creates an EventEditRule. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateEventCreateRuleRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateEventEditRuleRequest, dict]]): The request object. Request message for - CreateEventCreateRule RPC. + CreateEventEditRule RPC. parent (:class:`str`): Required. Example format: properties/123/dataStreams/456 @@ -10281,11 +11495,11 @@ async def create_event_create_rule( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - event_create_rule (:class:`google.analytics.admin_v1alpha.types.EventCreateRule`): - Required. The EventCreateRule to + event_edit_rule (:class:`google.analytics.admin_v1alpha.types.EventEditRule`): + Required. The EventEditRule to create. - This corresponds to the ``event_create_rule`` field + This corresponds to the ``event_edit_rule`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -10295,27 +11509,27 @@ async def create_event_create_rule( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.EventCreateRule: - An Event Create Rule defines - conditions that will trigger the - creation of an entirely new event based - upon matched criteria of a source event. - Additional mutations of the parameters - from the source event can be defined. + google.analytics.admin_v1alpha.types.EventEditRule: + An Event Edit Rule defines conditions + that will trigger the creation of an + entirely new event based upon matched + criteria of a source event. Additional + mutations of the parameters from the + source event can be defined. - Unlike Event Edit rules, Event Creation - Rules have no defined order. They will - all be run independently. 
+ Unlike Event Create rules, Event Edit + Rules are applied in their defined + order. - Event Edit and Event Create rules can't - be used to modify an event created from - an Event Create rule. + Event Edit rules can't be used to modify + an event created from an Event Create + rule. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, event_create_rule]) + has_flattened_params = any([parent, event_edit_rule]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -10324,20 +11538,20 @@ async def create_event_create_rule( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateEventCreateRuleRequest): - request = analytics_admin.CreateEventCreateRuleRequest(request) + if not isinstance(request, analytics_admin.CreateEventEditRuleRequest): + request = analytics_admin.CreateEventEditRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if event_create_rule is not None: - request.event_create_rule = event_create_rule + if event_edit_rule is not None: + request.event_edit_rule = event_edit_rule # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_event_create_rule + self._client._transport.create_event_edit_rule ] # Certain fields should be provided within the metadata header; @@ -10360,30 +11574,30 @@ async def create_event_create_rule( # Done; return the response. 
return response - async def update_event_create_rule( + async def update_event_edit_rule( self, request: Optional[ - Union[analytics_admin.UpdateEventCreateRuleRequest, dict] + Union[analytics_admin.UpdateEventEditRuleRequest, dict] ] = None, *, - event_create_rule: Optional[event_create_and_edit.EventCreateRule] = None, + event_edit_rule: Optional[event_create_and_edit.EventEditRule] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> event_create_and_edit.EventCreateRule: - r"""Updates an EventCreateRule. + ) -> event_create_and_edit.EventEditRule: + r"""Updates an EventEditRule. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateEventCreateRuleRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateEventEditRuleRequest, dict]]): The request object. Request message for - UpdateEventCreateRule RPC. - event_create_rule (:class:`google.analytics.admin_v1alpha.types.EventCreateRule`): - Required. The EventCreateRule to update. The resource's - ``name`` field is used to identify the EventCreateRule - to be updated. + UpdateEventEditRule RPC. + event_edit_rule (:class:`google.analytics.admin_v1alpha.types.EventEditRule`): + Required. The EventEditRule to update. The resource's + ``name`` field is used to identify the EventEditRule to + be updated. - This corresponds to the ``event_create_rule`` field + This corresponds to the ``event_edit_rule`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): @@ -10403,27 +11617,27 @@ async def update_event_create_rule( sent along with the request as metadata. 
Returns: - google.analytics.admin_v1alpha.types.EventCreateRule: - An Event Create Rule defines - conditions that will trigger the - creation of an entirely new event based - upon matched criteria of a source event. - Additional mutations of the parameters - from the source event can be defined. + google.analytics.admin_v1alpha.types.EventEditRule: + An Event Edit Rule defines conditions + that will trigger the creation of an + entirely new event based upon matched + criteria of a source event. Additional + mutations of the parameters from the + source event can be defined. - Unlike Event Edit rules, Event Creation - Rules have no defined order. They will - all be run independently. + Unlike Event Create rules, Event Edit + Rules are applied in their defined + order. - Event Edit and Event Create rules can't - be used to modify an event created from - an Event Create rule. + Event Edit rules can't be used to modify + an event created from an Event Create + rule. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([event_create_rule, update_mask]) + has_flattened_params = any([event_edit_rule, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -10432,27 +11646,27 @@ async def update_event_create_rule( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateEventCreateRuleRequest): - request = analytics_admin.UpdateEventCreateRuleRequest(request) + if not isinstance(request, analytics_admin.UpdateEventEditRuleRequest): + request = analytics_admin.UpdateEventEditRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if event_create_rule is not None: - request.event_create_rule = event_create_rule + if event_edit_rule is not None: + request.event_edit_rule = event_edit_rule if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_event_create_rule + self._client._transport.update_event_edit_rule ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("event_create_rule.name", request.event_create_rule.name),) + (("event_edit_rule.name", request.event_edit_rule.name),) ), ) @@ -10470,10 +11684,10 @@ async def update_event_create_rule( # Done; return the response. return response - async def delete_event_create_rule( + async def delete_event_edit_rule( self, request: Optional[ - Union[analytics_admin.DeleteEventCreateRuleRequest, dict] + Union[analytics_admin.DeleteEventEditRuleRequest, dict] ] = None, *, name: Optional[str] = None, @@ -10481,16 +11695,15 @@ async def delete_event_create_rule( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes an EventCreateRule. + r"""Deletes an EventEditRule. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteEventCreateRuleRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteEventEditRuleRequest, dict]]): The request object. Request message for - DeleteEventCreateRule RPC. + DeleteEventEditRule RPC. name (:class:`str`): Required. 
Example format: - - properties/123/dataStreams/456/eventCreateRules/789 + properties/123/dataStreams/456/eventEditRules/789 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -10513,8 +11726,8 @@ async def delete_event_create_rule( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteEventCreateRuleRequest): - request = analytics_admin.DeleteEventCreateRuleRequest(request) + if not isinstance(request, analytics_admin.DeleteEventEditRuleRequest): + request = analytics_admin.DeleteEventEditRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -10524,7 +11737,7 @@ async def delete_event_create_rule( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_event_create_rule + self._client._transport.delete_event_edit_rule ] # Certain fields should be provided within the metadata header; @@ -10544,6 +11757,58 @@ async def delete_event_create_rule( metadata=metadata, ) + async def reorder_event_edit_rules( + self, + request: Optional[ + Union[analytics_admin.ReorderEventEditRulesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Changes the processing order of event edit rules on + the specified stream. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.ReorderEventEditRulesRequest, dict]]): + The request object. Request message for + ReorderEventEditRules RPC. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.ReorderEventEditRulesRequest): + request = analytics_admin.ReorderEventEditRulesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.reorder_event_edit_rules + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def update_data_redaction_settings( self, request: Optional[ @@ -10853,7 +12118,7 @@ async def create_calculated_metric( metric's resource name. This value should be 1-80 characters and valid - characters are `[a-zA-Z0-9_]`, no spaces allowed. + characters are /[a-zA-Z0-9_]/, no spaces allowed. calculated_metric_id must be unique between all calculated metrics under a property. 
The calculated_metric_id is used when referencing this @@ -11612,19 +12877,21 @@ async def delete_rollup_property_source_link( metadata=metadata, ) - async def create_subproperty( + async def provision_subproperty( self, - request: Optional[Union[analytics_admin.CreateSubpropertyRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.ProvisionSubpropertyRequest, dict] + ] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.CreateSubpropertyResponse: + ) -> analytics_admin.ProvisionSubpropertyResponse: r"""Create a subproperty and a subproperty event filter that applies to the created subproperty. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateSubpropertyRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.ProvisionSubpropertyRequest, dict]]): The request object. Request message for CreateSubproperty RPC. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -11634,21 +12901,21 @@ async def create_subproperty( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CreateSubpropertyResponse: + google.analytics.admin_v1alpha.types.ProvisionSubpropertyResponse: Response message for - CreateSubproperty RPC. + ProvisionSubproperty RPC. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateSubpropertyRequest): - request = analytics_admin.CreateSubpropertyRequest(request) + if not isinstance(request, analytics_admin.ProvisionSubpropertyRequest): + request = analytics_admin.ProvisionSubpropertyRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_subproperty + self._client._transport.provision_subproperty ] # Validate the universe domain. diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py index 45710bbc0c33..dc955b205d26 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py @@ -573,6 +573,28 @@ def parse_event_create_rule_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def event_edit_rule_path( + property: str, + data_stream: str, + event_edit_rule: str, + ) -> str: + """Returns a fully-qualified event_edit_rule string.""" + return "properties/{property}/dataStreams/{data_stream}/eventEditRules/{event_edit_rule}".format( + property=property, + data_stream=data_stream, + event_edit_rule=event_edit_rule, + ) + + @staticmethod + def parse_event_edit_rule_path(path: str) -> Dict[str, str]: + """Parses a event_edit_rule path into its component segments.""" + m = re.match( + r"^properties/(?P.+?)/dataStreams/(?P.+?)/eventEditRules/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def expanded_data_set_path( property: str, @@ -667,6 +689,25 @@ def parse_google_signals_settings_path(path: str) -> Dict[str, str]: m = re.match(r"^properties/(?P.+?)/googleSignalsSettings$", path) return m.groupdict() if m else {} + @staticmethod + def key_event_path( + property: str, + key_event: str, + ) -> str: + """Returns a fully-qualified key_event string.""" + return "properties/{property}/keyEvents/{key_event}".format( + property=property, + key_event=key_event, + ) + + @staticmethod + def parse_key_event_path(path: str) -> Dict[str, 
str]: + """Parses a key_event path into its component segments.""" + m = re.match( + r"^properties/(?P.+?)/keyEvents/(?P.+?)$", path + ) + return m.groupdict() if m else {} + @staticmethod def measurement_protocol_secret_path( property: str, @@ -689,6 +730,21 @@ def parse_measurement_protocol_secret_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_organization_path(path: str) -> Dict[str, str]: + """Parses a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def property_path( property: str, @@ -1248,7 +1304,7 @@ def __init__( Type[AnalyticsAdminServiceTransport], Callable[..., AnalyticsAdminServiceTransport], ] = ( - type(self).get_transport_class(transport) + AnalyticsAdminServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AnalyticsAdminServiceTransport], transport) ) @@ -2126,8 +2182,9 @@ def create_firebase_link( The request object. Request message for CreateFirebaseLink RPC parent (str): - Required. Format: properties/{property_id} Example: - properties/1234 + Required. Format: properties/{property_id} + + Example: ``properties/1234`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2216,7 +2273,8 @@ def delete_firebase_link( name (str): Required. Format: properties/{property_id}/firebaseLinks/{firebase_link_id} - Example: properties/1234/firebaseLinks/5678 + + Example: ``properties/1234/firebaseLinks/5678`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2284,8 +2342,9 @@ def list_firebase_links( The request object. 
Request message for ListFirebaseLinks RPC parent (str): - Required. Format: properties/{property_id} Example: - properties/1234 + Required. Format: properties/{property_id} + + Example: ``properties/1234`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2380,7 +2439,9 @@ def get_global_site_tag( site tags are singletons and do not have unique IDs. Format: properties/{property_id}/dataStreams/{stream_id}/globalSiteTag - Example: "properties/123/dataStreams/456/globalSiteTag" + + Example: + ``properties/123/dataStreams/456/globalSiteTag`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2805,11 +2866,10 @@ def get_data_sharing_settings( The request object. Request message for GetDataSharingSettings RPC. name (str): - Required. The name of the settings to - lookup. Format: + Required. The name of the settings to lookup. Format: accounts/{account}/dataSharingSettings - Example: - "accounts/1000/dataSharingSettings" + + Example: ``accounts/1000/dataSharingSettings`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -4144,8 +4204,8 @@ def create_conversion_event( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.ConversionEvent: - r"""Creates a conversion event with the specified - attributes. + r"""Deprecated: Use ``CreateKeyEvent`` instead. Creates a conversion + event with the specified attributes. Args: request (Union[google.analytics.admin_v1alpha.types.CreateConversionEventRequest, dict]): @@ -4179,6 +4239,11 @@ def create_conversion_event( Analytics property. """ + warnings.warn( + "AnalyticsAdminServiceClient.create_conversion_event is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -4236,8 +4301,8 @@ def update_conversion_event( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.ConversionEvent: - r"""Updates a conversion event with the specified - attributes. + r"""Deprecated: Use ``UpdateKeyEvent`` instead. Updates a conversion + event with the specified attributes. Args: request (Union[google.analytics.admin_v1alpha.types.UpdateConversionEventRequest, dict]): @@ -4272,6 +4337,11 @@ def update_conversion_event( Analytics property. """ + warnings.warn( + "AnalyticsAdminServiceClient.update_conversion_event is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -4330,7 +4400,8 @@ def get_conversion_event( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.ConversionEvent: - r"""Retrieve a single conversion event. + r"""Deprecated: Use ``GetKeyEvent`` instead. Retrieve a single + conversion event. Args: request (Union[google.analytics.admin_v1alpha.types.GetConversionEventRequest, dict]): @@ -4357,6 +4428,11 @@ def get_conversion_event( Analytics property. """ + warnings.warn( + "AnalyticsAdminServiceClient.get_conversion_event is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -4411,7 +4487,8 @@ def delete_conversion_event( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a conversion event in a property. + r"""Deprecated: Use ``DeleteKeyEvent`` instead. Deletes a conversion + event in a property. 
Args: request (Union[google.analytics.admin_v1alpha.types.DeleteConversionEventRequest, dict]): @@ -4432,6 +4509,11 @@ def delete_conversion_event( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ + warnings.warn( + "AnalyticsAdminServiceClient.delete_conversion_event is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -4483,8 +4565,9 @@ def list_conversion_events( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListConversionEventsPager: - r"""Returns a list of conversion events in the specified - parent property. + r"""Deprecated: Use ``ListKeyEvents`` instead. Returns a list of + conversion events in the specified parent property. + Returns an empty list if no conversion events are found. Args: @@ -4514,6 +4597,11 @@ def list_conversion_events( automatically. """ + warnings.warn( + "AnalyticsAdminServiceClient.list_conversion_events is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -4568,30 +4656,33 @@ def list_conversion_events( # Done; return the response. 
return response - def get_display_video360_advertiser_link( + def create_key_event( self, - request: Optional[ - Union[analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.CreateKeyEventRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + key_event: Optional[resources.KeyEvent] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DisplayVideo360AdvertiserLink: - r"""Look up a single DisplayVideo360AdvertiserLink + ) -> resources.KeyEvent: + r"""Creates a Key Event. Args: - request (Union[google.analytics.admin_v1alpha.types.GetDisplayVideo360AdvertiserLinkRequest, dict]): - The request object. Request message for - GetDisplayVideo360AdvertiserLink RPC. - name (str): - Required. The name of the - DisplayVideo360AdvertiserLink to get. - Example format: - properties/1234/displayVideo360AdvertiserLink/5678 + request (Union[google.analytics.admin_v1alpha.types.CreateKeyEventRequest, dict]): + The request object. Request message for CreateKeyEvent + RPC + parent (str): + Required. The resource name of the + parent property where this Key Event + will be created. Format: properties/123 - This corresponds to the ``name`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + key_event (google.analytics.admin_v1alpha.types.KeyEvent): + Required. The Key Event to create. + This corresponds to the ``key_event`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -4601,15 +4692,15 @@ def get_display_video360_advertiser_link( sent along with the request as metadata. 
Returns: - google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + google.analytics.admin_v1alpha.types.KeyEvent: + A key event in a Google Analytics + property. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent, key_event]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4618,25 +4709,23 @@ def get_display_video360_advertiser_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance( - request, analytics_admin.GetDisplayVideo360AdvertiserLinkRequest - ): - request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest(request) + if not isinstance(request, analytics_admin.CreateKeyEventRequest): + request = analytics_admin.CreateKeyEventRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if key_event is not None: + request.key_event = key_event # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.get_display_video360_advertiser_link - ] + rpc = self._transport._wrapped_methods[self._transport.create_key_event] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. 
@@ -4653,29 +4742,37 @@ def get_display_video360_advertiser_link( # Done; return the response. return response - def list_display_video360_advertiser_links( + def update_key_event( self, - request: Optional[ - Union[analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.UpdateKeyEventRequest, dict]] = None, *, - parent: Optional[str] = None, + key_event: Optional[resources.KeyEvent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDisplayVideo360AdvertiserLinksPager: - r"""Lists all DisplayVideo360AdvertiserLinks on a - property. + ) -> resources.KeyEvent: + r"""Updates a Key Event. Args: - request (Union[google.analytics.admin_v1alpha.types.ListDisplayVideo360AdvertiserLinksRequest, dict]): - The request object. Request message for - ListDisplayVideo360AdvertiserLinks RPC. - parent (str): - Required. Example format: - properties/1234 + request (Union[google.analytics.admin_v1alpha.types.UpdateKeyEventRequest, dict]): + The request object. Request message for UpdateKeyEvent + RPC + key_event (google.analytics.admin_v1alpha.types.KeyEvent): + Required. The Key Event to update. The ``name`` field is + used to identify the settings to be updated. - This corresponds to the ``parent`` field + This corresponds to the ``key_event`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. 
+ + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -4685,18 +4782,15 @@ def list_display_video360_advertiser_links( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListDisplayVideo360AdvertiserLinksPager: - Response message for - ListDisplayVideo360AdvertiserLinks RPC. - Iterating over this object will yield - results and resolve additional pages - automatically. + google.analytics.admin_v1alpha.types.KeyEvent: + A key event in a Google Analytics + property. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([key_event, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4705,25 +4799,25 @@ def list_display_video360_advertiser_links( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance( - request, analytics_admin.ListDisplayVideo360AdvertiserLinksRequest - ): - request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest(request) + if not isinstance(request, analytics_admin.UpdateKeyEventRequest): + request = analytics_admin.UpdateKeyEventRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if key_event is not None: + request.key_event = key_event + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[ - self._transport.list_display_video360_advertiser_links - ] + rpc = self._transport._wrapped_methods[self._transport.update_key_event] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("key_event.name", request.key_event.name),) + ), ) # Validate the universe domain. @@ -4737,57 +4831,30 @@ def list_display_video360_advertiser_links( metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDisplayVideo360AdvertiserLinksPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - # Done; return the response. return response - def create_display_video360_advertiser_link( + def get_key_event( self, - request: Optional[ - Union[analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.GetKeyEventRequest, dict]] = None, *, - parent: Optional[str] = None, - display_video_360_advertiser_link: Optional[ - resources.DisplayVideo360AdvertiserLink - ] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DisplayVideo360AdvertiserLink: - r"""Creates a DisplayVideo360AdvertiserLink. - This can only be utilized by users who have proper - authorization both on the Google Analytics property and - on the Display & Video 360 advertiser. Users who do not - have access to the Display & Video 360 advertiser should - instead seek to create a DisplayVideo360LinkProposal. + ) -> resources.KeyEvent: + r"""Retrieve a single Key Event. 
Args: - request (Union[google.analytics.admin_v1alpha.types.CreateDisplayVideo360AdvertiserLinkRequest, dict]): - The request object. Request message for - CreateDisplayVideo360AdvertiserLink RPC. - parent (str): - Required. Example format: - properties/1234 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - display_video_360_advertiser_link (google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink): - Required. The - DisplayVideo360AdvertiserLink to create. + request (Union[google.analytics.admin_v1alpha.types.GetKeyEventRequest, dict]): + The request object. Request message for GetKeyEvent RPC + name (str): + Required. The resource name of the Key Event to + retrieve. Format: + properties/{property}/keyEvents/{key_event} Example: + "properties/123/keyEvents/456" - This corresponds to the ``display_video_360_advertiser_link`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -4797,15 +4864,15 @@ def create_display_video360_advertiser_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + google.analytics.admin_v1alpha.types.KeyEvent: + A key event in a Google Analytics + property. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, display_video_360_advertiser_link]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4814,31 +4881,21 @@ def create_display_video360_advertiser_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance( - request, analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest - ): - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest( - request - ) + if not isinstance(request, analytics_admin.GetKeyEventRequest): + request = analytics_admin.GetKeyEventRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent - if display_video_360_advertiser_link is not None: - request.display_video_360_advertiser_link = ( - display_video_360_advertiser_link - ) + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.create_display_video360_advertiser_link - ] + rpc = self._transport._wrapped_methods[self._transport.get_key_event] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -4855,29 +4912,25 @@ def create_display_video360_advertiser_link( # Done; return the response. 
return response - def delete_display_video360_advertiser_link( + def delete_key_event( self, - request: Optional[ - Union[analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.DeleteKeyEventRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a DisplayVideo360AdvertiserLink on a - property. + r"""Deletes a Key Event. Args: - request (Union[google.analytics.admin_v1alpha.types.DeleteDisplayVideo360AdvertiserLinkRequest, dict]): - The request object. Request message for - DeleteDisplayVideo360AdvertiserLink RPC. + request (Union[google.analytics.admin_v1alpha.types.DeleteKeyEventRequest, dict]): + The request object. Request message for DeleteKeyEvent + RPC name (str): - Required. The name of the - DisplayVideo360AdvertiserLink to delete. - Example format: - properties/1234/displayVideo360AdvertiserLinks/5678 + Required. The resource name of the Key Event to delete. + Format: properties/{property}/keyEvents/{key_event} + Example: "properties/123/keyEvents/456" This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -4900,12 +4953,8 @@ def delete_display_video360_advertiser_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance( - request, analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest - ): - request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest( - request - ) + if not isinstance(request, analytics_admin.DeleteKeyEventRequest): + request = analytics_admin.DeleteKeyEventRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
if name is not None: @@ -4913,9 +4962,7 @@ def delete_display_video360_advertiser_link( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.delete_display_video360_advertiser_link - ] + rpc = self._transport._wrapped_methods[self._transport.delete_key_event] # Certain fields should be provided within the metadata header; # add these here. @@ -4934,41 +4981,28 @@ def delete_display_video360_advertiser_link( metadata=metadata, ) - def update_display_video360_advertiser_link( + def list_key_events( self, - request: Optional[ - Union[analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.ListKeyEventsRequest, dict]] = None, *, - display_video_360_advertiser_link: Optional[ - resources.DisplayVideo360AdvertiserLink - ] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DisplayVideo360AdvertiserLink: - r"""Updates a DisplayVideo360AdvertiserLink on a - property. + ) -> pagers.ListKeyEventsPager: + r"""Returns a list of Key Events in the specified parent + property. Returns an empty list if no Key Events are + found. Args: - request (Union[google.analytics.admin_v1alpha.types.UpdateDisplayVideo360AdvertiserLinkRequest, dict]): - The request object. Request message for - UpdateDisplayVideo360AdvertiserLink RPC. - display_video_360_advertiser_link (google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink): - The DisplayVideo360AdvertiserLink to - update - - This corresponds to the ``display_video_360_advertiser_link`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. 
The list of fields to be updated. Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. + request (Union[google.analytics.admin_v1alpha.types.ListKeyEventsRequest, dict]): + The request object. Request message for ListKeyEvents RPC + parent (str): + Required. The resource name of the + parent property. Example: + 'properties/123' - This corresponds to the ``update_mask`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -4978,15 +5012,18 @@ def update_display_video360_advertiser_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListKeyEventsPager: + Response message for ListKeyEvents + RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([display_video_360_advertiser_link, update_mask]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4995,38 +5032,21 @@ def update_display_video360_advertiser_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance( - request, analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest - ): - request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest( - request - ) + if not isinstance(request, analytics_admin.ListKeyEventsRequest): + request = analytics_admin.ListKeyEventsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if display_video_360_advertiser_link is not None: - request.display_video_360_advertiser_link = ( - display_video_360_advertiser_link - ) - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.update_display_video360_advertiser_link - ] + rpc = self._transport._wrapped_methods[self._transport.list_key_events] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ( - "display_video_360_advertiser_link.name", - request.display_video_360_advertiser_link.name, - ), - ) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -5040,33 +5060,42 @@ def update_display_video360_advertiser_link( metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListKeyEventsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. 
return response - def get_display_video360_advertiser_link_proposal( + def get_display_video360_advertiser_link( self, request: Optional[ - Union[analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, dict] + Union[analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, dict] ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DisplayVideo360AdvertiserLinkProposal: - r"""Lookup for a single - DisplayVideo360AdvertiserLinkProposal. + ) -> resources.DisplayVideo360AdvertiserLink: + r"""Look up a single DisplayVideo360AdvertiserLink Args: - request (Union[google.analytics.admin_v1alpha.types.GetDisplayVideo360AdvertiserLinkProposalRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.GetDisplayVideo360AdvertiserLinkRequest, dict]): The request object. Request message for - GetDisplayVideo360AdvertiserLinkProposal - RPC. + GetDisplayVideo360AdvertiserLink RPC. name (str): Required. The name of the - DisplayVideo360AdvertiserLinkProposal to - get. Example format: - properties/1234/displayVideo360AdvertiserLinkProposals/5678 + DisplayVideo360AdvertiserLink to get. + Example format: + properties/1234/displayVideo360AdvertiserLink/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -5078,17 +5107,9 @@ def get_display_video360_advertiser_link_proposal( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: - A proposal for a link between a GA4 - property and a Display & Video 360 - advertiser. - - A proposal is converted to a - DisplayVideo360AdvertiserLink once - approved. Google Analytics admins - approve inbound proposals while Display - & Video 360 admins approve outbound - proposals. 
+ google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: + A link between a GA4 property and a + Display & Video 360 advertiser. """ # Create or coerce a protobuf request object. @@ -5104,11 +5125,9 @@ def get_display_video360_advertiser_link_proposal( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. if not isinstance( - request, analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest + request, analytics_admin.GetDisplayVideo360AdvertiserLinkRequest ): - request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest( - request - ) + request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -5117,7 +5136,7 @@ def get_display_video360_advertiser_link_proposal( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.get_display_video360_advertiser_link_proposal + self._transport.get_display_video360_advertiser_link ] # Certain fields should be provided within the metadata header; @@ -5140,27 +5159,24 @@ def get_display_video360_advertiser_link_proposal( # Done; return the response. 
return response - def list_display_video360_advertiser_link_proposals( + def list_display_video360_advertiser_links( self, request: Optional[ - Union[ - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, dict - ] + Union[analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, dict] ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDisplayVideo360AdvertiserLinkProposalsPager: - r"""Lists DisplayVideo360AdvertiserLinkProposals on a + ) -> pagers.ListDisplayVideo360AdvertiserLinksPager: + r"""Lists all DisplayVideo360AdvertiserLinks on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.ListDisplayVideo360AdvertiserLinkProposalsRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.ListDisplayVideo360AdvertiserLinksRequest, dict]): The request object. Request message for - ListDisplayVideo360AdvertiserLinkProposals - RPC. + ListDisplayVideo360AdvertiserLinks RPC. parent (str): Required. Example format: properties/1234 @@ -5175,12 +5191,12 @@ def list_display_video360_advertiser_link_proposals( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListDisplayVideo360AdvertiserLinkProposalsPager: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListDisplayVideo360AdvertiserLinksPager: Response message for - ListDisplayVideo360AdvertiserLinkProposals - RPC. Iterating over this object will - yield results and resolve additional - pages automatically. + ListDisplayVideo360AdvertiserLinks RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. 
@@ -5196,11 +5212,9 @@ def list_display_video360_advertiser_link_proposals( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. if not isinstance( - request, analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest + request, analytics_admin.ListDisplayVideo360AdvertiserLinksRequest ): - request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest( - request - ) + request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -5209,7 +5223,7 @@ def list_display_video360_advertiser_link_proposals( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.list_display_video360_advertiser_link_proposals + self._transport.list_display_video360_advertiser_links ] # Certain fields should be provided within the metadata header; @@ -5231,7 +5245,7 @@ def list_display_video360_advertiser_link_proposals( # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListDisplayVideo360AdvertiserLinkProposalsPager( + response = pagers.ListDisplayVideo360AdvertiserLinksPager( method=rpc, request=request, response=response, @@ -5243,29 +5257,31 @@ def list_display_video360_advertiser_link_proposals( # Done; return the response. 
return response - def create_display_video360_advertiser_link_proposal( + def create_display_video360_advertiser_link( self, request: Optional[ - Union[ - analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, dict - ] + Union[analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, dict] ] = None, *, parent: Optional[str] = None, - display_video_360_advertiser_link_proposal: Optional[ - resources.DisplayVideo360AdvertiserLinkProposal + display_video_360_advertiser_link: Optional[ + resources.DisplayVideo360AdvertiserLink ] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DisplayVideo360AdvertiserLinkProposal: - r"""Creates a DisplayVideo360AdvertiserLinkProposal. + ) -> resources.DisplayVideo360AdvertiserLink: + r"""Creates a DisplayVideo360AdvertiserLink. + This can only be utilized by users who have proper + authorization both on the Google Analytics property and + on the Display & Video 360 advertiser. Users who do not + have access to the Display & Video 360 advertiser should + instead seek to create a DisplayVideo360LinkProposal. Args: - request (Union[google.analytics.admin_v1alpha.types.CreateDisplayVideo360AdvertiserLinkProposalRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.CreateDisplayVideo360AdvertiserLinkRequest, dict]): The request object. Request message for - CreateDisplayVideo360AdvertiserLinkProposal - RPC. + CreateDisplayVideo360AdvertiserLink RPC. parent (str): Required. Example format: properties/1234 @@ -5273,12 +5289,11 @@ def create_display_video360_advertiser_link_proposal( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- display_video_360_advertiser_link_proposal (google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal): + display_video_360_advertiser_link (google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink): Required. The - DisplayVideo360AdvertiserLinkProposal to - create. + DisplayVideo360AdvertiserLink to create. - This corresponds to the ``display_video_360_advertiser_link_proposal`` field + This corresponds to the ``display_video_360_advertiser_link`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -5288,23 +5303,15 @@ def create_display_video360_advertiser_link_proposal( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: - A proposal for a link between a GA4 - property and a Display & Video 360 - advertiser. - - A proposal is converted to a - DisplayVideo360AdvertiserLink once - approved. Google Analytics admins - approve inbound proposals while Display - & Video 360 admins approve outbound - proposals. + google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: + A link between a GA4 property and a + Display & Video 360 advertiser. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, display_video_360_advertiser_link_proposal]) + has_flattened_params = any([parent, display_video_360_advertiser_link]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5314,26 +5321,24 @@ def create_display_video360_advertiser_link_proposal( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
if not isinstance( - request, analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest + request, analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest ): - request = ( - analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest( - request - ) + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest( + request ) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if display_video_360_advertiser_link_proposal is not None: - request.display_video_360_advertiser_link_proposal = ( - display_video_360_advertiser_link_proposal + if display_video_360_advertiser_link is not None: + request.display_video_360_advertiser_link = ( + display_video_360_advertiser_link ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.create_display_video360_advertiser_link_proposal + self._transport.create_display_video360_advertiser_link ] # Certain fields should be provided within the metadata header; @@ -5356,12 +5361,10 @@ def create_display_video360_advertiser_link_proposal( # Done; return the response. return response - def delete_display_video360_advertiser_link_proposal( + def delete_display_video360_advertiser_link( self, request: Optional[ - Union[ - analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, dict - ] + Union[analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, dict] ] = None, *, name: Optional[str] = None, @@ -5369,19 +5372,18 @@ def delete_display_video360_advertiser_link_proposal( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a DisplayVideo360AdvertiserLinkProposal on a - property. This can only be used on cancelled proposals. + r"""Deletes a DisplayVideo360AdvertiserLink on a + property. 
Args: - request (Union[google.analytics.admin_v1alpha.types.DeleteDisplayVideo360AdvertiserLinkProposalRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.DeleteDisplayVideo360AdvertiserLinkRequest, dict]): The request object. Request message for - DeleteDisplayVideo360AdvertiserLinkProposal - RPC. + DeleteDisplayVideo360AdvertiserLink RPC. name (str): Required. The name of the - DisplayVideo360AdvertiserLinkProposal to - delete. Example format: - properties/1234/displayVideo360AdvertiserLinkProposals/5678 + DisplayVideo360AdvertiserLink to delete. + Example format: + properties/1234/displayVideo360AdvertiserLinks/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -5405,12 +5407,10 @@ def delete_display_video360_advertiser_link_proposal( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. if not isinstance( - request, analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest + request, analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest ): - request = ( - analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest( - request - ) + request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest( + request ) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -5420,7 +5420,7 @@ def delete_display_video360_advertiser_link_proposal( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._transport._wrapped_methods[ - self._transport.delete_display_video360_advertiser_link_proposal + self._transport.delete_display_video360_advertiser_link ] # Certain fields should be provided within the metadata header; @@ -5440,29 +5440,43 @@ def delete_display_video360_advertiser_link_proposal( metadata=metadata, ) - def approve_display_video360_advertiser_link_proposal( + def update_display_video360_advertiser_link( self, request: Optional[ - Union[ - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, - dict, - ] + Union[analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, dict] ] = None, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, + display_video_360_advertiser_link: Optional[ + resources.DisplayVideo360AdvertiserLink + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse: - r"""Approves a DisplayVideo360AdvertiserLinkProposal. - The DisplayVideo360AdvertiserLinkProposal will be - deleted and a new DisplayVideo360AdvertiserLink will be - created. + ) -> resources.DisplayVideo360AdvertiserLink: + r"""Updates a DisplayVideo360AdvertiserLink on a + property. Args: - request (Union[google.analytics.admin_v1alpha.types.ApproveDisplayVideo360AdvertiserLinkProposalRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.UpdateDisplayVideo360AdvertiserLinkRequest, dict]): The request object. Request message for - ApproveDisplayVideo360AdvertiserLinkProposal - RPC. + UpdateDisplayVideo360AdvertiserLink RPC. 
+ display_video_360_advertiser_link (google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink): + The DisplayVideo360AdvertiserLink to + update + + This corresponds to the ``display_video_360_advertiser_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5470,34 +5484,55 @@ def approve_display_video360_advertiser_link_proposal( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.ApproveDisplayVideo360AdvertiserLinkProposalResponse: - Response message for - ApproveDisplayVideo360AdvertiserLinkProposal - RPC. + google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: + A link between a GA4 property and a + Display & Video 360 advertiser. """ # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([display_video_360_advertiser_link, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
if not isinstance( - request, analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest + request, analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest ): - request = ( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest( - request - ) + request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest( + request ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if display_video_360_advertiser_link is not None: + request.display_video_360_advertiser_link = ( + display_video_360_advertiser_link + ) + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.approve_display_video360_advertiser_link_proposal + self._transport.update_display_video360_advertiser_link ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "display_video_360_advertiser_link.name", + request.display_video_360_advertiser_link.name, + ), + ) + ), ) # Validate the universe domain. @@ -5514,32 +5549,34 @@ def approve_display_video360_advertiser_link_proposal( # Done; return the response. 
return response - def cancel_display_video360_advertiser_link_proposal( + def get_display_video360_advertiser_link_proposal( self, request: Optional[ - Union[ - analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, dict - ] + Union[analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, dict] ] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.DisplayVideo360AdvertiserLinkProposal: - r"""Cancels a DisplayVideo360AdvertiserLinkProposal. - Cancelling can mean either: - - - Declining a proposal initiated from Display & Video - 360 - - Withdrawing a proposal initiated from Google Analytics - After being cancelled, a proposal will eventually be - deleted automatically. + r"""Lookup for a single + DisplayVideo360AdvertiserLinkProposal. Args: - request (Union[google.analytics.admin_v1alpha.types.CancelDisplayVideo360AdvertiserLinkProposalRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.GetDisplayVideo360AdvertiserLinkProposalRequest, dict]): The request object. Request message for - CancelDisplayVideo360AdvertiserLinkProposal + GetDisplayVideo360AdvertiserLinkProposal RPC. + name (str): + Required. The name of the + DisplayVideo360AdvertiserLinkProposal to + get. Example format: + properties/1234/displayVideo360AdvertiserLinkProposals/5678 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5561,21 +5598,32 @@ def cancel_display_video360_advertiser_link_proposal( """ # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. if not isinstance( - request, analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest + request, analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest ): - request = ( - analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest( - request - ) + request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest( + request ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.cancel_display_video360_advertiser_link_proposal + self._transport.get_display_video360_advertiser_link_proposal ] # Certain fields should be provided within the metadata header; @@ -5598,24 +5646,27 @@ def cancel_display_video360_advertiser_link_proposal( # Done; return the response. return response - def create_custom_dimension( + def list_display_video360_advertiser_link_proposals( self, request: Optional[ - Union[analytics_admin.CreateCustomDimensionRequest, dict] + Union[ + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, dict + ] ] = None, *, parent: Optional[str] = None, - custom_dimension: Optional[resources.CustomDimension] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.CustomDimension: - r"""Creates a CustomDimension. 
+ ) -> pagers.ListDisplayVideo360AdvertiserLinkProposalsPager: + r"""Lists DisplayVideo360AdvertiserLinkProposals on a + property. Args: - request (Union[google.analytics.admin_v1alpha.types.CreateCustomDimensionRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.ListDisplayVideo360AdvertiserLinkProposalsRequest, dict]): The request object. Request message for - CreateCustomDimension RPC. + ListDisplayVideo360AdvertiserLinkProposals + RPC. parent (str): Required. Example format: properties/1234 @@ -5623,13 +5674,6 @@ def create_custom_dimension( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - custom_dimension (google.analytics.admin_v1alpha.types.CustomDimension): - Required. The CustomDimension to - create. - - This corresponds to the ``custom_dimension`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5637,13 +5681,18 @@ def create_custom_dimension( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CustomDimension: - A definition for a CustomDimension. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListDisplayVideo360AdvertiserLinkProposalsPager: + Response message for + ListDisplayVideo360AdvertiserLinkProposals + RPC. Iterating over this object will + yield results and resolve additional + pages automatically. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, custom_dimension]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5652,18 +5701,22 @@ def create_custom_dimension( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateCustomDimensionRequest): - request = analytics_admin.CreateCustomDimensionRequest(request) + if not isinstance( + request, analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest + ): + request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest( + request + ) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if custom_dimension is not None: - request.custom_dimension = custom_dimension # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_custom_dimension] + rpc = self._transport._wrapped_methods[ + self._transport.list_display_video360_advertiser_link_proposals + ] # Certain fields should be provided within the metadata header; # add these here. @@ -5682,39 +5735,56 @@ def create_custom_dimension( metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDisplayVideo360AdvertiserLinkProposalsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. 
return response - def update_custom_dimension( + def create_display_video360_advertiser_link_proposal( self, request: Optional[ - Union[analytics_admin.UpdateCustomDimensionRequest, dict] + Union[ + analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, dict + ] ] = None, *, - custom_dimension: Optional[resources.CustomDimension] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, + display_video_360_advertiser_link_proposal: Optional[ + resources.DisplayVideo360AdvertiserLinkProposal + ] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.CustomDimension: - r"""Updates a CustomDimension on a property. + ) -> resources.DisplayVideo360AdvertiserLinkProposal: + r"""Creates a DisplayVideo360AdvertiserLinkProposal. Args: - request (Union[google.analytics.admin_v1alpha.types.UpdateCustomDimensionRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.CreateDisplayVideo360AdvertiserLinkProposalRequest, dict]): The request object. Request message for - UpdateCustomDimension RPC. - custom_dimension (google.analytics.admin_v1alpha.types.CustomDimension): - The CustomDimension to update - This corresponds to the ``custom_dimension`` field + CreateDisplayVideo360AdvertiserLinkProposal + RPC. + parent (str): + Required. Example format: + properties/1234 + + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to be updated. Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. + display_video_360_advertiser_link_proposal (google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal): + Required. 
The + DisplayVideo360AdvertiserLinkProposal to + create. - This corresponds to the ``update_mask`` field + This corresponds to the ``display_video_360_advertiser_link_proposal`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -5724,13 +5794,23 @@ def update_custom_dimension( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CustomDimension: - A definition for a CustomDimension. + google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: + A proposal for a link between a GA4 + property and a Display & Video 360 + advertiser. + + A proposal is converted to a + DisplayVideo360AdvertiserLink once + approved. Google Analytics admins + approve inbound proposals while Display + & Video 360 admins approve outbound + proposals. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([custom_dimension, update_mask]) + has_flattened_params = any([parent, display_video_360_advertiser_link_proposal]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5739,25 +5819,33 @@ def update_custom_dimension( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateCustomDimensionRequest): - request = analytics_admin.UpdateCustomDimensionRequest(request) + if not isinstance( + request, analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest + ): + request = ( + analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest( + request + ) + ) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if custom_dimension is not None: - request.custom_dimension = custom_dimension - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent + if display_video_360_advertiser_link_proposal is not None: + request.display_video_360_advertiser_link_proposal = ( + display_video_360_advertiser_link_proposal + ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_custom_dimension] + rpc = self._transport._wrapped_methods[ + self._transport.create_display_video360_advertiser_link_proposal + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("custom_dimension.name", request.custom_dimension.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -5774,28 +5862,34 @@ def update_custom_dimension( # Done; return the response. return response - def list_custom_dimensions( + def delete_display_video360_advertiser_link_proposal( self, request: Optional[ - Union[analytics_admin.ListCustomDimensionsRequest, dict] + Union[ + analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, dict + ] ] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListCustomDimensionsPager: - r"""Lists CustomDimensions on a property. + ) -> None: + r"""Deletes a DisplayVideo360AdvertiserLinkProposal on a + property. This can only be used on cancelled proposals. 
Args: - request (Union[google.analytics.admin_v1alpha.types.ListCustomDimensionsRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.DeleteDisplayVideo360AdvertiserLinkProposalRequest, dict]): The request object. Request message for - ListCustomDimensions RPC. - parent (str): - Required. Example format: - properties/1234 + DeleteDisplayVideo360AdvertiserLinkProposal + RPC. + name (str): + Required. The name of the + DisplayVideo360AdvertiserLinkProposal to + delete. Example format: + properties/1234/displayVideo360AdvertiserLinkProposals/5678 - This corresponds to the ``parent`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -5803,20 +5897,11 @@ def list_custom_dimensions( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListCustomDimensionsPager: - Response message for - ListCustomDimensions RPC. - Iterating over this object will yield - results and resolve additional pages - automatically. - """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5825,102 +5910,95 @@ def list_custom_dimensions( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.ListCustomDimensionsRequest): - request = analytics_admin.ListCustomDimensionsRequest(request) + if not isinstance( + request, analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest + ): + request = ( + analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest( + request + ) + ) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_custom_dimensions] + rpc = self._transport._wrapped_methods[ + self._transport.delete_display_video360_advertiser_link_proposal + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - response = rpc( + rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListCustomDimensionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def archive_custom_dimension( + def approve_display_video360_advertiser_link_proposal( self, request: Optional[ - Union[analytics_admin.ArchiveCustomDimensionRequest, dict] + Union[ + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, + dict, + ] ] = None, *, - name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Archives a CustomDimension on a property. + ) -> analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse: + r"""Approves a DisplayVideo360AdvertiserLinkProposal. + The DisplayVideo360AdvertiserLinkProposal will be + deleted and a new DisplayVideo360AdvertiserLink will be + created. Args: - request (Union[google.analytics.admin_v1alpha.types.ArchiveCustomDimensionRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.ApproveDisplayVideo360AdvertiserLinkProposalRequest, dict]): The request object. Request message for - ArchiveCustomDimension RPC. - name (str): - Required. The name of the - CustomDimension to archive. Example - format: - properties/1234/customDimensions/5678 - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. + ApproveDisplayVideo360AdvertiserLinkProposal + RPC. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.ApproveDisplayVideo360AdvertiserLinkProposalResponse: + Response message for + ApproveDisplayVideo360AdvertiserLinkProposal + RPC. + """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ArchiveCustomDimensionRequest): - request = analytics_admin.ArchiveCustomDimensionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name + if not isinstance( + request, analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest + ): + request = ( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest( + request + ) + ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.archive_custom_dimension] + rpc = self._transport._wrapped_methods[ + self._transport.approve_display_video360_advertiser_link_proposal + ] # Certain fields should be provided within the metadata header; # add these here. @@ -5932,38 +6010,42 @@ def archive_custom_dimension( self._validate_universe_domain() # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - def get_custom_dimension( + # Done; return the response. + return response + + def cancel_display_video360_advertiser_link_proposal( self, request: Optional[ - Union[analytics_admin.GetCustomDimensionRequest, dict] + Union[ + analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, dict + ] ] = None, *, - name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.CustomDimension: - r"""Lookup for a single CustomDimension. 
+ ) -> resources.DisplayVideo360AdvertiserLinkProposal: + r"""Cancels a DisplayVideo360AdvertiserLinkProposal. + Cancelling can mean either: + + - Declining a proposal initiated from Display & Video + 360 + - Withdrawing a proposal initiated from Google Analytics + After being cancelled, a proposal will eventually be + deleted automatically. Args: - request (Union[google.analytics.admin_v1alpha.types.GetCustomDimensionRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.CancelDisplayVideo360AdvertiserLinkProposalRequest, dict]): The request object. Request message for - GetCustomDimension RPC. - name (str): - Required. The name of the - CustomDimension to get. Example format: - properties/1234/customDimensions/5678 - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. + CancelDisplayVideo360AdvertiserLinkProposal + RPC. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5971,31 +6053,36 @@ def get_custom_dimension( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CustomDimension: - A definition for a CustomDimension. + google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: + A proposal for a link between a GA4 + property and a Display & Video 360 + advertiser. + + A proposal is converted to a + DisplayVideo360AdvertiserLink once + approved. Google Analytics admins + approve inbound proposals while Display + & Video 360 admins approve outbound + proposals. + """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetCustomDimensionRequest): - request = analytics_admin.GetCustomDimensionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name + if not isinstance( + request, analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest + ): + request = ( + analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest( + request + ) + ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_custom_dimension] + rpc = self._transport._wrapped_methods[ + self._transport.cancel_display_video360_advertiser_link_proposal + ] # Certain fields should be provided within the metadata header; # add these here. @@ -6017,24 +6104,24 @@ def get_custom_dimension( # Done; return the response. return response - def create_custom_metric( + def create_custom_dimension( self, request: Optional[ - Union[analytics_admin.CreateCustomMetricRequest, dict] + Union[analytics_admin.CreateCustomDimensionRequest, dict] ] = None, *, parent: Optional[str] = None, - custom_metric: Optional[resources.CustomMetric] = None, + custom_dimension: Optional[resources.CustomDimension] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.CustomMetric: - r"""Creates a CustomMetric. + ) -> resources.CustomDimension: + r"""Creates a CustomDimension. 
Args: - request (Union[google.analytics.admin_v1alpha.types.CreateCustomMetricRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.CreateCustomDimensionRequest, dict]): The request object. Request message for - CreateCustomMetric RPC. + CreateCustomDimension RPC. parent (str): Required. Example format: properties/1234 @@ -6042,9 +6129,11 @@ def create_custom_metric( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - custom_metric (google.analytics.admin_v1alpha.types.CustomMetric): - Required. The CustomMetric to create. - This corresponds to the ``custom_metric`` field + custom_dimension (google.analytics.admin_v1alpha.types.CustomDimension): + Required. The CustomDimension to + create. + + This corresponds to the ``custom_dimension`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -6054,13 +6143,13 @@ def create_custom_metric( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CustomMetric: - A definition for a custom metric. + google.analytics.admin_v1alpha.types.CustomDimension: + A definition for a CustomDimension. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, custom_metric]) + has_flattened_params = any([parent, custom_dimension]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6069,18 +6158,18 @@ def create_custom_metric( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.CreateCustomMetricRequest): - request = analytics_admin.CreateCustomMetricRequest(request) + if not isinstance(request, analytics_admin.CreateCustomDimensionRequest): + request = analytics_admin.CreateCustomDimensionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if custom_metric is not None: - request.custom_metric = custom_metric + if custom_dimension is not None: + request.custom_dimension = custom_dimension # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_custom_metric] + rpc = self._transport._wrapped_methods[self._transport.create_custom_dimension] # Certain fields should be provided within the metadata header; # add these here. @@ -6102,27 +6191,27 @@ def create_custom_metric( # Done; return the response. return response - def update_custom_metric( + def update_custom_dimension( self, request: Optional[ - Union[analytics_admin.UpdateCustomMetricRequest, dict] + Union[analytics_admin.UpdateCustomDimensionRequest, dict] ] = None, *, - custom_metric: Optional[resources.CustomMetric] = None, + custom_dimension: Optional[resources.CustomDimension] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.CustomMetric: - r"""Updates a CustomMetric on a property. + ) -> resources.CustomDimension: + r"""Updates a CustomDimension on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.UpdateCustomMetricRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.UpdateCustomDimensionRequest, dict]): The request object. Request message for - UpdateCustomMetric RPC. 
- custom_metric (google.analytics.admin_v1alpha.types.CustomMetric): - The CustomMetric to update - This corresponds to the ``custom_metric`` field + UpdateCustomDimension RPC. + custom_dimension (google.analytics.admin_v1alpha.types.CustomDimension): + The CustomDimension to update + This corresponds to the ``custom_dimension`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -6141,13 +6230,13 @@ def update_custom_metric( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CustomMetric: - A definition for a custom metric. + google.analytics.admin_v1alpha.types.CustomDimension: + A definition for a CustomDimension. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([custom_metric, update_mask]) + has_flattened_params = any([custom_dimension, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6156,24 +6245,24 @@ def update_custom_metric( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateCustomMetricRequest): - request = analytics_admin.UpdateCustomMetricRequest(request) + if not isinstance(request, analytics_admin.UpdateCustomDimensionRequest): + request = analytics_admin.UpdateCustomDimensionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if custom_metric is not None: - request.custom_metric = custom_metric + if custom_dimension is not None: + request.custom_dimension = custom_dimension if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_custom_metric] + rpc = self._transport._wrapped_methods[self._transport.update_custom_dimension] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("custom_metric.name", request.custom_metric.name),) + (("custom_dimension.name", request.custom_dimension.name),) ), ) @@ -6191,21 +6280,23 @@ def update_custom_metric( # Done; return the response. return response - def list_custom_metrics( + def list_custom_dimensions( self, - request: Optional[Union[analytics_admin.ListCustomMetricsRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.ListCustomDimensionsRequest, dict] + ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListCustomMetricsPager: - r"""Lists CustomMetrics on a property. + ) -> pagers.ListCustomDimensionsPager: + r"""Lists CustomDimensions on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.ListCustomMetricsRequest, dict]): - The request object. Request message for ListCustomMetrics - RPC. + request (Union[google.analytics.admin_v1alpha.types.ListCustomDimensionsRequest, dict]): + The request object. Request message for + ListCustomDimensions RPC. parent (str): Required. Example format: properties/1234 @@ -6220,9 +6311,9 @@ def list_custom_metrics( sent along with the request as metadata. 
Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListCustomMetricsPager: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListCustomDimensionsPager: Response message for - ListCustomMetrics RPC. + ListCustomDimensions RPC. Iterating over this object will yield results and resolve additional pages automatically. @@ -6240,8 +6331,8 @@ def list_custom_metrics( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListCustomMetricsRequest): - request = analytics_admin.ListCustomMetricsRequest(request) + if not isinstance(request, analytics_admin.ListCustomDimensionsRequest): + request = analytics_admin.ListCustomDimensionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -6249,7 +6340,7 @@ def list_custom_metrics( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_custom_metrics] + rpc = self._transport._wrapped_methods[self._transport.list_custom_dimensions] # Certain fields should be provided within the metadata header; # add these here. @@ -6270,7 +6361,7 @@ def list_custom_metrics( # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListCustomMetricsPager( + response = pagers.ListCustomDimensionsPager( method=rpc, request=request, response=response, @@ -6282,10 +6373,10 @@ def list_custom_metrics( # Done; return the response. 
return response - def archive_custom_metric( + def archive_custom_dimension( self, request: Optional[ - Union[analytics_admin.ArchiveCustomMetricRequest, dict] + Union[analytics_admin.ArchiveCustomDimensionRequest, dict] ] = None, *, name: Optional[str] = None, @@ -6293,16 +6384,17 @@ def archive_custom_metric( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Archives a CustomMetric on a property. + r"""Archives a CustomDimension on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.ArchiveCustomMetricRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.ArchiveCustomDimensionRequest, dict]): The request object. Request message for - ArchiveCustomMetric RPC. + ArchiveCustomDimension RPC. name (str): Required. The name of the - CustomMetric to archive. Example format: - properties/1234/customMetrics/5678 + CustomDimension to archive. Example + format: + properties/1234/customDimensions/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -6325,8 +6417,8 @@ def archive_custom_metric( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ArchiveCustomMetricRequest): - request = analytics_admin.ArchiveCustomMetricRequest(request) + if not isinstance(request, analytics_admin.ArchiveCustomDimensionRequest): + request = analytics_admin.ArchiveCustomDimensionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -6334,7 +6426,7 @@ def archive_custom_metric( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.archive_custom_metric] + rpc = self._transport._wrapped_methods[self._transport.archive_custom_dimension] # Certain fields should be provided within the metadata header; # add these here. @@ -6353,25 +6445,27 @@ def archive_custom_metric( metadata=metadata, ) - def get_custom_metric( + def get_custom_dimension( self, - request: Optional[Union[analytics_admin.GetCustomMetricRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.GetCustomDimensionRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.CustomMetric: - r"""Lookup for a single CustomMetric. + ) -> resources.CustomDimension: + r"""Lookup for a single CustomDimension. Args: - request (Union[google.analytics.admin_v1alpha.types.GetCustomMetricRequest, dict]): - The request object. Request message for GetCustomMetric - RPC. + request (Union[google.analytics.admin_v1alpha.types.GetCustomDimensionRequest, dict]): + The request object. Request message for + GetCustomDimension RPC. name (str): Required. The name of the - CustomMetric to get. Example format: - properties/1234/customMetrics/5678 + CustomDimension to get. Example format: + properties/1234/customDimensions/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -6383,8 +6477,8 @@ def get_custom_metric( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CustomMetric: - A definition for a custom metric. + google.analytics.admin_v1alpha.types.CustomDimension: + A definition for a CustomDimension. """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have @@ -6398,8 +6492,8 @@ def get_custom_metric( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetCustomMetricRequest): - request = analytics_admin.GetCustomMetricRequest(request) + if not isinstance(request, analytics_admin.GetCustomDimensionRequest): + request = analytics_admin.GetCustomDimensionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -6407,7 +6501,7 @@ def get_custom_metric( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_custom_metric] + rpc = self._transport._wrapped_methods[self._transport.get_custom_dimension] # Certain fields should be provided within the metadata header; # add these here. @@ -6429,33 +6523,34 @@ def get_custom_metric( # Done; return the response. return response - def get_data_retention_settings( + def create_custom_metric( self, request: Optional[ - Union[analytics_admin.GetDataRetentionSettingsRequest, dict] + Union[analytics_admin.CreateCustomMetricRequest, dict] ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + custom_metric: Optional[resources.CustomMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DataRetentionSettings: - r"""Returns the singleton data retention settings for - this property. + ) -> resources.CustomMetric: + r"""Creates a CustomMetric. Args: - request (Union[google.analytics.admin_v1alpha.types.GetDataRetentionSettingsRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.CreateCustomMetricRequest, dict]): The request object. 
Request message for - GetDataRetentionSettings RPC. - name (str): - Required. The name of the settings to - lookup. Format: - - properties/{property}/dataRetentionSettings - Example: - "properties/1000/dataRetentionSettings" + CreateCustomMetric RPC. + parent (str): + Required. Example format: + properties/1234 - This corresponds to the ``name`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + custom_metric (google.analytics.admin_v1alpha.types.CustomMetric): + Required. The CustomMetric to create. + This corresponds to the ``custom_metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -6465,15 +6560,13 @@ def get_data_retention_settings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DataRetentionSettings: - Settings values for data retention. - This is a singleton resource. - + google.analytics.admin_v1alpha.types.CustomMetric: + A definition for a custom metric. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent, custom_metric]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6482,23 +6575,23 @@ def get_data_retention_settings( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.GetDataRetentionSettingsRequest): - request = analytics_admin.GetDataRetentionSettingsRequest(request) + if not isinstance(request, analytics_admin.CreateCustomMetricRequest): + request = analytics_admin.CreateCustomMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if custom_metric is not None: + request.custom_metric = custom_metric # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.get_data_retention_settings - ] + rpc = self._transport._wrapped_methods[self._transport.create_custom_metric] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -6515,35 +6608,31 @@ def get_data_retention_settings( # Done; return the response. return response - def update_data_retention_settings( + def update_custom_metric( self, request: Optional[ - Union[analytics_admin.UpdateDataRetentionSettingsRequest, dict] + Union[analytics_admin.UpdateCustomMetricRequest, dict] ] = None, *, - data_retention_settings: Optional[resources.DataRetentionSettings] = None, + custom_metric: Optional[resources.CustomMetric] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DataRetentionSettings: - r"""Updates the singleton data retention settings for - this property. + ) -> resources.CustomMetric: + r"""Updates a CustomMetric on a property. 
Args: - request (Union[google.analytics.admin_v1alpha.types.UpdateDataRetentionSettingsRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.UpdateCustomMetricRequest, dict]): The request object. Request message for - UpdateDataRetentionSettings RPC. - data_retention_settings (google.analytics.admin_v1alpha.types.DataRetentionSettings): - Required. The settings to update. The ``name`` field is - used to identify the settings to be updated. - - This corresponds to the ``data_retention_settings`` field + UpdateCustomMetric RPC. + custom_metric (google.analytics.admin_v1alpha.types.CustomMetric): + The CustomMetric to update + This corresponds to the ``custom_metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to be updated. Field names - must be in snake case (e.g., "field_to_update"). Omitted + Required. The list of fields to be updated. Omitted fields will not be updated. To replace the entire entity, use one path with the string "*" to match all fields. @@ -6558,15 +6647,13 @@ def update_data_retention_settings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DataRetentionSettings: - Settings values for data retention. - This is a singleton resource. - + google.analytics.admin_v1alpha.types.CustomMetric: + A definition for a custom metric. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([data_retention_settings, update_mask]) + has_flattened_params = any([custom_metric, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6575,31 +6662,24 @@ def update_data_retention_settings( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateDataRetentionSettingsRequest): - request = analytics_admin.UpdateDataRetentionSettingsRequest(request) + if not isinstance(request, analytics_admin.UpdateCustomMetricRequest): + request = analytics_admin.UpdateCustomMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if data_retention_settings is not None: - request.data_retention_settings = data_retention_settings + if custom_metric is not None: + request.custom_metric = custom_metric if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.update_data_retention_settings - ] + rpc = self._transport._wrapped_methods[self._transport.update_custom_metric] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - ( - ( - "data_retention_settings.name", - request.data_retention_settings.name, - ), - ) + (("custom_metric.name", request.custom_metric.name),) ), ) @@ -6617,21 +6697,20 @@ def update_data_retention_settings( # Done; return the response. 
return response - def create_data_stream( + def list_custom_metrics( self, - request: Optional[Union[analytics_admin.CreateDataStreamRequest, dict]] = None, + request: Optional[Union[analytics_admin.ListCustomMetricsRequest, dict]] = None, *, parent: Optional[str] = None, - data_stream: Optional[resources.DataStream] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DataStream: - r"""Creates a DataStream. + ) -> pagers.ListCustomMetricsPager: + r"""Lists CustomMetrics on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.CreateDataStreamRequest, dict]): - The request object. Request message for CreateDataStream + request (Union[google.analytics.admin_v1alpha.types.ListCustomMetricsRequest, dict]): + The request object. Request message for ListCustomMetrics RPC. parent (str): Required. Example format: @@ -6640,11 +6719,6 @@ def create_data_stream( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - data_stream (google.analytics.admin_v1alpha.types.DataStream): - Required. The DataStream to create. - This corresponds to the ``data_stream`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -6652,15 +6726,18 @@ def create_data_stream( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DataStream: - A resource message representing a - data stream. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListCustomMetricsPager: + Response message for + ListCustomMetrics RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_stream]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6669,18 +6746,16 @@ def create_data_stream( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateDataStreamRequest): - request = analytics_admin.CreateDataStreamRequest(request) + if not isinstance(request, analytics_admin.ListCustomMetricsRequest): + request = analytics_admin.ListCustomMetricsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if data_stream is not None: - request.data_stream = data_stream # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_data_stream] + rpc = self._transport._wrapped_methods[self._transport.list_custom_metrics] # Certain fields should be provided within the metadata header; # add these here. @@ -6699,28 +6774,41 @@ def create_data_stream( metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCustomMetricsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. 
return response - def delete_data_stream( + def archive_custom_metric( self, - request: Optional[Union[analytics_admin.DeleteDataStreamRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.ArchiveCustomMetricRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a DataStream on a property. + r"""Archives a CustomMetric on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.DeleteDataStreamRequest, dict]): - The request object. Request message for DeleteDataStream - RPC. + request (Union[google.analytics.admin_v1alpha.types.ArchiveCustomMetricRequest, dict]): + The request object. Request message for + ArchiveCustomMetric RPC. name (str): - Required. The name of the DataStream - to delete. Example format: - properties/1234/dataStreams/5678 + Required. The name of the + CustomMetric to archive. Example format: + properties/1234/customMetrics/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -6743,8 +6831,8 @@ def delete_data_stream( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteDataStreamRequest): - request = analytics_admin.DeleteDataStreamRequest(request) + if not isinstance(request, analytics_admin.ArchiveCustomMetricRequest): + request = analytics_admin.ArchiveCustomMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -6752,7 +6840,7 @@ def delete_data_stream( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.delete_data_stream] + rpc = self._transport._wrapped_methods[self._transport.archive_custom_metric] # Certain fields should be provided within the metadata header; # add these here. @@ -6771,34 +6859,27 @@ def delete_data_stream( metadata=metadata, ) - def update_data_stream( + def get_custom_metric( self, - request: Optional[Union[analytics_admin.UpdateDataStreamRequest, dict]] = None, + request: Optional[Union[analytics_admin.GetCustomMetricRequest, dict]] = None, *, - data_stream: Optional[resources.DataStream] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DataStream: - r"""Updates a DataStream on a property. + ) -> resources.CustomMetric: + r"""Lookup for a single CustomMetric. Args: - request (Union[google.analytics.admin_v1alpha.types.UpdateDataStreamRequest, dict]): - The request object. Request message for UpdateDataStream + request (Union[google.analytics.admin_v1alpha.types.GetCustomMetricRequest, dict]): + The request object. Request message for GetCustomMetric RPC. - data_stream (google.analytics.admin_v1alpha.types.DataStream): - The DataStream to update - This corresponds to the ``data_stream`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to be updated. Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. + name (str): + Required. The name of the + CustomMetric to get. 
Example format: + properties/1234/customMetrics/5678 - This corresponds to the ``update_mask`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -6808,15 +6889,13 @@ def update_data_stream( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DataStream: - A resource message representing a - data stream. - + google.analytics.admin_v1alpha.types.CustomMetric: + A definition for a custom metric. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_stream, update_mask]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6825,25 +6904,21 @@ def update_data_stream( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateDataStreamRequest): - request = analytics_admin.UpdateDataStreamRequest(request) + if not isinstance(request, analytics_admin.GetCustomMetricRequest): + request = analytics_admin.GetCustomMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if data_stream is not None: - request.data_stream = data_stream - if update_mask is not None: - request.update_mask = update_mask + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.update_data_stream] + rpc = self._transport._wrapped_methods[self._transport.get_custom_metric] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("data_stream.name", request.data_stream.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -6860,26 +6935,33 @@ def update_data_stream( # Done; return the response. return response - def list_data_streams( + def get_data_retention_settings( self, - request: Optional[Union[analytics_admin.ListDataStreamsRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.GetDataRetentionSettingsRequest, dict] + ] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDataStreamsPager: - r"""Lists DataStreams on a property. + ) -> resources.DataRetentionSettings: + r"""Returns the singleton data retention settings for + this property. Args: - request (Union[google.analytics.admin_v1alpha.types.ListDataStreamsRequest, dict]): - The request object. Request message for ListDataStreams - RPC. - parent (str): - Required. Example format: - properties/1234 + request (Union[google.analytics.admin_v1alpha.types.GetDataRetentionSettingsRequest, dict]): + The request object. Request message for + GetDataRetentionSettings RPC. + name (str): + Required. The name of the settings to + lookup. Format: - This corresponds to the ``parent`` field + properties/{property}/dataRetentionSettings + Example: + "properties/1000/dataRetentionSettings" + + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -6889,18 +6971,15 @@ def list_data_streams( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListDataStreamsPager: - Response message for ListDataStreams - RPC. - Iterating over this object will yield - results and resolve additional pages - automatically. + google.analytics.admin_v1alpha.types.DataRetentionSettings: + Settings values for data retention. + This is a singleton resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6909,21 +6988,23 @@ def list_data_streams( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListDataStreamsRequest): - request = analytics_admin.ListDataStreamsRequest(request) + if not isinstance(request, analytics_admin.GetDataRetentionSettingsRequest): + request = analytics_admin.GetDataRetentionSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_streams] + rpc = self._transport._wrapped_methods[ + self._transport.get_data_retention_settings + ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -6937,41 +7018,43 @@ def list_data_streams( metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDataStreamsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - # Done; return the response. return response - def get_data_stream( + def update_data_retention_settings( self, - request: Optional[Union[analytics_admin.GetDataStreamRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.UpdateDataRetentionSettingsRequest, dict] + ] = None, *, - name: Optional[str] = None, + data_retention_settings: Optional[resources.DataRetentionSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.DataStream: - r"""Lookup for a single DataStream. + ) -> resources.DataRetentionSettings: + r"""Updates the singleton data retention settings for + this property. Args: - request (Union[google.analytics.admin_v1alpha.types.GetDataStreamRequest, dict]): - The request object. Request message for GetDataStream - RPC. - name (str): - Required. The name of the DataStream - to get. Example format: - properties/1234/dataStreams/5678 + request (Union[google.analytics.admin_v1alpha.types.UpdateDataRetentionSettingsRequest, dict]): + The request object. Request message for + UpdateDataRetentionSettings RPC. + data_retention_settings (google.analytics.admin_v1alpha.types.DataRetentionSettings): + Required. The settings to update. The ``name`` field is + used to identify the settings to be updated. 
- This corresponds to the ``name`` field + This corresponds to the ``data_retention_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -6981,15 +7064,15 @@ def get_data_stream( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.DataStream: - A resource message representing a - data stream. + google.analytics.admin_v1alpha.types.DataRetentionSettings: + Settings values for data retention. + This is a singleton resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([data_retention_settings, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6998,21 +7081,32 @@ def get_data_stream( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetDataStreamRequest): - request = analytics_admin.GetDataStreamRequest(request) + if not isinstance(request, analytics_admin.UpdateDataRetentionSettingsRequest): + request = analytics_admin.UpdateDataRetentionSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: - request.name = name + if data_retention_settings is not None: + request.data_retention_settings = data_retention_settings + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_stream] + rpc = self._transport._wrapped_methods[ + self._transport.update_data_retention_settings + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "data_retention_settings.name", + request.data_retention_settings.name, + ), + ) + ), ) # Validate the universe domain. @@ -7029,28 +7123,32 @@ def get_data_stream( # Done; return the response. return response - def get_audience( + def create_data_stream( self, - request: Optional[Union[analytics_admin.GetAudienceRequest, dict]] = None, + request: Optional[Union[analytics_admin.CreateDataStreamRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + data_stream: Optional[resources.DataStream] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> audience.Audience: - r"""Lookup for a single Audience. - Audiences created before 2020 may not be supported. - Default audiences will not show filter definitions. + ) -> resources.DataStream: + r"""Creates a DataStream. Args: - request (Union[google.analytics.admin_v1alpha.types.GetAudienceRequest, dict]): - The request object. Request message for GetAudience RPC. - name (str): - Required. The name of the Audience to - get. Example format: - properties/1234/audiences/5678 + request (Union[google.analytics.admin_v1alpha.types.CreateDataStreamRequest, dict]): + The request object. 
Request message for CreateDataStream + RPC. + parent (str): + Required. Example format: + properties/1234 - This corresponds to the ``name`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_stream (google.analytics.admin_v1alpha.types.DataStream): + Required. The DataStream to create. + This corresponds to the ``data_stream`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -7060,15 +7158,15 @@ def get_audience( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.Audience: - A resource message representing a GA4 - Audience. + google.analytics.admin_v1alpha.types.DataStream: + A resource message representing a + data stream. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent, data_stream]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -7077,21 +7175,23 @@ def get_audience( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetAudienceRequest): - request = analytics_admin.GetAudienceRequest(request) + if not isinstance(request, analytics_admin.CreateDataStreamRequest): + request = analytics_admin.CreateDataStreamRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if data_stream is not None: + request.data_stream = data_stream # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_audience] + rpc = self._transport._wrapped_methods[self._transport.create_data_stream] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -7108,28 +7208,27 @@ def get_audience( # Done; return the response. return response - def list_audiences( + def delete_data_stream( self, - request: Optional[Union[analytics_admin.ListAudiencesRequest, dict]] = None, + request: Optional[Union[analytics_admin.DeleteDataStreamRequest, dict]] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAudiencesPager: - r"""Lists Audiences on a property. - Audiences created before 2020 may not be supported. - Default audiences will not show filter definitions. + ) -> None: + r"""Deletes a DataStream on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.ListAudiencesRequest, dict]): - The request object. Request message for ListAudiences + request (Union[google.analytics.admin_v1alpha.types.DeleteDataStreamRequest, dict]): + The request object. Request message for DeleteDataStream RPC. - parent (str): - Required. Example format: - properties/1234 + name (str): + Required. The name of the DataStream + to delete. 
Example format: + properties/1234/dataStreams/5678 - This corresponds to the ``parent`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -7137,20 +7236,11 @@ def list_audiences( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListAudiencesPager: - Response message for ListAudiences - RPC. - Iterating over this object will yield - results and resolve additional pages - automatically. - """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -7159,74 +7249,62 @@ def list_audiences( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListAudiencesRequest): - request = analytics_admin.ListAudiencesRequest(request) + if not isinstance(request, analytics_admin.DeleteDataStreamRequest): + request = analytics_admin.DeleteDataStreamRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_audiences] + rpc = self._transport._wrapped_methods[self._transport.delete_data_stream] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - response = rpc( + rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAudiencesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_audience( + def update_data_stream( self, - request: Optional[Union[analytics_admin.CreateAudienceRequest, dict]] = None, + request: Optional[Union[analytics_admin.UpdateDataStreamRequest, dict]] = None, *, - parent: Optional[str] = None, - audience: Optional[gaa_audience.Audience] = None, + data_stream: Optional[resources.DataStream] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_audience.Audience: - r"""Creates an Audience. + ) -> resources.DataStream: + r"""Updates a DataStream on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.CreateAudienceRequest, dict]): - The request object. Request message for CreateAudience + request (Union[google.analytics.admin_v1alpha.types.UpdateDataStreamRequest, dict]): + The request object. Request message for UpdateDataStream RPC. - parent (str): - Required. 
Example format: - properties/1234 - - This corresponds to the ``parent`` field + data_stream (google.analytics.admin_v1alpha.types.DataStream): + The DataStream to update + This corresponds to the ``data_stream`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - audience (google.analytics.admin_v1alpha.types.Audience): - Required. The audience to create. - This corresponds to the ``audience`` field + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -7236,15 +7314,15 @@ def create_audience( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.Audience: - A resource message representing a GA4 - Audience. + google.analytics.admin_v1alpha.types.DataStream: + A resource message representing a + data stream. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, audience]) + has_flattened_params = any([data_stream, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -7253,23 +7331,25 @@ def create_audience( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.CreateAudienceRequest): - request = analytics_admin.CreateAudienceRequest(request) + if not isinstance(request, analytics_admin.UpdateDataStreamRequest): + request = analytics_admin.UpdateDataStreamRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent - if audience is not None: - request.audience = audience + if data_stream is not None: + request.data_stream = data_stream + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_audience] + rpc = self._transport._wrapped_methods[self._transport.update_data_stream] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("data_stream.name", request.data_stream.name),) + ), ) # Validate the universe domain. @@ -7286,38 +7366,26 @@ def create_audience( # Done; return the response. return response - def update_audience( + def list_data_streams( self, - request: Optional[Union[analytics_admin.UpdateAudienceRequest, dict]] = None, + request: Optional[Union[analytics_admin.ListDataStreamsRequest, dict]] = None, *, - audience: Optional[gaa_audience.Audience] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_audience.Audience: - r"""Updates an Audience on a property. + ) -> pagers.ListDataStreamsPager: + r"""Lists DataStreams on a property. 
Args: - request (Union[google.analytics.admin_v1alpha.types.UpdateAudienceRequest, dict]): - The request object. Request message for UpdateAudience + request (Union[google.analytics.admin_v1alpha.types.ListDataStreamsRequest, dict]): + The request object. Request message for ListDataStreams RPC. - audience (google.analytics.admin_v1alpha.types.Audience): - Required. The audience to update. The audience's - ``name`` field is used to identify the audience to be - updated. - - This corresponds to the ``audience`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to be updated. Field names - must be in snake case (e.g., "field_to_update"). Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. + parent (str): + Required. Example format: + properties/1234 - This corresponds to the ``update_mask`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -7327,15 +7395,18 @@ def update_audience( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.Audience: - A resource message representing a GA4 - Audience. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListDataStreamsPager: + Response message for ListDataStreams + RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([audience, update_mask]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -7344,25 +7415,21 @@ def update_audience( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateAudienceRequest): - request = analytics_admin.UpdateAudienceRequest(request) + if not isinstance(request, analytics_admin.ListDataStreamsRequest): + request = analytics_admin.ListDataStreamsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if audience is not None: - request.audience = audience - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_audience] + rpc = self._transport._wrapped_methods[self._transport.list_data_streams] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("audience.name", request.audience.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -7376,38 +7443,77 @@ def update_audience( metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataStreamsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. 
return response - def archive_audience( + def get_data_stream( self, - request: Optional[Union[analytics_admin.ArchiveAudienceRequest, dict]] = None, + request: Optional[Union[analytics_admin.GetDataStreamRequest, dict]] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Archives an Audience on a property. + ) -> resources.DataStream: + r"""Lookup for a single DataStream. Args: - request (Union[google.analytics.admin_v1alpha.types.ArchiveAudienceRequest, dict]): - The request object. Request message for ArchiveAudience + request (Union[google.analytics.admin_v1alpha.types.GetDataStreamRequest, dict]): + The request object. Request message for GetDataStream RPC. + name (str): + Required. The name of the DataStream + to get. Example format: + properties/1234/dataStreams/5678 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.DataStream: + A resource message representing a + data stream. + """ # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.ArchiveAudienceRequest): - request = analytics_admin.ArchiveAudienceRequest(request) + if not isinstance(request, analytics_admin.GetDataStreamRequest): + request = analytics_admin.GetDataStreamRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.archive_audience] + rpc = self._transport._wrapped_methods[self._transport.get_data_stream] # Certain fields should be provided within the metadata header; # add these here. @@ -7419,34 +7525,36 @@ def archive_audience( self._validate_universe_domain() # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - def get_search_ads360_link( + # Done; return the response. + return response + + def get_audience( self, - request: Optional[ - Union[analytics_admin.GetSearchAds360LinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.GetAudienceRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.SearchAds360Link: - r"""Look up a single SearchAds360Link + ) -> audience.Audience: + r"""Lookup for a single Audience. + Audiences created before 2020 may not be supported. + Default audiences will not show filter definitions. Args: - request (Union[google.analytics.admin_v1alpha.types.GetSearchAds360LinkRequest, dict]): - The request object. Request message for - GetSearchAds360Link RPC. + request (Union[google.analytics.admin_v1alpha.types.GetAudienceRequest, dict]): + The request object. Request message for GetAudience RPC. name (str): - Required. The name of the - SearchAds360Link to get. 
Example format: - properties/1234/SearchAds360Link/5678 + Required. The name of the Audience to + get. Example format: + properties/1234/audiences/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -7458,9 +7566,9 @@ def get_search_ads360_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. + google.analytics.admin_v1alpha.types.Audience: + A resource message representing a GA4 + Audience. """ # Create or coerce a protobuf request object. @@ -7475,8 +7583,8 @@ def get_search_ads360_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetSearchAds360LinkRequest): - request = analytics_admin.GetSearchAds360LinkRequest(request) + if not isinstance(request, analytics_admin.GetAudienceRequest): + request = analytics_admin.GetAudienceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -7484,7 +7592,7 @@ def get_search_ads360_link( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_search_ads360_link] + rpc = self._transport._wrapped_methods[self._transport.get_audience] # Certain fields should be provided within the metadata header; # add these here. @@ -7506,23 +7614,23 @@ def get_search_ads360_link( # Done; return the response. 
return response - def list_search_ads360_links( + def list_audiences( self, - request: Optional[ - Union[analytics_admin.ListSearchAds360LinksRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.ListAudiencesRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSearchAds360LinksPager: - r"""Lists all SearchAds360Links on a property. + ) -> pagers.ListAudiencesPager: + r"""Lists Audiences on a property. + Audiences created before 2020 may not be supported. + Default audiences will not show filter definitions. Args: - request (Union[google.analytics.admin_v1alpha.types.ListSearchAds360LinksRequest, dict]): - The request object. Request message for - ListSearchAds360Links RPC. + request (Union[google.analytics.admin_v1alpha.types.ListAudiencesRequest, dict]): + The request object. Request message for ListAudiences + RPC. parent (str): Required. Example format: properties/1234 @@ -7537,9 +7645,9 @@ def list_search_ads360_links( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListSearchAds360LinksPager: - Response message for - ListSearchAds360Links RPC. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListAudiencesPager: + Response message for ListAudiences + RPC. Iterating over this object will yield results and resolve additional pages automatically. @@ -7557,8 +7665,8 @@ def list_search_ads360_links( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.ListSearchAds360LinksRequest): - request = analytics_admin.ListSearchAds360LinksRequest(request) + if not isinstance(request, analytics_admin.ListAudiencesRequest): + request = analytics_admin.ListAudiencesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -7566,7 +7674,7 @@ def list_search_ads360_links( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_search_ads360_links] + rpc = self._transport._wrapped_methods[self._transport.list_audiences] # Certain fields should be provided within the metadata header; # add these here. @@ -7587,7 +7695,7 @@ def list_search_ads360_links( # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListSearchAds360LinksPager( + response = pagers.ListAudiencesPager( method=rpc, request=request, response=response, @@ -7599,24 +7707,22 @@ def list_search_ads360_links( # Done; return the response. return response - def create_search_ads360_link( + def create_audience( self, - request: Optional[ - Union[analytics_admin.CreateSearchAds360LinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.CreateAudienceRequest, dict]] = None, *, parent: Optional[str] = None, - search_ads_360_link: Optional[resources.SearchAds360Link] = None, + audience: Optional[gaa_audience.Audience] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.SearchAds360Link: - r"""Creates a SearchAds360Link. + ) -> gaa_audience.Audience: + r"""Creates an Audience. Args: - request (Union[google.analytics.admin_v1alpha.types.CreateSearchAds360LinkRequest, dict]): - The request object. Request message for - CreateSearchAds360Link RPC. 
+ request (Union[google.analytics.admin_v1alpha.types.CreateAudienceRequest, dict]): + The request object. Request message for CreateAudience + RPC. parent (str): Required. Example format: properties/1234 @@ -7624,11 +7730,9 @@ def create_search_ads360_link( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - search_ads_360_link (google.analytics.admin_v1alpha.types.SearchAds360Link): - Required. The SearchAds360Link to - create. - - This corresponds to the ``search_ads_360_link`` field + audience (google.analytics.admin_v1alpha.types.Audience): + Required. The audience to create. + This corresponds to the ``audience`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -7638,15 +7742,15 @@ def create_search_ads360_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. + google.analytics.admin_v1alpha.types.Audience: + A resource message representing a GA4 + Audience. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, search_ads_360_link]) + has_flattened_params = any([parent, audience]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -7655,20 +7759,18 @@ def create_search_ads360_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.CreateSearchAds360LinkRequest): - request = analytics_admin.CreateSearchAds360LinkRequest(request) + if not isinstance(request, analytics_admin.CreateAudienceRequest): + request = analytics_admin.CreateAudienceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if search_ads_360_link is not None: - request.search_ads_360_link = search_ads_360_link + if audience is not None: + request.audience = audience # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.create_search_ads360_link - ] + rpc = self._transport._wrapped_methods[self._transport.create_audience] # Certain fields should be provided within the metadata header; # add these here. @@ -7690,30 +7792,38 @@ def create_search_ads360_link( # Done; return the response. return response - def delete_search_ads360_link( + def update_audience( self, - request: Optional[ - Union[analytics_admin.DeleteSearchAds360LinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.UpdateAudienceRequest, dict]] = None, *, - name: Optional[str] = None, + audience: Optional[gaa_audience.Audience] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a SearchAds360Link on a property. + ) -> gaa_audience.Audience: + r"""Updates an Audience on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.DeleteSearchAds360LinkRequest, dict]): - The request object. Request message for - DeleteSearchAds360Link RPC. - name (str): - Required. The name of the - SearchAds360Link to delete. 
Example - format: - properties/1234/SearchAds360Links/5678 + request (Union[google.analytics.admin_v1alpha.types.UpdateAudienceRequest, dict]): + The request object. Request message for UpdateAudience + RPC. + audience (google.analytics.admin_v1alpha.types.Audience): + Required. The audience to update. The audience's + ``name`` field is used to identify the audience to be + updated. - This corresponds to the ``name`` field + This corresponds to the ``audience`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -7721,11 +7831,17 @@ def delete_search_ads360_link( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.Audience: + A resource message representing a GA4 + Audience. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([audience, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -7734,150 +7850,109 @@ def delete_search_ads360_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.DeleteSearchAds360LinkRequest): - request = analytics_admin.DeleteSearchAds360LinkRequest(request) + if not isinstance(request, analytics_admin.UpdateAudienceRequest): + request = analytics_admin.UpdateAudienceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if audience is not None: + request.audience = audience + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.delete_search_ads360_link - ] + rpc = self._transport._wrapped_methods[self._transport.update_audience] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + (("audience.name", request.audience.name),) + ), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - def update_search_ads360_link( + # Done; return the response. + return response + + def archive_audience( self, - request: Optional[ - Union[analytics_admin.UpdateSearchAds360LinkRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.ArchiveAudienceRequest, dict]] = None, *, - search_ads_360_link: Optional[resources.SearchAds360Link] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.SearchAds360Link: - r"""Updates a SearchAds360Link on a property. + ) -> None: + r"""Archives an Audience on a property. 
Args: - request (Union[google.analytics.admin_v1alpha.types.UpdateSearchAds360LinkRequest, dict]): - The request object. Request message for - UpdateSearchAds360Link RPC. - search_ads_360_link (google.analytics.admin_v1alpha.types.SearchAds360Link): - The SearchAds360Link to update - This corresponds to the ``search_ads_360_link`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to be updated. Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + request (Union[google.analytics.admin_v1alpha.types.ArchiveAudienceRequest, dict]): + The request object. Request message for ArchiveAudience + RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.types.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. - """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([search_ads_360_link, update_mask]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.UpdateSearchAds360LinkRequest): - request = analytics_admin.UpdateSearchAds360LinkRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if search_ads_360_link is not None: - request.search_ads_360_link = search_ads_360_link - if update_mask is not None: - request.update_mask = update_mask + if not isinstance(request, analytics_admin.ArchiveAudienceRequest): + request = analytics_admin.ArchiveAudienceRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.update_search_ads360_link - ] + rpc = self._transport._wrapped_methods[self._transport.archive_audience] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("search_ads_360_link.name", request.search_ads_360_link.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - response = rpc( + rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. - return response - - def get_attribution_settings( + def get_search_ads360_link( self, request: Optional[ - Union[analytics_admin.GetAttributionSettingsRequest, dict] + Union[analytics_admin.GetSearchAds360LinkRequest, dict] ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AttributionSettings: - r"""Lookup for a AttributionSettings singleton. 
+ ) -> resources.SearchAds360Link: + r"""Look up a single SearchAds360Link Args: - request (Union[google.analytics.admin_v1alpha.types.GetAttributionSettingsRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.GetSearchAds360LinkRequest, dict]): The request object. Request message for - GetAttributionSettings RPC. + GetSearchAds360Link RPC. name (str): - Required. The name of the attribution - settings to retrieve. Format: - properties/{property}/attributionSettings + Required. The name of the + SearchAds360Link to get. Example format: + properties/1234/SearchAds360Link/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -7889,10 +7964,9 @@ def get_attribution_settings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.AttributionSettings: - The attribution settings used for a - given property. This is a singleton - resource. + google.analytics.admin_v1alpha.types.SearchAds360Link: + A link between a GA4 property and a + Search Ads 360 entity. """ # Create or coerce a protobuf request object. @@ -7907,8 +7981,8 @@ def get_attribution_settings( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetAttributionSettingsRequest): - request = analytics_admin.GetAttributionSettingsRequest(request) + if not isinstance(request, analytics_admin.GetSearchAds360LinkRequest): + request = analytics_admin.GetSearchAds360LinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -7916,7 +7990,7 @@ def get_attribution_settings( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.get_attribution_settings] + rpc = self._transport._wrapped_methods[self._transport.get_search_ads360_link] # Certain fields should be provided within the metadata header; # add these here. @@ -7938,40 +8012,28 @@ def get_attribution_settings( # Done; return the response. return response - def update_attribution_settings( + def list_search_ads360_links( self, request: Optional[ - Union[analytics_admin.UpdateAttributionSettingsRequest, dict] + Union[analytics_admin.ListSearchAds360LinksRequest, dict] ] = None, *, - attribution_settings: Optional[resources.AttributionSettings] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AttributionSettings: - r"""Updates attribution settings on a property. + ) -> pagers.ListSearchAds360LinksPager: + r"""Lists all SearchAds360Links on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.UpdateAttributionSettingsRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.ListSearchAds360LinksRequest, dict]): The request object. Request message for - UpdateAttributionSettings RPC - attribution_settings (google.analytics.admin_v1alpha.types.AttributionSettings): - Required. The attribution settings to update. The - ``name`` field is used to identify the settings to be - updated. - - This corresponds to the ``attribution_settings`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to be updated. Field names - must be in snake case (e.g., "field_to_update"). Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. + ListSearchAds360Links RPC. 
+ parent (str): + Required. Example format: + properties/1234 - This corresponds to the ``update_mask`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -7981,16 +8043,18 @@ def update_attribution_settings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.AttributionSettings: - The attribution settings used for a - given property. This is a singleton - resource. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListSearchAds360LinksPager: + Response message for + ListSearchAds360Links RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([attribution_settings, update_mask]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -7999,27 +8063,21 @@ def update_attribution_settings( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateAttributionSettingsRequest): - request = analytics_admin.UpdateAttributionSettingsRequest(request) + if not isinstance(request, analytics_admin.ListSearchAds360LinksRequest): + request = analytics_admin.ListSearchAds360LinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if attribution_settings is not None: - request.attribution_settings = attribution_settings - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.update_attribution_settings - ] + rpc = self._transport._wrapped_methods[self._transport.list_search_ads360_links] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("attribution_settings.name", request.attribution_settings.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -8033,72 +8091,12 @@ def update_attribution_settings( metadata=metadata, ) - # Done; return the response. - return response - - def run_access_report( - self, - request: Optional[Union[analytics_admin.RunAccessReportRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.RunAccessReportResponse: - r"""Returns a customized report of data access records. The report - provides records of each time a user reads Google Analytics - reporting data. Access records are retained for up to 2 years. - - Data Access Reports can be requested for a property. Reports may - be requested for any property, but dimensions that aren't - related to quota can only be requested on Google Analytics 360 - properties. This method is only available to Administrators. - - These data access records include GA4 UI Reporting, GA4 UI - Explorations, GA4 Data API, and other products like Firebase & - Admob that can retrieve data from Google Analytics through a - linkage. 
These records don't include property configuration - changes like adding a stream or changing a property's time zone. - For configuration change history, see - `searchChangeHistoryEvents `__. - - Args: - request (Union[google.analytics.admin_v1alpha.types.RunAccessReportRequest, dict]): - The request object. The request for a Data Access Record - Report. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.types.RunAccessReportResponse: - The customized Data Access Record - Report response. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.RunAccessReportRequest): - request = analytics_admin.RunAccessReportRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_access_report] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("entity", request.entity),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSearchAds360LinksPager( + method=rpc, + request=request, + response=response, retry=retry, timeout=timeout, metadata=metadata, @@ -8107,38 +8105,36 @@ def run_access_report( # Done; return the response. 
return response - def create_access_binding( + def create_search_ads360_link( self, request: Optional[ - Union[analytics_admin.CreateAccessBindingRequest, dict] + Union[analytics_admin.CreateSearchAds360LinkRequest, dict] ] = None, *, parent: Optional[str] = None, - access_binding: Optional[resources.AccessBinding] = None, + search_ads_360_link: Optional[resources.SearchAds360Link] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AccessBinding: - r"""Creates an access binding on an account or property. + ) -> resources.SearchAds360Link: + r"""Creates a SearchAds360Link. Args: - request (Union[google.analytics.admin_v1alpha.types.CreateAccessBindingRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.CreateSearchAds360LinkRequest, dict]): The request object. Request message for - CreateAccessBinding RPC. + CreateSearchAds360Link RPC. parent (str): - Required. Formats: - - - accounts/{account} - - properties/{property} + Required. Example format: + properties/1234 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - access_binding (google.analytics.admin_v1alpha.types.AccessBinding): - Required. The access binding to + search_ads_360_link (google.analytics.admin_v1alpha.types.SearchAds360Link): + Required. The SearchAds360Link to create. - This corresponds to the ``access_binding`` field + This corresponds to the ``search_ads_360_link`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -8148,15 +8144,15 @@ def create_access_binding( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.AccessBinding: - A binding of a user to a set of - roles. 
+ google.analytics.admin_v1alpha.types.SearchAds360Link: + A link between a GA4 property and a + Search Ads 360 entity. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, access_binding]) + has_flattened_params = any([parent, search_ads_360_link]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -8165,18 +8161,20 @@ def create_access_binding( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateAccessBindingRequest): - request = analytics_admin.CreateAccessBindingRequest(request) + if not isinstance(request, analytics_admin.CreateSearchAds360LinkRequest): + request = analytics_admin.CreateSearchAds360LinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if access_binding is not None: - request.access_binding = access_binding + if search_ads_360_link is not None: + request.search_ads_360_link = search_ads_360_link # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_access_binding] + rpc = self._transport._wrapped_methods[ + self._transport.create_search_ads360_link + ] # Certain fields should be provided within the metadata header; # add these here. @@ -8198,29 +8196,28 @@ def create_access_binding( # Done; return the response. 
return response - def get_access_binding( + def delete_search_ads360_link( self, - request: Optional[Union[analytics_admin.GetAccessBindingRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.DeleteSearchAds360LinkRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AccessBinding: - r"""Gets information about an access binding. + ) -> None: + r"""Deletes a SearchAds360Link on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.GetAccessBindingRequest, dict]): - The request object. Request message for GetAccessBinding - RPC. + request (Union[google.analytics.admin_v1alpha.types.DeleteSearchAds360LinkRequest, dict]): + The request object. Request message for + DeleteSearchAds360Link RPC. name (str): - Required. The name of the access - binding to retrieve. Formats: - - - - accounts/{account}/accessBindings/{accessBinding} - - - properties/{property}/accessBindings/{accessBinding} + Required. The name of the + SearchAds360Link to delete. Example + format: + properties/1234/SearchAds360Links/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -8230,12 +8227,6 @@ def get_access_binding( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.types.AccessBinding: - A binding of a user to a set of - roles. - """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -8249,8 +8240,8 @@ def get_access_binding( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.GetAccessBindingRequest): - request = analytics_admin.GetAccessBindingRequest(request) + if not isinstance(request, analytics_admin.DeleteSearchAds360LinkRequest): + request = analytics_admin.DeleteSearchAds360LinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -8258,7 +8249,9 @@ def get_access_binding( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_access_binding] + rpc = self._transport._wrapped_methods[ + self._transport.delete_search_ads360_link + ] # Certain fields should be provided within the metadata header; # add these here. @@ -8270,38 +8263,43 @@ def get_access_binding( self._validate_universe_domain() # Send the request. - response = rpc( + rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. - return response - - def update_access_binding( + def update_search_ads360_link( self, request: Optional[ - Union[analytics_admin.UpdateAccessBindingRequest, dict] + Union[analytics_admin.UpdateSearchAds360LinkRequest, dict] ] = None, *, - access_binding: Optional[resources.AccessBinding] = None, + search_ads_360_link: Optional[resources.SearchAds360Link] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AccessBinding: - r"""Updates an access binding on an account or property. + ) -> resources.SearchAds360Link: + r"""Updates a SearchAds360Link on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.UpdateAccessBindingRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.UpdateSearchAds360LinkRequest, dict]): The request object. Request message for - UpdateAccessBinding RPC. 
- access_binding (google.analytics.admin_v1alpha.types.AccessBinding): - Required. The access binding to - update. + UpdateSearchAds360Link RPC. + search_ads_360_link (google.analytics.admin_v1alpha.types.SearchAds360Link): + The SearchAds360Link to update + This corresponds to the ``search_ads_360_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. - This corresponds to the ``access_binding`` field + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -8311,15 +8309,15 @@ def update_access_binding( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.AccessBinding: - A binding of a user to a set of - roles. + google.analytics.admin_v1alpha.types.SearchAds360Link: + A link between a GA4 property and a + Search Ads 360 entity. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([access_binding]) + has_flattened_params = any([search_ads_360_link, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -8328,22 +8326,26 @@ def update_access_binding( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.UpdateAccessBindingRequest): - request = analytics_admin.UpdateAccessBindingRequest(request) + if not isinstance(request, analytics_admin.UpdateSearchAds360LinkRequest): + request = analytics_admin.UpdateSearchAds360LinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if access_binding is not None: - request.access_binding = access_binding + if search_ads_360_link is not None: + request.search_ads_360_link = search_ads_360_link + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_access_binding] + rpc = self._transport._wrapped_methods[ + self._transport.update_search_ads360_link + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("access_binding.name", request.access_binding.name),) + (("search_ads_360_link.name", request.search_ads_360_link.name),) ), ) @@ -8361,30 +8363,27 @@ def update_access_binding( # Done; return the response. return response - def delete_access_binding( + def get_attribution_settings( self, request: Optional[ - Union[analytics_admin.DeleteAccessBindingRequest, dict] + Union[analytics_admin.GetAttributionSettingsRequest, dict] ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an access binding on an account or property. + ) -> resources.AttributionSettings: + r"""Lookup for a AttributionSettings singleton. 
Args: - request (Union[google.analytics.admin_v1alpha.types.DeleteAccessBindingRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.GetAttributionSettingsRequest, dict]): The request object. Request message for - DeleteAccessBinding RPC. + GetAttributionSettings RPC. name (str): - Required. Formats: - - - - accounts/{account}/accessBindings/{accessBinding} - - - properties/{property}/accessBindings/{accessBinding} + Required. The name of the attribution + settings to retrieve. Format: + properties/{property}/attributionSettings This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -8394,6 +8393,13 @@ def delete_access_binding( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.AttributionSettings: + The attribution settings used for a + given property. This is a singleton + resource. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -8407,8 +8413,8 @@ def delete_access_binding( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteAccessBindingRequest): - request = analytics_admin.DeleteAccessBindingRequest(request) + if not isinstance(request, analytics_admin.GetAttributionSettingsRequest): + request = analytics_admin.GetAttributionSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -8416,7 +8422,7 @@ def delete_access_binding( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.delete_access_binding] + rpc = self._transport._wrapped_methods[self._transport.get_attribution_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -8428,37 +8434,50 @@ def delete_access_binding( self._validate_universe_domain() # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - def list_access_bindings( + # Done; return the response. + return response + + def update_attribution_settings( self, request: Optional[ - Union[analytics_admin.ListAccessBindingsRequest, dict] + Union[analytics_admin.UpdateAttributionSettingsRequest, dict] ] = None, *, - parent: Optional[str] = None, + attribution_settings: Optional[resources.AttributionSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAccessBindingsPager: - r"""Lists all access bindings on an account or property. + ) -> resources.AttributionSettings: + r"""Updates attribution settings on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.ListAccessBindingsRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.UpdateAttributionSettingsRequest, dict]): The request object. Request message for - ListAccessBindings RPC. - parent (str): - Required. Formats: + UpdateAttributionSettings RPC + attribution_settings (google.analytics.admin_v1alpha.types.AttributionSettings): + Required. The attribution settings to update. The + ``name`` field is used to identify the settings to be + updated. - - accounts/{account} - - properties/{property} + This corresponds to the ``attribution_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. 
The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. - This corresponds to the ``parent`` field + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -8468,18 +8487,16 @@ def list_access_bindings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListAccessBindingsPager: - Response message for - ListAccessBindings RPC. - Iterating over this object will yield - results and resolve additional pages - automatically. + google.analytics.admin_v1alpha.types.AttributionSettings: + The attribution settings used for a + given property. This is a singleton + resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([attribution_settings, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -8488,21 +8505,27 @@ def list_access_bindings( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListAccessBindingsRequest): - request = analytics_admin.ListAccessBindingsRequest(request) + if not isinstance(request, analytics_admin.UpdateAttributionSettingsRequest): + request = analytics_admin.UpdateAttributionSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: - request.parent = parent + if attribution_settings is not None: + request.attribution_settings = attribution_settings + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_access_bindings] + rpc = self._transport._wrapped_methods[ + self._transport.update_attribution_settings + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("attribution_settings.name", request.attribution_settings.name),) + ), ) # Validate the universe domain. @@ -8516,41 +8539,38 @@ def list_access_bindings( metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAccessBindingsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - # Done; return the response. return response - def batch_create_access_bindings( + def run_access_report( self, - request: Optional[ - Union[analytics_admin.BatchCreateAccessBindingsRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.RunAccessReportRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.BatchCreateAccessBindingsResponse: - r"""Creates information about multiple access bindings to - an account or property. + ) -> analytics_admin.RunAccessReportResponse: + r"""Returns a customized report of data access records. The report + provides records of each time a user reads Google Analytics + reporting data. Access records are retained for up to 2 years. 
- This method is transactional. If any AccessBinding - cannot be created, none of the AccessBindings will be - created. + Data Access Reports can be requested for a property. Reports may + be requested for any property, but dimensions that aren't + related to quota can only be requested on Google Analytics 360 + properties. This method is only available to Administrators. + + These data access records include GA4 UI Reporting, GA4 UI + Explorations, GA4 Data API, and other products like Firebase & + Admob that can retrieve data from Google Analytics through a + linkage. These records don't include property configuration + changes like adding a stream or changing a property's time zone. + For configuration change history, see + `searchChangeHistoryEvents `__. Args: - request (Union[google.analytics.admin_v1alpha.types.BatchCreateAccessBindingsRequest, dict]): - The request object. Request message for - BatchCreateAccessBindings RPC. + request (Union[google.analytics.admin_v1alpha.types.RunAccessReportRequest, dict]): + The request object. The request for a Data Access Record + Report. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -8558,27 +8578,25 @@ def batch_create_access_bindings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.BatchCreateAccessBindingsResponse: - Response message for - BatchCreateAccessBindings RPC. + google.analytics.admin_v1alpha.types.RunAccessReportResponse: + The customized Data Access Record + Report response. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.BatchCreateAccessBindingsRequest): - request = analytics_admin.BatchCreateAccessBindingsRequest(request) + if not isinstance(request, analytics_admin.RunAccessReportRequest): + request = analytics_admin.RunAccessReportRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.batch_create_access_bindings - ] + rpc = self._transport._wrapped_methods[self._transport.run_access_report] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("entity", request.entity),)), ) # Validate the universe domain. @@ -8595,23 +8613,40 @@ def batch_create_access_bindings( # Done; return the response. return response - def batch_get_access_bindings( + def create_access_binding( self, request: Optional[ - Union[analytics_admin.BatchGetAccessBindingsRequest, dict] + Union[analytics_admin.CreateAccessBindingRequest, dict] ] = None, *, + parent: Optional[str] = None, + access_binding: Optional[resources.AccessBinding] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.BatchGetAccessBindingsResponse: - r"""Gets information about multiple access bindings to an - account or property. + ) -> resources.AccessBinding: + r"""Creates an access binding on an account or property. Args: - request (Union[google.analytics.admin_v1alpha.types.BatchGetAccessBindingsRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.CreateAccessBindingRequest, dict]): The request object. Request message for - BatchGetAccessBindings RPC. + CreateAccessBinding RPC. + parent (str): + Required. 
Formats: + + - accounts/{account} + - properties/{property} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + access_binding (google.analytics.admin_v1alpha.types.AccessBinding): + Required. The access binding to + create. + + This corresponds to the ``access_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -8619,22 +8654,35 @@ def batch_get_access_bindings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.BatchGetAccessBindingsResponse: - Response message for - BatchGetAccessBindings RPC. + google.analytics.admin_v1alpha.types.AccessBinding: + A binding of a user to a set of + roles. """ # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, access_binding]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.BatchGetAccessBindingsRequest): - request = analytics_admin.BatchGetAccessBindingsRequest(request) + if not isinstance(request, analytics_admin.CreateAccessBindingRequest): + request = analytics_admin.CreateAccessBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if access_binding is not None: + request.access_binding = access_binding # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.batch_get_access_bindings - ] + rpc = self._transport._wrapped_methods[self._transport.create_access_binding] # Certain fields should be provided within the metadata header; # add these here. @@ -8656,23 +8704,33 @@ def batch_get_access_bindings( # Done; return the response. return response - def batch_update_access_bindings( + def get_access_binding( self, - request: Optional[ - Union[analytics_admin.BatchUpdateAccessBindingsRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.GetAccessBindingRequest, dict]] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.BatchUpdateAccessBindingsResponse: - r"""Updates information about multiple access bindings to - an account or property. + ) -> resources.AccessBinding: + r"""Gets information about an access binding. Args: - request (Union[google.analytics.admin_v1alpha.types.BatchUpdateAccessBindingsRequest, dict]): - The request object. Request message for - BatchUpdateAccessBindings RPC. + request (Union[google.analytics.admin_v1alpha.types.GetAccessBindingRequest, dict]): + The request object. Request message for GetAccessBinding + RPC. + name (str): + Required. The name of the access + binding to retrieve. Formats: + + - + accounts/{account}/accessBindings/{accessBinding} + - + properties/{property}/accessBindings/{accessBinding} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -8680,27 +8738,38 @@ def batch_update_access_bindings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.BatchUpdateAccessBindingsResponse: - Response message for - BatchUpdateAccessBindings RPC. + google.analytics.admin_v1alpha.types.AccessBinding: + A binding of a user to a set of + roles. """ # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.BatchUpdateAccessBindingsRequest): - request = analytics_admin.BatchUpdateAccessBindingsRequest(request) + if not isinstance(request, analytics_admin.GetAccessBindingRequest): + request = analytics_admin.GetAccessBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.batch_update_access_bindings - ] + rpc = self._transport._wrapped_methods[self._transport.get_access_binding] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -8717,79 +8786,111 @@ def batch_update_access_bindings( # Done; return the response. 
return response - def batch_delete_access_bindings( + def update_access_binding( self, request: Optional[ - Union[analytics_admin.BatchDeleteAccessBindingsRequest, dict] + Union[analytics_admin.UpdateAccessBindingRequest, dict] ] = None, *, + access_binding: Optional[resources.AccessBinding] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes information about multiple users' links to an - account or property. + ) -> resources.AccessBinding: + r"""Updates an access binding on an account or property. Args: - request (Union[google.analytics.admin_v1alpha.types.BatchDeleteAccessBindingsRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.UpdateAccessBindingRequest, dict]): The request object. Request message for - BatchDeleteAccessBindings RPC. + UpdateAccessBinding RPC. + access_binding (google.analytics.admin_v1alpha.types.AccessBinding): + Required. The access binding to + update. + + This corresponds to the ``access_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.AccessBinding: + A binding of a user to a set of + roles. + """ # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([access_binding]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.BatchDeleteAccessBindingsRequest): - request = analytics_admin.BatchDeleteAccessBindingsRequest(request) + if not isinstance(request, analytics_admin.UpdateAccessBindingRequest): + request = analytics_admin.UpdateAccessBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if access_binding is not None: + request.access_binding = access_binding # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.batch_delete_access_bindings - ] + rpc = self._transport._wrapped_methods[self._transport.update_access_binding] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("access_binding.name", request.access_binding.name),) + ), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - def get_expanded_data_set( + # Done; return the response. + return response + + def delete_access_binding( self, request: Optional[ - Union[analytics_admin.GetExpandedDataSetRequest, dict] + Union[analytics_admin.DeleteAccessBindingRequest, dict] ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> expanded_data_set.ExpandedDataSet: - r"""Lookup for a single ExpandedDataSet. + ) -> None: + r"""Deletes an access binding on an account or property. 
Args: - request (Union[google.analytics.admin_v1alpha.types.GetExpandedDataSetRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.DeleteAccessBindingRequest, dict]): The request object. Request message for - GetExpandedDataSet RPC. + DeleteAccessBinding RPC. name (str): - Required. The name of the - ExpandedDataSet to get. Example format: - properties/1234/expandedDataSets/5678 + Required. Formats: + + - + accounts/{account}/accessBindings/{accessBinding} + - + properties/{property}/accessBindings/{accessBinding} This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -8799,12 +8900,6 @@ def get_expanded_data_set( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.types.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. - """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -8818,8 +8913,8 @@ def get_expanded_data_set( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetExpandedDataSetRequest): - request = analytics_admin.GetExpandedDataSetRequest(request) + if not isinstance(request, analytics_admin.DeleteAccessBindingRequest): + request = analytics_admin.DeleteAccessBindingRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -8827,7 +8922,7 @@ def get_expanded_data_set( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.get_expanded_data_set] + rpc = self._transport._wrapped_methods[self._transport.delete_access_binding] # Certain fields should be provided within the metadata header; # add these here. @@ -8839,36 +8934,35 @@ def get_expanded_data_set( self._validate_universe_domain() # Send the request. - response = rpc( + rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. - return response - - def list_expanded_data_sets( + def list_access_bindings( self, request: Optional[ - Union[analytics_admin.ListExpandedDataSetsRequest, dict] + Union[analytics_admin.ListAccessBindingsRequest, dict] ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListExpandedDataSetsPager: - r"""Lists ExpandedDataSets on a property. + ) -> pagers.ListAccessBindingsPager: + r"""Lists all access bindings on an account or property. Args: - request (Union[google.analytics.admin_v1alpha.types.ListExpandedDataSetsRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.ListAccessBindingsRequest, dict]): The request object. Request message for - ListExpandedDataSets RPC. + ListAccessBindings RPC. parent (str): - Required. Example format: - properties/1234 + Required. Formats: + + - accounts/{account} + - properties/{property} This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -8880,9 +8974,9 @@ def list_expanded_data_sets( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListExpandedDataSetsPager: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListAccessBindingsPager: Response message for - ListExpandedDataSets RPC. + ListAccessBindings RPC. 
Iterating over this object will yield results and resolve additional pages automatically. @@ -8900,8 +8994,8 @@ def list_expanded_data_sets( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListExpandedDataSetsRequest): - request = analytics_admin.ListExpandedDataSetsRequest(request) + if not isinstance(request, analytics_admin.ListAccessBindingsRequest): + request = analytics_admin.ListAccessBindingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -8909,7 +9003,7 @@ def list_expanded_data_sets( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_expanded_data_sets] + rpc = self._transport._wrapped_methods[self._transport.list_access_bindings] # Certain fields should be provided within the metadata header; # add these here. @@ -8930,7 +9024,7 @@ def list_expanded_data_sets( # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListExpandedDataSetsPager( + response = pagers.ListAccessBindingsPager( method=rpc, request=request, response=response, @@ -8942,38 +9036,27 @@ def list_expanded_data_sets( # Done; return the response. 
return response - def create_expanded_data_set( + def batch_create_access_bindings( self, request: Optional[ - Union[analytics_admin.CreateExpandedDataSetRequest, dict] + Union[analytics_admin.BatchCreateAccessBindingsRequest, dict] ] = None, *, - parent: Optional[str] = None, - expanded_data_set: Optional[gaa_expanded_data_set.ExpandedDataSet] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_expanded_data_set.ExpandedDataSet: - r"""Creates a ExpandedDataSet. + ) -> analytics_admin.BatchCreateAccessBindingsResponse: + r"""Creates information about multiple access bindings to + an account or property. + + This method is transactional. If any AccessBinding + cannot be created, none of the AccessBindings will be + created. Args: - request (Union[google.analytics.admin_v1alpha.types.CreateExpandedDataSetRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.BatchCreateAccessBindingsRequest, dict]): The request object. Request message for - CreateExpandedDataSet RPC. - parent (str): - Required. Example format: - properties/1234 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - expanded_data_set (google.analytics.admin_v1alpha.types.ExpandedDataSet): - Required. The ExpandedDataSet to - create. - - This corresponds to the ``expanded_data_set`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. + BatchCreateAccessBindings RPC. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -8981,35 +9064,22 @@ def create_expanded_data_set( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. 
+ google.analytics.admin_v1alpha.types.BatchCreateAccessBindingsResponse: + Response message for + BatchCreateAccessBindings RPC. """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, expanded_data_set]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateExpandedDataSetRequest): - request = analytics_admin.CreateExpandedDataSetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if expanded_data_set is not None: - request.expanded_data_set = expanded_data_set + if not isinstance(request, analytics_admin.BatchCreateAccessBindingsRequest): + request = analytics_admin.BatchCreateAccessBindingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_expanded_data_set] + rpc = self._transport._wrapped_methods[ + self._transport.batch_create_access_bindings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -9031,42 +9101,84 @@ def create_expanded_data_set( # Done; return the response. 
return response - def update_expanded_data_set( + def batch_get_access_bindings( self, request: Optional[ - Union[analytics_admin.UpdateExpandedDataSetRequest, dict] + Union[analytics_admin.BatchGetAccessBindingsRequest, dict] ] = None, *, - expanded_data_set: Optional[gaa_expanded_data_set.ExpandedDataSet] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_expanded_data_set.ExpandedDataSet: - r"""Updates a ExpandedDataSet on a property. + ) -> analytics_admin.BatchGetAccessBindingsResponse: + r"""Gets information about multiple access bindings to an + account or property. Args: - request (Union[google.analytics.admin_v1alpha.types.UpdateExpandedDataSetRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.BatchGetAccessBindingsRequest, dict]): The request object. Request message for - UpdateExpandedDataSet RPC. - expanded_data_set (google.analytics.admin_v1alpha.types.ExpandedDataSet): - Required. The ExpandedDataSet to update. The resource's - ``name`` field is used to identify the ExpandedDataSet - to be updated. + BatchGetAccessBindings RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - This corresponds to the ``expanded_data_set`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to be updated. Field names - must be in snake case (e.g., "field_to_update"). Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. 
+ Returns: + google.analytics.admin_v1alpha.types.BatchGetAccessBindingsResponse: + Response message for + BatchGetAccessBindings RPC. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.BatchGetAccessBindingsRequest): + request = analytics_admin.BatchGetAccessBindingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_get_access_bindings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_update_access_bindings( + self, + request: Optional[ + Union[analytics_admin.BatchUpdateAccessBindingsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_admin.BatchUpdateAccessBindingsResponse: + r"""Updates information about multiple access bindings to + an account or property. + + Args: + request (Union[google.analytics.admin_v1alpha.types.BatchUpdateAccessBindingsRequest, dict]): + The request object. Request message for + BatchUpdateAccessBindings RPC. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -9074,42 +9186,27 @@ def update_expanded_data_set( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. + google.analytics.admin_v1alpha.types.BatchUpdateAccessBindingsResponse: + Response message for + BatchUpdateAccessBindings RPC. """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([expanded_data_set, update_mask]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateExpandedDataSetRequest): - request = analytics_admin.UpdateExpandedDataSetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if expanded_data_set is not None: - request.expanded_data_set = expanded_data_set - if update_mask is not None: - request.update_mask = update_mask + if not isinstance(request, analytics_admin.BatchUpdateAccessBindingsRequest): + request = analytics_admin.BatchUpdateAccessBindingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_expanded_data_set] + rpc = self._transport._wrapped_methods[ + self._transport.batch_update_access_bindings + ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("expanded_data_set.name", request.expanded_data_set.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -9126,30 +9223,23 @@ def update_expanded_data_set( # Done; return the response. return response - def delete_expanded_data_set( + def batch_delete_access_bindings( self, request: Optional[ - Union[analytics_admin.DeleteExpandedDataSetRequest, dict] + Union[analytics_admin.BatchDeleteAccessBindingsRequest, dict] ] = None, *, - name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a ExpandedDataSet on a property. + r"""Deletes information about multiple users' links to an + account or property. Args: - request (Union[google.analytics.admin_v1alpha.types.DeleteExpandedDataSetRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.BatchDeleteAccessBindingsRequest, dict]): The request object. Request message for - DeleteExpandedDataSet RPC. - name (str): - Required. Example format: - properties/1234/expandedDataSets/5678 - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. + BatchDeleteAccessBindings RPC. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -9157,32 +9247,21 @@ def delete_expanded_data_set( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteExpandedDataSetRequest): - request = analytics_admin.DeleteExpandedDataSetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name + if not isinstance(request, analytics_admin.BatchDeleteAccessBindingsRequest): + request = analytics_admin.BatchDeleteAccessBindingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_expanded_data_set] + rpc = self._transport._wrapped_methods[ + self._transport.batch_delete_access_bindings + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -9196,25 +9275,27 @@ def delete_expanded_data_set( metadata=metadata, ) - def get_channel_group( + def get_expanded_data_set( self, - request: Optional[Union[analytics_admin.GetChannelGroupRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.GetExpandedDataSetRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> channel_group.ChannelGroup: - r"""Lookup for a single ChannelGroup. + ) -> expanded_data_set.ExpandedDataSet: + r"""Lookup for a single ExpandedDataSet. 
Args: - request (Union[google.analytics.admin_v1alpha.types.GetChannelGroupRequest, dict]): - The request object. Request message for GetChannelGroup - RPC. + request (Union[google.analytics.admin_v1alpha.types.GetExpandedDataSetRequest, dict]): + The request object. Request message for + GetExpandedDataSet RPC. name (str): - Required. The ChannelGroup to get. - Example format: - properties/1234/channelGroups/5678 + Required. The name of the + ExpandedDataSet to get. Example format: + properties/1234/expandedDataSets/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -9226,9 +9307,9 @@ def get_channel_group( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.ChannelGroup: - A resource message representing a - Channel Group. + google.analytics.admin_v1alpha.types.ExpandedDataSet: + A resource message representing a GA4 + ExpandedDataSet. """ # Create or coerce a protobuf request object. @@ -9243,8 +9324,8 @@ def get_channel_group( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetChannelGroupRequest): - request = analytics_admin.GetChannelGroupRequest(request) + if not isinstance(request, analytics_admin.GetExpandedDataSetRequest): + request = analytics_admin.GetExpandedDataSetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -9252,7 +9333,7 @@ def get_channel_group( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_channel_group] + rpc = self._transport._wrapped_methods[self._transport.get_expanded_data_set] # Certain fields should be provided within the metadata header; # add these here. @@ -9274,24 +9355,25 @@ def get_channel_group( # Done; return the response. 
return response - def list_channel_groups( + def list_expanded_data_sets( self, - request: Optional[Union[analytics_admin.ListChannelGroupsRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.ListExpandedDataSetsRequest, dict] + ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListChannelGroupsPager: - r"""Lists ChannelGroups on a property. + ) -> pagers.ListExpandedDataSetsPager: + r"""Lists ExpandedDataSets on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.ListChannelGroupsRequest, dict]): - The request object. Request message for ListChannelGroups - RPC. + request (Union[google.analytics.admin_v1alpha.types.ListExpandedDataSetsRequest, dict]): + The request object. Request message for + ListExpandedDataSets RPC. parent (str): - Required. The property for which to - list ChannelGroups. Example format: + Required. Example format: properties/1234 This corresponds to the ``parent`` field @@ -9304,9 +9386,9 @@ def list_channel_groups( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListChannelGroupsPager: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListExpandedDataSetsPager: Response message for - ListChannelGroups RPC. + ListExpandedDataSets RPC. Iterating over this object will yield results and resolve additional pages automatically. @@ -9324,8 +9406,8 @@ def list_channel_groups( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.ListChannelGroupsRequest): - request = analytics_admin.ListChannelGroupsRequest(request) + if not isinstance(request, analytics_admin.ListExpandedDataSetsRequest): + request = analytics_admin.ListExpandedDataSetsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -9333,7 +9415,7 @@ def list_channel_groups( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_channel_groups] + rpc = self._transport._wrapped_methods[self._transport.list_expanded_data_sets] # Certain fields should be provided within the metadata header; # add these here. @@ -9354,7 +9436,7 @@ def list_channel_groups( # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListChannelGroupsPager( + response = pagers.ListExpandedDataSetsPager( method=rpc, request=request, response=response, @@ -9366,35 +9448,36 @@ def list_channel_groups( # Done; return the response. return response - def create_channel_group( + def create_expanded_data_set( self, request: Optional[ - Union[analytics_admin.CreateChannelGroupRequest, dict] + Union[analytics_admin.CreateExpandedDataSetRequest, dict] ] = None, *, parent: Optional[str] = None, - channel_group: Optional[gaa_channel_group.ChannelGroup] = None, + expanded_data_set: Optional[gaa_expanded_data_set.ExpandedDataSet] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_channel_group.ChannelGroup: - r"""Creates a ChannelGroup. + ) -> gaa_expanded_data_set.ExpandedDataSet: + r"""Creates a ExpandedDataSet. 
Args: - request (Union[google.analytics.admin_v1alpha.types.CreateChannelGroupRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.CreateExpandedDataSetRequest, dict]): The request object. Request message for - CreateChannelGroup RPC. + CreateExpandedDataSet RPC. parent (str): - Required. The property for which to - create a ChannelGroup. Example format: + Required. Example format: properties/1234 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - channel_group (google.analytics.admin_v1alpha.types.ChannelGroup): - Required. The ChannelGroup to create. - This corresponds to the ``channel_group`` field + expanded_data_set (google.analytics.admin_v1alpha.types.ExpandedDataSet): + Required. The ExpandedDataSet to + create. + + This corresponds to the ``expanded_data_set`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -9404,15 +9487,15 @@ def create_channel_group( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.ChannelGroup: - A resource message representing a - Channel Group. + google.analytics.admin_v1alpha.types.ExpandedDataSet: + A resource message representing a GA4 + ExpandedDataSet. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, channel_group]) + has_flattened_params = any([parent, expanded_data_set]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -9421,18 +9504,18 @@ def create_channel_group( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.CreateChannelGroupRequest): - request = analytics_admin.CreateChannelGroupRequest(request) + if not isinstance(request, analytics_admin.CreateExpandedDataSetRequest): + request = analytics_admin.CreateExpandedDataSetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if channel_group is not None: - request.channel_group = channel_group + if expanded_data_set is not None: + request.expanded_data_set = expanded_data_set # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_channel_group] + rpc = self._transport._wrapped_methods[self._transport.create_expanded_data_set] # Certain fields should be provided within the metadata header; # add these here. @@ -9454,30 +9537,30 @@ def create_channel_group( # Done; return the response. return response - def update_channel_group( + def update_expanded_data_set( self, request: Optional[ - Union[analytics_admin.UpdateChannelGroupRequest, dict] + Union[analytics_admin.UpdateExpandedDataSetRequest, dict] ] = None, *, - channel_group: Optional[gaa_channel_group.ChannelGroup] = None, + expanded_data_set: Optional[gaa_expanded_data_set.ExpandedDataSet] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_channel_group.ChannelGroup: - r"""Updates a ChannelGroup. + ) -> gaa_expanded_data_set.ExpandedDataSet: + r"""Updates a ExpandedDataSet on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.UpdateChannelGroupRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.UpdateExpandedDataSetRequest, dict]): The request object. Request message for - UpdateChannelGroup RPC. 
- channel_group (google.analytics.admin_v1alpha.types.ChannelGroup): - Required. The ChannelGroup to update. The resource's - ``name`` field is used to identify the ChannelGroup to - be updated. + UpdateExpandedDataSet RPC. + expanded_data_set (google.analytics.admin_v1alpha.types.ExpandedDataSet): + Required. The ExpandedDataSet to update. The resource's + ``name`` field is used to identify the ExpandedDataSet + to be updated. - This corresponds to the ``channel_group`` field + This corresponds to the ``expanded_data_set`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -9497,15 +9580,15 @@ def update_channel_group( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.ChannelGroup: - A resource message representing a - Channel Group. + google.analytics.admin_v1alpha.types.ExpandedDataSet: + A resource message representing a GA4 + ExpandedDataSet. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([channel_group, update_mask]) + has_flattened_params = any([expanded_data_set, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -9514,24 +9597,24 @@ def update_channel_group( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateChannelGroupRequest): - request = analytics_admin.UpdateChannelGroupRequest(request) + if not isinstance(request, analytics_admin.UpdateExpandedDataSetRequest): + request = analytics_admin.UpdateExpandedDataSetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if channel_group is not None: - request.channel_group = channel_group + if expanded_data_set is not None: + request.expanded_data_set = expanded_data_set if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_channel_group] + rpc = self._transport._wrapped_methods[self._transport.update_expanded_data_set] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("channel_group.name", request.channel_group.name),) + (("expanded_data_set.name", request.expanded_data_set.name),) ), ) @@ -9549,10 +9632,10 @@ def update_channel_group( # Done; return the response. return response - def delete_channel_group( + def delete_expanded_data_set( self, request: Optional[ - Union[analytics_admin.DeleteChannelGroupRequest, dict] + Union[analytics_admin.DeleteExpandedDataSetRequest, dict] ] = None, *, name: Optional[str] = None, @@ -9560,16 +9643,15 @@ def delete_channel_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a ChannelGroup on a property. + r"""Deletes a ExpandedDataSet on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.DeleteChannelGroupRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.DeleteExpandedDataSetRequest, dict]): The request object. Request message for - DeleteChannelGroup RPC. + DeleteExpandedDataSet RPC. name (str): - Required. The ChannelGroup to delete. - Example format: - properties/1234/channelGroups/5678 + Required. 
Example format: + properties/1234/expandedDataSets/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -9592,8 +9674,8 @@ def delete_channel_group( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteChannelGroupRequest): - request = analytics_admin.DeleteChannelGroupRequest(request) + if not isinstance(request, analytics_admin.DeleteExpandedDataSetRequest): + request = analytics_admin.DeleteExpandedDataSetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -9601,7 +9683,7 @@ def delete_channel_group( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_channel_group] + rpc = self._transport._wrapped_methods[self._transport.delete_expanded_data_set] # Certain fields should be provided within the metadata header; # add these here. @@ -9620,25 +9702,29 @@ def delete_channel_group( metadata=metadata, ) - def set_automated_ga4_configuration_opt_out( + def get_channel_group( self, - request: Optional[ - Union[analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.GetChannelGroupRequest, dict]] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse: - r"""Sets the opt out status for the automated GA4 setup - process for a UA property. - Note: this has no effect on GA4 property. + ) -> channel_group.ChannelGroup: + r"""Lookup for a single ChannelGroup. 
Args: - request (Union[google.analytics.admin_v1alpha.types.SetAutomatedGa4ConfigurationOptOutRequest, dict]): - The request object. Request for setting the opt out - status for the automated GA4 setup - process. + request (Union[google.analytics.admin_v1alpha.types.GetChannelGroupRequest, dict]): + The request object. Request message for GetChannelGroup + RPC. + name (str): + Required. The ChannelGroup to get. + Example format: + properties/1234/channelGroups/5678 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -9646,25 +9732,39 @@ def set_automated_ga4_configuration_opt_out( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.SetAutomatedGa4ConfigurationOptOutResponse: - Response message for setting the opt - out status for the automated GA4 setup - process. + google.analytics.admin_v1alpha.types.ChannelGroup: + A resource message representing a + Channel Group. """ # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance( - request, analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest - ): - request = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest(request) + if not isinstance(request, analytics_admin.GetChannelGroupRequest): + request = analytics_admin.GetChannelGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.set_automated_ga4_configuration_opt_out - ] + rpc = self._transport._wrapped_methods[self._transport.get_channel_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) # Validate the universe domain. self._validate_universe_domain() @@ -9680,25 +9780,29 @@ def set_automated_ga4_configuration_opt_out( # Done; return the response. return response - def fetch_automated_ga4_configuration_opt_out( + def list_channel_groups( self, - request: Optional[ - Union[analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.ListChannelGroupsRequest, dict]] = None, *, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse: - r"""Fetches the opt out status for the automated GA4 - setup process for a UA property. - Note: this has no effect on GA4 property. + ) -> pagers.ListChannelGroupsPager: + r"""Lists ChannelGroups on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.FetchAutomatedGa4ConfigurationOptOutRequest, dict]): - The request object. 
Request for fetching the opt out - status for the automated GA4 setup - process. + request (Union[google.analytics.admin_v1alpha.types.ListChannelGroupsRequest, dict]): + The request object. Request message for ListChannelGroups + RPC. + parent (str): + Required. The property for which to + list ChannelGroups. Example format: + properties/1234 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -9706,27 +9810,42 @@ def fetch_automated_ga4_configuration_opt_out( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.FetchAutomatedGa4ConfigurationOptOutResponse: - Response message for fetching the opt - out status for the automated GA4 setup - process. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListChannelGroupsPager: + Response message for + ListChannelGroups RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance( - request, analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest - ): - request = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest( - request + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." ) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, analytics_admin.ListChannelGroupsRequest): + request = analytics_admin.ListChannelGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.fetch_automated_ga4_configuration_opt_out - ] + rpc = self._transport._wrapped_methods[self._transport.list_channel_groups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) # Validate the universe domain. self._validate_universe_domain() @@ -9739,31 +9858,49 @@ def fetch_automated_ga4_configuration_opt_out( metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListChannelGroupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. return response - def get_big_query_link( + def create_channel_group( self, - request: Optional[Union[analytics_admin.GetBigQueryLinkRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.CreateChannelGroupRequest, dict] + ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + channel_group: Optional[gaa_channel_group.ChannelGroup] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.BigQueryLink: - r"""Lookup for a single BigQuery Link. + ) -> gaa_channel_group.ChannelGroup: + r"""Creates a ChannelGroup. Args: - request (Union[google.analytics.admin_v1alpha.types.GetBigQueryLinkRequest, dict]): - The request object. 
Request message for GetBigQueryLink - RPC. - name (str): - Required. The name of the BigQuery link to lookup. - Format: - properties/{property_id}/bigQueryLinks/{bigquery_link_id} - Example: properties/123/bigQueryLinks/456 + request (Union[google.analytics.admin_v1alpha.types.CreateChannelGroupRequest, dict]): + The request object. Request message for + CreateChannelGroup RPC. + parent (str): + Required. The property for which to + create a ChannelGroup. Example format: + properties/1234 - This corresponds to the ``name`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_group (google.analytics.admin_v1alpha.types.ChannelGroup): + Required. The ChannelGroup to create. + This corresponds to the ``channel_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -9773,15 +9910,15 @@ def get_big_query_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.BigQueryLink: - A link between a GA4 Property and - BigQuery project. + google.analytics.admin_v1alpha.types.ChannelGroup: + A resource message representing a + Channel Group. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent, channel_group]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -9790,21 +9927,23 @@ def get_big_query_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.GetBigQueryLinkRequest): - request = analytics_admin.GetBigQueryLinkRequest(request) + if not isinstance(request, analytics_admin.CreateChannelGroupRequest): + request = analytics_admin.CreateChannelGroupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if channel_group is not None: + request.channel_group = channel_group # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_big_query_link] + rpc = self._transport._wrapped_methods[self._transport.create_channel_group] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -9821,27 +9960,40 @@ def get_big_query_link( # Done; return the response. return response - def list_big_query_links( + def update_channel_group( self, - request: Optional[Union[analytics_admin.ListBigQueryLinksRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.UpdateChannelGroupRequest, dict] + ] = None, *, - parent: Optional[str] = None, + channel_group: Optional[gaa_channel_group.ChannelGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBigQueryLinksPager: - r"""Lists BigQuery Links on a property. + ) -> gaa_channel_group.ChannelGroup: + r"""Updates a ChannelGroup. Args: - request (Union[google.analytics.admin_v1alpha.types.ListBigQueryLinksRequest, dict]): - The request object. 
Request message for ListBigQueryLinks - RPC. - parent (str): - Required. The name of the property to list BigQuery - links under. Format: properties/{property_id} Example: - properties/1234 + request (Union[google.analytics.admin_v1alpha.types.UpdateChannelGroupRequest, dict]): + The request object. Request message for + UpdateChannelGroup RPC. + channel_group (google.analytics.admin_v1alpha.types.ChannelGroup): + Required. The ChannelGroup to update. The resource's + ``name`` field is used to identify the ChannelGroup to + be updated. - This corresponds to the ``parent`` field + This corresponds to the ``channel_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -9851,18 +10003,15 @@ def list_big_query_links( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListBigQueryLinksPager: - Response message for - ListBigQueryLinks RPC - Iterating over this object will yield - results and resolve additional pages - automatically. + google.analytics.admin_v1alpha.types.ChannelGroup: + A resource message representing a + Channel Group. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + has_flattened_params = any([channel_group, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -9871,21 +10020,25 @@ def list_big_query_links( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListBigQueryLinksRequest): - request = analytics_admin.ListBigQueryLinksRequest(request) + if not isinstance(request, analytics_admin.UpdateChannelGroupRequest): + request = analytics_admin.UpdateChannelGroupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if channel_group is not None: + request.channel_group = channel_group + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_big_query_links] + rpc = self._transport._wrapped_methods[self._transport.update_channel_group] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("channel_group.name", request.channel_group.name),) + ), ) # Validate the universe domain. @@ -9899,44 +10052,30 @@ def list_big_query_links( metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBigQueryLinksPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - # Done; return the response. 
return response - def get_enhanced_measurement_settings( + def delete_channel_group( self, request: Optional[ - Union[analytics_admin.GetEnhancedMeasurementSettingsRequest, dict] + Union[analytics_admin.DeleteChannelGroupRequest, dict] ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.EnhancedMeasurementSettings: - r"""Returns the enhanced measurement settings for this - data stream. Note that the stream must enable enhanced - measurement for these settings to take effect. + ) -> None: + r"""Deletes a ChannelGroup on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.GetEnhancedMeasurementSettingsRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.DeleteChannelGroupRequest, dict]): The request object. Request message for - GetEnhancedMeasurementSettings RPC. + DeleteChannelGroup RPC. name (str): - Required. The name of the settings to lookup. Format: - properties/{property}/dataStreams/{data_stream}/enhancedMeasurementSettings - Example: - "properties/1000/dataStreams/2000/enhancedMeasurementSettings" + Required. The ChannelGroup to delete. + Example format: + properties/1234/channelGroups/5678 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -9946,14 +10085,6 @@ def get_enhanced_measurement_settings( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.types.EnhancedMeasurementSettings: - Singleton resource under a web - DataStream, configuring measurement of - additional site interactions and - content. - """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have @@ -9967,10 +10098,8 @@ def get_enhanced_measurement_settings( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance( - request, analytics_admin.GetEnhancedMeasurementSettingsRequest - ): - request = analytics_admin.GetEnhancedMeasurementSettingsRequest(request) + if not isinstance(request, analytics_admin.DeleteChannelGroupRequest): + request = analytics_admin.DeleteChannelGroupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -9978,9 +10107,7 @@ def get_enhanced_measurement_settings( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.get_enhanced_measurement_settings - ] + rpc = self._transport._wrapped_methods[self._transport.delete_channel_group] # Certain fields should be provided within the metadata header; # add these here. @@ -9992,55 +10119,32 @@ def get_enhanced_measurement_settings( self._validate_universe_domain() # Send the request. - response = rpc( + rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. 
- return response - - def update_enhanced_measurement_settings( + def set_automated_ga4_configuration_opt_out( self, request: Optional[ - Union[analytics_admin.UpdateEnhancedMeasurementSettingsRequest, dict] + Union[analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, dict] ] = None, *, - enhanced_measurement_settings: Optional[ - resources.EnhancedMeasurementSettings - ] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.EnhancedMeasurementSettings: - r"""Updates the enhanced measurement settings for this - data stream. Note that the stream must enable enhanced - measurement for these settings to take effect. + ) -> analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse: + r"""Sets the opt out status for the automated GA4 setup + process for a UA property. + Note: this has no effect on GA4 property. Args: - request (Union[google.analytics.admin_v1alpha.types.UpdateEnhancedMeasurementSettingsRequest, dict]): - The request object. Request message for - UpdateEnhancedMeasurementSettings RPC. - enhanced_measurement_settings (google.analytics.admin_v1alpha.types.EnhancedMeasurementSettings): - Required. The settings to update. The ``name`` field is - used to identify the settings to be updated. - - This corresponds to the ``enhanced_measurement_settings`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to be updated. Field names - must be in snake case (e.g., "field_to_update"). Omitted - fields will not be updated. To replace the entire - entity, use one path with the string "*" to match all - fields. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
+ request (Union[google.analytics.admin_v1alpha.types.SetAutomatedGa4ConfigurationOptOutRequest, dict]): + The request object. Request for setting the opt out + status for the automated GA4 setup + process. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -10048,55 +10152,26 @@ def update_enhanced_measurement_settings( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.EnhancedMeasurementSettings: - Singleton resource under a web - DataStream, configuring measurement of - additional site interactions and - content. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([enhanced_measurement_settings, update_mask]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + google.analytics.admin_v1alpha.types.SetAutomatedGa4ConfigurationOptOutResponse: + Response message for setting the opt + out status for the automated GA4 setup + process. + """ + # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. if not isinstance( - request, analytics_admin.UpdateEnhancedMeasurementSettingsRequest + request, analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest ): - request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if enhanced_measurement_settings is not None: - request.enhanced_measurement_settings = enhanced_measurement_settings - if update_mask is not None: - request.update_mask = update_mask + request = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.update_enhanced_measurement_settings + self._transport.set_automated_ga4_configuration_opt_out ] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ( - "enhanced_measurement_settings.name", - request.enhanced_measurement_settings.name, - ), - ) - ), - ) - # Validate the universe domain. self._validate_universe_domain() @@ -10111,25 +10186,25 @@ def update_enhanced_measurement_settings( # Done; return the response. return response - def create_connected_site_tag( + def fetch_automated_ga4_configuration_opt_out( self, request: Optional[ - Union[analytics_admin.CreateConnectedSiteTagRequest, dict] + Union[analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, dict] ] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.CreateConnectedSiteTagResponse: - r"""Creates a connected site tag for a Universal - Analytics property. You can create a maximum of 20 - connected site tags per property. Note: This API cannot - be used on GA4 properties. + ) -> analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse: + r"""Fetches the opt out status for the automated GA4 + setup process for a UA property. + Note: this has no effect on GA4 property. Args: - request (Union[google.analytics.admin_v1alpha.types.CreateConnectedSiteTagRequest, dict]): - The request object. Request message for - CreateConnectedSiteTag RPC. 
+ request (Union[google.analytics.admin_v1alpha.types.FetchAutomatedGa4ConfigurationOptOutRequest, dict]): + The request object. Request for fetching the opt out + status for the automated GA4 setup + process. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -10137,21 +10212,26 @@ def create_connected_site_tag( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CreateConnectedSiteTagResponse: - Response message for - CreateConnectedSiteTag RPC. + google.analytics.admin_v1alpha.types.FetchAutomatedGa4ConfigurationOptOutResponse: + Response message for fetching the opt + out status for the automated GA4 setup + process. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateConnectedSiteTagRequest): - request = analytics_admin.CreateConnectedSiteTagRequest(request) + if not isinstance( + request, analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest + ): + request = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest( + request + ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.create_connected_site_tag + self._transport.fetch_automated_ga4_configuration_opt_out ] # Validate the universe domain. @@ -10168,72 +10248,36 @@ def create_connected_site_tag( # Done; return the response. 
return response - def delete_connected_site_tag( + def create_big_query_link( self, request: Optional[ - Union[analytics_admin.DeleteConnectedSiteTagRequest, dict] + Union[analytics_admin.CreateBigQueryLinkRequest, dict] ] = None, *, + parent: Optional[str] = None, + bigquery_link: Optional[resources.BigQueryLink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a connected site tag for a Universal - Analytics property. Note: this has no effect on GA4 - properties. + ) -> resources.BigQueryLink: + r"""Creates a BigQueryLink. Args: - request (Union[google.analytics.admin_v1alpha.types.DeleteConnectedSiteTagRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.CreateBigQueryLinkRequest, dict]): The request object. Request message for - DeleteConnectedSiteTag RPC. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteConnectedSiteTagRequest): - request = analytics_admin.DeleteConnectedSiteTagRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.delete_connected_site_tag - ] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_connected_site_tags( - self, - request: Optional[ - Union[analytics_admin.ListConnectedSiteTagsRequest, dict] - ] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.ListConnectedSiteTagsResponse: - r"""Lists the connected site tags for a Universal - Analytics property. A maximum of 20 connected site tags - will be returned. Note: this has no effect on GA4 - property. + CreateBigQueryLink RPC. + parent (str): + Required. Example format: + properties/1234 - Args: - request (Union[google.analytics.admin_v1alpha.types.ListConnectedSiteTagsRequest, dict]): - The request object. Request message for - ListConnectedSiteTags RPC. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + bigquery_link (google.analytics.admin_v1alpha.types.BigQueryLink): + Required. The BigQueryLink to create. + This corresponds to the ``bigquery_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -10241,77 +10285,42 @@ def list_connected_site_tags( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.ListConnectedSiteTagsResponse: - Response message for - ListConnectedSiteTags RPC. + google.analytics.admin_v1alpha.types.BigQueryLink: + A link between a GA4 Property and + BigQuery project. """ # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, bigquery_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListConnectedSiteTagsRequest): - request = analytics_admin.ListConnectedSiteTagsRequest(request) + if not isinstance(request, analytics_admin.CreateBigQueryLinkRequest): + request = analytics_admin.CreateBigQueryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if bigquery_link is not None: + request.bigquery_link = bigquery_link # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_connected_site_tags] - - # Validate the universe domain. - self._validate_universe_domain() + rpc = self._transport._wrapped_methods[self._transport.create_big_query_link] - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) - # Done; return the response. - return response - - def fetch_connected_ga4_property( - self, - request: Optional[ - Union[analytics_admin.FetchConnectedGa4PropertyRequest, dict] - ] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.FetchConnectedGa4PropertyResponse: - r"""Given a specified UA property, looks up the GA4 - property connected to it. 
Note: this cannot be used with - GA4 properties. - - Args: - request (Union[google.analytics.admin_v1alpha.types.FetchConnectedGa4PropertyRequest, dict]): - The request object. Request for looking up GA4 property - connected to a UA property. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.types.FetchConnectedGa4PropertyResponse: - Response for looking up GA4 property - connected to a UA property. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.FetchConnectedGa4PropertyRequest): - request = analytics_admin.FetchConnectedGa4PropertyRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.fetch_connected_ga4_property - ] - # Validate the universe domain. self._validate_universe_domain() @@ -10326,27 +10335,26 @@ def fetch_connected_ga4_property( # Done; return the response. return response - def get_ad_sense_link( + def get_big_query_link( self, - request: Optional[Union[analytics_admin.GetAdSenseLinkRequest, dict]] = None, + request: Optional[Union[analytics_admin.GetBigQueryLinkRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AdSenseLink: - r"""Looks up a single AdSenseLink. + ) -> resources.BigQueryLink: + r"""Lookup for a single BigQuery Link. 
Args: - request (Union[google.analytics.admin_v1alpha.types.GetAdSenseLinkRequest, dict]): - The request object. Request message to be passed to - GetAdSenseLink method. + request (Union[google.analytics.admin_v1alpha.types.GetBigQueryLinkRequest, dict]): + The request object. Request message for GetBigQueryLink + RPC. name (str): - Required. Unique identifier for the - AdSense Link requested. Format: - properties/{propertyId}/adSenseLinks/{linkId} - Example: - properties/1234/adSenseLinks/5678 + Required. The name of the BigQuery link to lookup. + Format: + properties/{property_id}/bigQueryLinks/{bigquery_link_id} + Example: properties/123/bigQueryLinks/456 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -10358,9 +10366,9 @@ def get_ad_sense_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.AdSenseLink: - A link between a GA4 Property and an - AdSense for Content ad client. + google.analytics.admin_v1alpha.types.BigQueryLink: + A link between a GA4 Property and + BigQuery project. """ # Create or coerce a protobuf request object. @@ -10375,8 +10383,8 @@ def get_ad_sense_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetAdSenseLinkRequest): - request = analytics_admin.GetAdSenseLinkRequest(request) + if not isinstance(request, analytics_admin.GetBigQueryLinkRequest): + request = analytics_admin.GetBigQueryLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -10384,7 +10392,7 @@ def get_ad_sense_link( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.get_ad_sense_link] + rpc = self._transport._wrapped_methods[self._transport.get_big_query_link] # Certain fields should be provided within the metadata header; # add these here. @@ -10406,36 +10414,29 @@ def get_ad_sense_link( # Done; return the response. return response - def create_ad_sense_link( + def list_big_query_links( self, - request: Optional[Union[analytics_admin.CreateAdSenseLinkRequest, dict]] = None, + request: Optional[Union[analytics_admin.ListBigQueryLinksRequest, dict]] = None, *, parent: Optional[str] = None, - adsense_link: Optional[resources.AdSenseLink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.AdSenseLink: - r"""Creates an AdSenseLink. + ) -> pagers.ListBigQueryLinksPager: + r"""Lists BigQuery Links on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.CreateAdSenseLinkRequest, dict]): - The request object. Request message to be passed to - CreateAdSenseLink method. + request (Union[google.analytics.admin_v1alpha.types.ListBigQueryLinksRequest, dict]): + The request object. Request message for ListBigQueryLinks + RPC. parent (str): - Required. The property for which to - create an AdSense Link. Format: - properties/{propertyId} Example: + Required. The name of the property to list BigQuery + links under. Format: properties/{property_id} Example: properties/1234 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - adsense_link (google.analytics.admin_v1alpha.types.AdSenseLink): - Required. The AdSense Link to create - This corresponds to the ``adsense_link`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -10443,15 +10444,18 @@ def create_ad_sense_link( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.AdSenseLink: - A link between a GA4 Property and an - AdSense for Content ad client. + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListBigQueryLinksPager: + Response message for + ListBigQueryLinks RPC + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, adsense_link]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -10460,18 +10464,16 @@ def create_ad_sense_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.CreateAdSenseLinkRequest): - request = analytics_admin.CreateAdSenseLinkRequest(request) + if not isinstance(request, analytics_admin.ListBigQueryLinksRequest): + request = analytics_admin.ListBigQueryLinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if adsense_link is not None: - request.adsense_link = adsense_link # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_ad_sense_link] + rpc = self._transport._wrapped_methods[self._transport.list_big_query_links] # Certain fields should be provided within the metadata header; # add these here. 
@@ -10490,44 +10492,1279 @@ def create_ad_sense_link( metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBigQueryLinksPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. return response - def delete_ad_sense_link( + def delete_big_query_link( self, - request: Optional[Union[analytics_admin.DeleteAdSenseLinkRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.DeleteBigQueryLinkRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes an AdSenseLink. + r"""Deletes a BigQueryLink on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.DeleteAdSenseLinkRequest, dict]): - The request object. Request message to be passed to - DeleteAdSenseLink method. + request (Union[google.analytics.admin_v1alpha.types.DeleteBigQueryLinkRequest, dict]): + The request object. Request message for + DeleteBigQueryLink RPC. name (str): - Required. Unique identifier for the - AdSense Link to be deleted. Format: - properties/{propertyId}/adSenseLinks/{linkId} + Required. The BigQueryLink to delete. + Example format: + properties/1234/bigQueryLinks/5678 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.DeleteBigQueryLinkRequest): + request = analytics_admin.DeleteBigQueryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_big_query_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_big_query_link( + self, + request: Optional[ + Union[analytics_admin.UpdateBigQueryLinkRequest, dict] + ] = None, + *, + bigquery_link: Optional[resources.BigQueryLink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.BigQueryLink: + r"""Updates a BigQueryLink. + + Args: + request (Union[google.analytics.admin_v1alpha.types.UpdateBigQueryLinkRequest, dict]): + The request object. Request message for + UpdateBigQueryLink RPC. 
+ bigquery_link (google.analytics.admin_v1alpha.types.BigQueryLink): + Required. The settings to update. The ``name`` field is + used to identify the settings to be updated. + + This corresponds to the ``bigquery_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.BigQueryLink: + A link between a GA4 Property and + BigQuery project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([bigquery_link, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.UpdateBigQueryLinkRequest): + request = analytics_admin.UpdateBigQueryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if bigquery_link is not None: + request.bigquery_link = bigquery_link + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_big_query_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("bigquery_link.name", request.bigquery_link.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_enhanced_measurement_settings( + self, + request: Optional[ + Union[analytics_admin.GetEnhancedMeasurementSettingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.EnhancedMeasurementSettings: + r"""Returns the enhanced measurement settings for this + data stream. Note that the stream must enable enhanced + measurement for these settings to take effect. + + Args: + request (Union[google.analytics.admin_v1alpha.types.GetEnhancedMeasurementSettingsRequest, dict]): + The request object. Request message for + GetEnhancedMeasurementSettings RPC. + name (str): + Required. The name of the settings to lookup. Format: + properties/{property}/dataStreams/{data_stream}/enhancedMeasurementSettings Example: - properties/1234/adSenseLinks/5678 + "properties/1000/dataStreams/2000/enhancedMeasurementSettings" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.EnhancedMeasurementSettings: + Singleton resource under a web + DataStream, configuring measurement of + additional site interactions and + content. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, analytics_admin.GetEnhancedMeasurementSettingsRequest + ): + request = analytics_admin.GetEnhancedMeasurementSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_enhanced_measurement_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_enhanced_measurement_settings( + self, + request: Optional[ + Union[analytics_admin.UpdateEnhancedMeasurementSettingsRequest, dict] + ] = None, + *, + enhanced_measurement_settings: Optional[ + resources.EnhancedMeasurementSettings + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.EnhancedMeasurementSettings: + r"""Updates the enhanced measurement settings for this + data stream. Note that the stream must enable enhanced + measurement for these settings to take effect. + + Args: + request (Union[google.analytics.admin_v1alpha.types.UpdateEnhancedMeasurementSettingsRequest, dict]): + The request object. Request message for + UpdateEnhancedMeasurementSettings RPC. + enhanced_measurement_settings (google.analytics.admin_v1alpha.types.EnhancedMeasurementSettings): + Required. The settings to update. The ``name`` field is + used to identify the settings to be updated. + + This corresponds to the ``enhanced_measurement_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.analytics.admin_v1alpha.types.EnhancedMeasurementSettings: + Singleton resource under a web + DataStream, configuring measurement of + additional site interactions and + content. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([enhanced_measurement_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, analytics_admin.UpdateEnhancedMeasurementSettingsRequest + ): + request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if enhanced_measurement_settings is not None: + request.enhanced_measurement_settings = enhanced_measurement_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_enhanced_measurement_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "enhanced_measurement_settings.name", + request.enhanced_measurement_settings.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_connected_site_tag( + self, + request: Optional[ + Union[analytics_admin.CreateConnectedSiteTagRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_admin.CreateConnectedSiteTagResponse: + r"""Creates a connected site tag for a Universal + Analytics property. You can create a maximum of 20 + connected site tags per property. Note: This API cannot + be used on GA4 properties. + + Args: + request (Union[google.analytics.admin_v1alpha.types.CreateConnectedSiteTagRequest, dict]): + The request object. Request message for + CreateConnectedSiteTag RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.CreateConnectedSiteTagResponse: + Response message for + CreateConnectedSiteTag RPC. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.CreateConnectedSiteTagRequest): + request = analytics_admin.CreateConnectedSiteTagRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_connected_site_tag + ] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_connected_site_tag( + self, + request: Optional[ + Union[analytics_admin.DeleteConnectedSiteTagRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a connected site tag for a Universal + Analytics property. Note: this has no effect on GA4 + properties. + + Args: + request (Union[google.analytics.admin_v1alpha.types.DeleteConnectedSiteTagRequest, dict]): + The request object. Request message for + DeleteConnectedSiteTag RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.DeleteConnectedSiteTagRequest): + request = analytics_admin.DeleteConnectedSiteTagRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_connected_site_tag + ] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_connected_site_tags( + self, + request: Optional[ + Union[analytics_admin.ListConnectedSiteTagsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_admin.ListConnectedSiteTagsResponse: + r"""Lists the connected site tags for a Universal + Analytics property. 
A maximum of 20 connected site tags + will be returned. Note: this has no effect on GA4 + property. + + Args: + request (Union[google.analytics.admin_v1alpha.types.ListConnectedSiteTagsRequest, dict]): + The request object. Request message for + ListConnectedSiteTags RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.ListConnectedSiteTagsResponse: + Response message for + ListConnectedSiteTags RPC. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.ListConnectedSiteTagsRequest): + request = analytics_admin.ListConnectedSiteTagsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_connected_site_tags] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_connected_ga4_property( + self, + request: Optional[ + Union[analytics_admin.FetchConnectedGa4PropertyRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_admin.FetchConnectedGa4PropertyResponse: + r"""Given a specified UA property, looks up the GA4 + property connected to it. Note: this cannot be used with + GA4 properties. 
+ + Args: + request (Union[google.analytics.admin_v1alpha.types.FetchConnectedGa4PropertyRequest, dict]): + The request object. Request for looking up GA4 property + connected to a UA property. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.FetchConnectedGa4PropertyResponse: + Response for looking up GA4 property + connected to a UA property. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.FetchConnectedGa4PropertyRequest): + request = analytics_admin.FetchConnectedGa4PropertyRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.fetch_connected_ga4_property + ] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_ad_sense_link( + self, + request: Optional[Union[analytics_admin.GetAdSenseLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.AdSenseLink: + r"""Looks up a single AdSenseLink. + + Args: + request (Union[google.analytics.admin_v1alpha.types.GetAdSenseLinkRequest, dict]): + The request object. Request message to be passed to + GetAdSenseLink method. + name (str): + Required. Unique identifier for the + AdSense Link requested. 
Format: + properties/{propertyId}/adSenseLinks/{linkId} + Example: + properties/1234/adSenseLinks/5678 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.AdSenseLink: + A link between a GA4 Property and an + AdSense for Content ad client. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.GetAdSenseLinkRequest): + request = analytics_admin.GetAdSenseLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_ad_sense_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_ad_sense_link( + self, + request: Optional[Union[analytics_admin.CreateAdSenseLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + adsense_link: Optional[resources.AdSenseLink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.AdSenseLink: + r"""Creates an AdSenseLink. + + Args: + request (Union[google.analytics.admin_v1alpha.types.CreateAdSenseLinkRequest, dict]): + The request object. Request message to be passed to + CreateAdSenseLink method. + parent (str): + Required. The property for which to + create an AdSense Link. Format: + properties/{propertyId} Example: + properties/1234 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + adsense_link (google.analytics.admin_v1alpha.types.AdSenseLink): + Required. The AdSense Link to create + This corresponds to the ``adsense_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.AdSenseLink: + A link between a GA4 Property and an + AdSense for Content ad client. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, adsense_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.CreateAdSenseLinkRequest): + request = analytics_admin.CreateAdSenseLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if adsense_link is not None: + request.adsense_link = adsense_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_ad_sense_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_ad_sense_link( + self, + request: Optional[Union[analytics_admin.DeleteAdSenseLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an AdSenseLink. + + Args: + request (Union[google.analytics.admin_v1alpha.types.DeleteAdSenseLinkRequest, dict]): + The request object. Request message to be passed to + DeleteAdSenseLink method. + name (str): + Required. Unique identifier for the + AdSense Link to be deleted. 
Format: + properties/{propertyId}/adSenseLinks/{linkId} + Example: + properties/1234/adSenseLinks/5678 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.DeleteAdSenseLinkRequest): + request = analytics_admin.DeleteAdSenseLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_ad_sense_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_ad_sense_links( + self, + request: Optional[Union[analytics_admin.ListAdSenseLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdSenseLinksPager: + r"""Lists AdSenseLinks on a property. + + Args: + request (Union[google.analytics.admin_v1alpha.types.ListAdSenseLinksRequest, dict]): + The request object. Request message to be passed to + ListAdSenseLinks method. + parent (str): + Required. Resource name of the parent + property. Format: + properties/{propertyId} + Example: properties/1234 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListAdSenseLinksPager: + Response message for ListAdSenseLinks + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, analytics_admin.ListAdSenseLinksRequest): + request = analytics_admin.ListAdSenseLinksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_ad_sense_links] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAdSenseLinksPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_event_create_rule( + self, + request: Optional[ + Union[analytics_admin.GetEventCreateRuleRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_create_and_edit.EventCreateRule: + r"""Lookup for a single EventCreateRule. + + Args: + request (Union[google.analytics.admin_v1alpha.types.GetEventCreateRuleRequest, dict]): + The request object. Request message for + GetEventCreateRule RPC. + name (str): + Required. The name of the + EventCreateRule to get. Example format: + properties/123/dataStreams/456/eventCreateRules/789 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.EventCreateRule: + An Event Create Rule defines + conditions that will trigger the + creation of an entirely new event based + upon matched criteria of a source event. + Additional mutations of the parameters + from the source event can be defined. + + Unlike Event Edit rules, Event Creation + Rules have no defined order. They will + all be run independently. + + Event Edit and Event Create rules can't + be used to modify an event created from + an Event Create rule. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.GetEventCreateRuleRequest): + request = analytics_admin.GetEventCreateRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_event_create_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_event_create_rules( + self, + request: Optional[ + Union[analytics_admin.ListEventCreateRulesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEventCreateRulesPager: + r"""Lists EventCreateRules on a web data stream. + + Args: + request (Union[google.analytics.admin_v1alpha.types.ListEventCreateRulesRequest, dict]): + The request object. Request message for + ListEventCreateRules RPC. + parent (str): + Required. Example format: + properties/123/dataStreams/456 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListEventCreateRulesPager: + Response message for + ListEventCreateRules RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.ListEventCreateRulesRequest): + request = analytics_admin.ListEventCreateRulesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_event_create_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEventCreateRulesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_event_create_rule( + self, + request: Optional[ + Union[analytics_admin.CreateEventCreateRuleRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + event_create_rule: Optional[event_create_and_edit.EventCreateRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_create_and_edit.EventCreateRule: + r"""Creates an EventCreateRule. + + Args: + request (Union[google.analytics.admin_v1alpha.types.CreateEventCreateRuleRequest, dict]): + The request object. Request message for + CreateEventCreateRule RPC. + parent (str): + Required. 
Example format: + properties/123/dataStreams/456 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + event_create_rule (google.analytics.admin_v1alpha.types.EventCreateRule): + Required. The EventCreateRule to + create. + + This corresponds to the ``event_create_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.EventCreateRule: + An Event Create Rule defines + conditions that will trigger the + creation of an entirely new event based + upon matched criteria of a source event. + Additional mutations of the parameters + from the source event can be defined. + + Unlike Event Edit rules, Event Creation + Rules have no defined order. They will + all be run independently. + + Event Edit and Event Create rules can't + be used to modify an event created from + an Event Create rule. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, event_create_rule]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, analytics_admin.CreateEventCreateRuleRequest): + request = analytics_admin.CreateEventCreateRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if event_create_rule is not None: + request.event_create_rule = event_create_rule + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_event_create_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_event_create_rule( + self, + request: Optional[ + Union[analytics_admin.UpdateEventCreateRuleRequest, dict] + ] = None, + *, + event_create_rule: Optional[event_create_and_edit.EventCreateRule] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_create_and_edit.EventCreateRule: + r"""Updates an EventCreateRule. + + Args: + request (Union[google.analytics.admin_v1alpha.types.UpdateEventCreateRuleRequest, dict]): + The request object. Request message for + UpdateEventCreateRule RPC. + event_create_rule (google.analytics.admin_v1alpha.types.EventCreateRule): + Required. The EventCreateRule to update. The resource's + ``name`` field is used to identify the EventCreateRule + to be updated. 
+ + This corresponds to the ``event_create_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Field names + must be in snake case (e.g., "field_to_update"). Omitted + fields will not be updated. To replace the entire + entity, use one path with the string "*" to match all + fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.EventCreateRule: + An Event Create Rule defines + conditions that will trigger the + creation of an entirely new event based + upon matched criteria of a source event. + Additional mutations of the parameters + from the source event can be defined. + + Unlike Event Edit rules, Event Creation + Rules have no defined order. They will + all be run independently. + + Event Edit and Event Create rules can't + be used to modify an event created from + an Event Create rule. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + has_flattened_params = any([event_create_rule, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -10536,56 +11773,64 @@ def delete_ad_sense_link( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteAdSenseLinkRequest): - request = analytics_admin.DeleteAdSenseLinkRequest(request) + if not isinstance(request, analytics_admin.UpdateEventCreateRuleRequest): + request = analytics_admin.UpdateEventCreateRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if event_create_rule is not None: + request.event_create_rule = event_create_rule + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_ad_sense_link] + rpc = self._transport._wrapped_methods[self._transport.update_event_create_rule] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + (("event_create_rule.name", request.event_create_rule.name),) + ), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - def list_ad_sense_links( + # Done; return the response. 
+ return response + + def delete_event_create_rule( self, - request: Optional[Union[analytics_admin.ListAdSenseLinksRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.DeleteEventCreateRuleRequest, dict] + ] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAdSenseLinksPager: - r"""Lists AdSenseLinks on a property. + ) -> None: + r"""Deletes an EventCreateRule. Args: - request (Union[google.analytics.admin_v1alpha.types.ListAdSenseLinksRequest, dict]): - The request object. Request message to be passed to - ListAdSenseLinks method. - parent (str): - Required. Resource name of the parent - property. Format: - properties/{propertyId} - Example: properties/1234 + request (Union[google.analytics.admin_v1alpha.types.DeleteEventCreateRuleRequest, dict]): + The request object. Request message for + DeleteEventCreateRule RPC. + name (str): + Required. Example format: - This corresponds to the ``parent`` field + properties/123/dataStreams/456/eventCreateRules/789 + + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -10593,20 +11838,11 @@ def list_ad_sense_links( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListAdSenseLinksPager: - Response message for ListAdSenseLinks - method. - Iterating over this object will yield - results and resolve additional pages - automatically. - """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -10615,69 +11851,53 @@ def list_ad_sense_links( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListAdSenseLinksRequest): - request = analytics_admin.ListAdSenseLinksRequest(request) + if not isinstance(request, analytics_admin.DeleteEventCreateRuleRequest): + request = analytics_admin.DeleteEventCreateRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_ad_sense_links] + rpc = self._transport._wrapped_methods[self._transport.delete_event_create_rule] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - response = rpc( + rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAdSenseLinksPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_event_create_rule( + def get_event_edit_rule( self, - request: Optional[ - Union[analytics_admin.GetEventCreateRuleRequest, dict] - ] = None, + request: Optional[Union[analytics_admin.GetEventEditRuleRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> event_create_and_edit.EventCreateRule: - r"""Lookup for a single EventCreateRule. + ) -> event_create_and_edit.EventEditRule: + r"""Lookup for a single EventEditRule. Args: - request (Union[google.analytics.admin_v1alpha.types.GetEventCreateRuleRequest, dict]): - The request object. Request message for - GetEventCreateRule RPC. + request (Union[google.analytics.admin_v1alpha.types.GetEventEditRuleRequest, dict]): + The request object. Request message for GetEventEditRule + RPC. name (str): Required. The name of the - EventCreateRule to get. Example format: - properties/123/dataStreams/456/eventCreateRules/789 + EventEditRule to get. Example format: + properties/123/dataStreams/456/eventEditRules/789 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -10689,21 +11909,21 @@ def get_event_create_rule( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.EventCreateRule: - An Event Create Rule defines - conditions that will trigger the - creation of an entirely new event based - upon matched criteria of a source event. - Additional mutations of the parameters - from the source event can be defined. + google.analytics.admin_v1alpha.types.EventEditRule: + An Event Edit Rule defines conditions + that will trigger the creation of an + entirely new event based upon matched + criteria of a source event. Additional + mutations of the parameters from the + source event can be defined. - Unlike Event Edit rules, Event Creation - Rules have no defined order. 
They will - all be run independently. + Unlike Event Create rules, Event Edit + Rules are applied in their defined + order. - Event Edit and Event Create rules can't - be used to modify an event created from - an Event Create rule. + Event Edit rules can't be used to modify + an event created from an Event Create + rule. """ # Create or coerce a protobuf request object. @@ -10718,8 +11938,8 @@ def get_event_create_rule( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.GetEventCreateRuleRequest): - request = analytics_admin.GetEventCreateRuleRequest(request) + if not isinstance(request, analytics_admin.GetEventEditRuleRequest): + request = analytics_admin.GetEventEditRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -10727,7 +11947,7 @@ def get_event_create_rule( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_event_create_rule] + rpc = self._transport._wrapped_methods[self._transport.get_event_edit_rule] # Certain fields should be provided within the metadata header; # add these here. @@ -10749,23 +11969,23 @@ def get_event_create_rule( # Done; return the response. return response - def list_event_create_rules( + def list_event_edit_rules( self, request: Optional[ - Union[analytics_admin.ListEventCreateRulesRequest, dict] + Union[analytics_admin.ListEventEditRulesRequest, dict] ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEventCreateRulesPager: - r"""Lists EventCreateRules on a web data stream. + ) -> pagers.ListEventEditRulesPager: + r"""Lists EventEditRules on a web data stream. 
Args: - request (Union[google.analytics.admin_v1alpha.types.ListEventCreateRulesRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.ListEventEditRulesRequest, dict]): The request object. Request message for - ListEventCreateRules RPC. + ListEventEditRules RPC. parent (str): Required. Example format: properties/123/dataStreams/456 @@ -10780,9 +12000,9 @@ def list_event_create_rules( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListEventCreateRulesPager: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListEventEditRulesPager: Response message for - ListEventCreateRules RPC. + ListEventEditRules RPC. Iterating over this object will yield results and resolve additional pages automatically. @@ -10800,8 +12020,8 @@ def list_event_create_rules( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.ListEventCreateRulesRequest): - request = analytics_admin.ListEventCreateRulesRequest(request) + if not isinstance(request, analytics_admin.ListEventEditRulesRequest): + request = analytics_admin.ListEventEditRulesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -10809,7 +12029,7 @@ def list_event_create_rules( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_event_create_rules] + rpc = self._transport._wrapped_methods[self._transport.list_event_edit_rules] # Certain fields should be provided within the metadata header; # add these here. @@ -10830,7 +12050,7 @@ def list_event_create_rules( # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. 
- response = pagers.ListEventCreateRulesPager( + response = pagers.ListEventEditRulesPager( method=rpc, request=request, response=response, @@ -10842,24 +12062,24 @@ def list_event_create_rules( # Done; return the response. return response - def create_event_create_rule( + def create_event_edit_rule( self, request: Optional[ - Union[analytics_admin.CreateEventCreateRuleRequest, dict] + Union[analytics_admin.CreateEventEditRuleRequest, dict] ] = None, *, parent: Optional[str] = None, - event_create_rule: Optional[event_create_and_edit.EventCreateRule] = None, + event_edit_rule: Optional[event_create_and_edit.EventEditRule] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> event_create_and_edit.EventCreateRule: - r"""Creates an EventCreateRule. + ) -> event_create_and_edit.EventEditRule: + r"""Creates an EventEditRule. Args: - request (Union[google.analytics.admin_v1alpha.types.CreateEventCreateRuleRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.CreateEventEditRuleRequest, dict]): The request object. Request message for - CreateEventCreateRule RPC. + CreateEventEditRule RPC. parent (str): Required. Example format: properties/123/dataStreams/456 @@ -10867,11 +12087,11 @@ def create_event_create_rule( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - event_create_rule (google.analytics.admin_v1alpha.types.EventCreateRule): - Required. The EventCreateRule to + event_edit_rule (google.analytics.admin_v1alpha.types.EventEditRule): + Required. The EventEditRule to create. - This corresponds to the ``event_create_rule`` field + This corresponds to the ``event_edit_rule`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -10881,27 +12101,27 @@ def create_event_create_rule( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.EventCreateRule: - An Event Create Rule defines - conditions that will trigger the - creation of an entirely new event based - upon matched criteria of a source event. - Additional mutations of the parameters - from the source event can be defined. + google.analytics.admin_v1alpha.types.EventEditRule: + An Event Edit Rule defines conditions + that will trigger the creation of an + entirely new event based upon matched + criteria of a source event. Additional + mutations of the parameters from the + source event can be defined. - Unlike Event Edit rules, Event Creation - Rules have no defined order. They will - all be run independently. + Unlike Event Create rules, Event Edit + Rules are applied in their defined + order. - Event Edit and Event Create rules can't - be used to modify an event created from - an Event Create rule. + Event Edit rules can't be used to modify + an event created from an Event Create + rule. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, event_create_rule]) + has_flattened_params = any([parent, event_edit_rule]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -10910,18 +12130,18 @@ def create_event_create_rule( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.CreateEventCreateRuleRequest): - request = analytics_admin.CreateEventCreateRuleRequest(request) + if not isinstance(request, analytics_admin.CreateEventEditRuleRequest): + request = analytics_admin.CreateEventEditRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if event_create_rule is not None: - request.event_create_rule = event_create_rule + if event_edit_rule is not None: + request.event_edit_rule = event_edit_rule # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_event_create_rule] + rpc = self._transport._wrapped_methods[self._transport.create_event_edit_rule] # Certain fields should be provided within the metadata header; # add these here. @@ -10943,30 +12163,30 @@ def create_event_create_rule( # Done; return the response. return response - def update_event_create_rule( + def update_event_edit_rule( self, request: Optional[ - Union[analytics_admin.UpdateEventCreateRuleRequest, dict] + Union[analytics_admin.UpdateEventEditRuleRequest, dict] ] = None, *, - event_create_rule: Optional[event_create_and_edit.EventCreateRule] = None, + event_edit_rule: Optional[event_create_and_edit.EventEditRule] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> event_create_and_edit.EventCreateRule: - r"""Updates an EventCreateRule. + ) -> event_create_and_edit.EventEditRule: + r"""Updates an EventEditRule. Args: - request (Union[google.analytics.admin_v1alpha.types.UpdateEventCreateRuleRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.UpdateEventEditRuleRequest, dict]): The request object. 
Request message for - UpdateEventCreateRule RPC. - event_create_rule (google.analytics.admin_v1alpha.types.EventCreateRule): - Required. The EventCreateRule to update. The resource's - ``name`` field is used to identify the EventCreateRule - to be updated. + UpdateEventEditRule RPC. + event_edit_rule (google.analytics.admin_v1alpha.types.EventEditRule): + Required. The EventEditRule to update. The resource's + ``name`` field is used to identify the EventEditRule to + be updated. - This corresponds to the ``event_create_rule`` field + This corresponds to the ``event_edit_rule`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -10986,27 +12206,27 @@ def update_event_create_rule( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.EventCreateRule: - An Event Create Rule defines - conditions that will trigger the - creation of an entirely new event based - upon matched criteria of a source event. - Additional mutations of the parameters - from the source event can be defined. + google.analytics.admin_v1alpha.types.EventEditRule: + An Event Edit Rule defines conditions + that will trigger the creation of an + entirely new event based upon matched + criteria of a source event. Additional + mutations of the parameters from the + source event can be defined. - Unlike Event Edit rules, Event Creation - Rules have no defined order. They will - all be run independently. + Unlike Event Create rules, Event Edit + Rules are applied in their defined + order. - Event Edit and Event Create rules can't - be used to modify an event created from - an Event Create rule. + Event Edit rules can't be used to modify + an event created from an Event Create + rule. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([event_create_rule, update_mask]) + has_flattened_params = any([event_edit_rule, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -11015,24 +12235,24 @@ def update_event_create_rule( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.UpdateEventCreateRuleRequest): - request = analytics_admin.UpdateEventCreateRuleRequest(request) + if not isinstance(request, analytics_admin.UpdateEventEditRuleRequest): + request = analytics_admin.UpdateEventEditRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if event_create_rule is not None: - request.event_create_rule = event_create_rule + if event_edit_rule is not None: + request.event_edit_rule = event_edit_rule if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_event_create_rule] + rpc = self._transport._wrapped_methods[self._transport.update_event_edit_rule] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("event_create_rule.name", request.event_create_rule.name),) + (("event_edit_rule.name", request.event_edit_rule.name),) ), ) @@ -11050,10 +12270,10 @@ def update_event_create_rule( # Done; return the response. 
return response - def delete_event_create_rule( + def delete_event_edit_rule( self, request: Optional[ - Union[analytics_admin.DeleteEventCreateRuleRequest, dict] + Union[analytics_admin.DeleteEventEditRuleRequest, dict] ] = None, *, name: Optional[str] = None, @@ -11061,16 +12281,15 @@ def delete_event_create_rule( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes an EventCreateRule. + r"""Deletes an EventEditRule. Args: - request (Union[google.analytics.admin_v1alpha.types.DeleteEventCreateRuleRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.DeleteEventEditRuleRequest, dict]): The request object. Request message for - DeleteEventCreateRule RPC. + DeleteEventEditRule RPC. name (str): Required. Example format: - - properties/123/dataStreams/456/eventCreateRules/789 + properties/123/dataStreams/456/eventEditRules/789 This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -11093,8 +12312,8 @@ def delete_event_create_rule( # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, analytics_admin.DeleteEventCreateRuleRequest): - request = analytics_admin.DeleteEventCreateRuleRequest(request) + if not isinstance(request, analytics_admin.DeleteEventEditRuleRequest): + request = analytics_admin.DeleteEventEditRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -11102,7 +12321,7 @@ def delete_event_create_rule( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_event_create_rule] + rpc = self._transport._wrapped_methods[self._transport.delete_event_edit_rule] # Certain fields should be provided within the metadata header; # add these here. 
@@ -11121,6 +12340,56 @@ def delete_event_create_rule( metadata=metadata, ) + def reorder_event_edit_rules( + self, + request: Optional[ + Union[analytics_admin.ReorderEventEditRulesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Changes the processing order of event edit rules on + the specified stream. + + Args: + request (Union[google.analytics.admin_v1alpha.types.ReorderEventEditRulesRequest, dict]): + The request object. Request message for + ReorderEventEditRules RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.ReorderEventEditRulesRequest): + request = analytics_admin.ReorderEventEditRulesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reorder_event_edit_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + def update_data_redaction_settings( self, request: Optional[ @@ -11425,7 +12694,7 @@ def create_calculated_metric( metric's resource name. 
This value should be 1-80 characters and valid - characters are `[a-zA-Z0-9_]`, no spaces allowed. + characters are /[a-zA-Z0-9_]/, no spaces allowed. calculated_metric_id must be unique between all calculated metrics under a property. The calculated_metric_id is used when referencing this @@ -12166,19 +13435,21 @@ def delete_rollup_property_source_link( metadata=metadata, ) - def create_subproperty( + def provision_subproperty( self, - request: Optional[Union[analytics_admin.CreateSubpropertyRequest, dict]] = None, + request: Optional[ + Union[analytics_admin.ProvisionSubpropertyRequest, dict] + ] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.CreateSubpropertyResponse: + ) -> analytics_admin.ProvisionSubpropertyResponse: r"""Create a subproperty and a subproperty event filter that applies to the created subproperty. Args: - request (Union[google.analytics.admin_v1alpha.types.CreateSubpropertyRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.ProvisionSubpropertyRequest, dict]): The request object. Request message for CreateSubproperty RPC. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -12188,20 +13459,20 @@ def create_subproperty( sent along with the request as metadata. Returns: - google.analytics.admin_v1alpha.types.CreateSubpropertyResponse: + google.analytics.admin_v1alpha.types.ProvisionSubpropertyResponse: Response message for - CreateSubproperty RPC. + ProvisionSubproperty RPC. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, analytics_admin.CreateSubpropertyRequest): - request = analytics_admin.CreateSubpropertyRequest(request) + if not isinstance(request, analytics_admin.ProvisionSubpropertyRequest): + request = analytics_admin.ProvisionSubpropertyRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_subproperty] + rpc = self._transport._wrapped_methods[self._transport.provision_subproperty] # Validate the universe domain. self._validate_universe_domain() diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/pagers.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/pagers.py index ed8c53f696c4..1b458359c185 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/pagers.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/pagers.py @@ -1442,6 +1442,158 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListKeyEventsPager: + """A pager for iterating through ``list_key_events`` requests. + + This class thinly wraps an initial + :class:`google.analytics.admin_v1alpha.types.ListKeyEventsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``key_events`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListKeyEvents`` requests and continue to iterate + through the ``key_events`` field on the + corresponding responses. + + All the usual :class:`google.analytics.admin_v1alpha.types.ListKeyEventsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., analytics_admin.ListKeyEventsResponse], + request: analytics_admin.ListKeyEventsRequest, + response: analytics_admin.ListKeyEventsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.analytics.admin_v1alpha.types.ListKeyEventsRequest): + The initial request object. + response (google.analytics.admin_v1alpha.types.ListKeyEventsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = analytics_admin.ListKeyEventsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[analytics_admin.ListKeyEventsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[resources.KeyEvent]: + for page in self.pages: + yield from page.key_events + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListKeyEventsAsyncPager: + """A pager for iterating through ``list_key_events`` requests. 
+ + This class thinly wraps an initial + :class:`google.analytics.admin_v1alpha.types.ListKeyEventsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``key_events`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListKeyEvents`` requests and continue to iterate + through the ``key_events`` field on the + corresponding responses. + + All the usual :class:`google.analytics.admin_v1alpha.types.ListKeyEventsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[analytics_admin.ListKeyEventsResponse]], + request: analytics_admin.ListKeyEventsRequest, + response: analytics_admin.ListKeyEventsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.analytics.admin_v1alpha.types.ListKeyEventsRequest): + The initial request object. + response (google.analytics.admin_v1alpha.types.ListKeyEventsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = analytics_admin.ListKeyEventsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[analytics_admin.ListKeyEventsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.KeyEvent]: + async def async_generator(): + async for page in self.pages: + for response in page.key_events: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListDisplayVideo360AdvertiserLinksPager: """A pager for iterating through ``list_display_video360_advertiser_links`` requests. @@ -3457,6 +3609,158 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListEventEditRulesPager: + """A pager for iterating through ``list_event_edit_rules`` requests. + + This class thinly wraps an initial + :class:`google.analytics.admin_v1alpha.types.ListEventEditRulesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``event_edit_rules`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEventEditRules`` requests and continue to iterate + through the ``event_edit_rules`` field on the + corresponding responses. + + All the usual :class:`google.analytics.admin_v1alpha.types.ListEventEditRulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., analytics_admin.ListEventEditRulesResponse], + request: analytics_admin.ListEventEditRulesRequest, + response: analytics_admin.ListEventEditRulesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.analytics.admin_v1alpha.types.ListEventEditRulesRequest): + The initial request object. + response (google.analytics.admin_v1alpha.types.ListEventEditRulesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = analytics_admin.ListEventEditRulesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[analytics_admin.ListEventEditRulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[event_create_and_edit.EventEditRule]: + for page in self.pages: + yield from page.event_edit_rules + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListEventEditRulesAsyncPager: + """A pager for iterating through ``list_event_edit_rules`` requests. 
+ + This class thinly wraps an initial + :class:`google.analytics.admin_v1alpha.types.ListEventEditRulesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``event_edit_rules`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEventEditRules`` requests and continue to iterate + through the ``event_edit_rules`` field on the + corresponding responses. + + All the usual :class:`google.analytics.admin_v1alpha.types.ListEventEditRulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[analytics_admin.ListEventEditRulesResponse]], + request: analytics_admin.ListEventEditRulesRequest, + response: analytics_admin.ListEventEditRulesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.analytics.admin_v1alpha.types.ListEventEditRulesRequest): + The initial request object. + response (google.analytics.admin_v1alpha.types.ListEventEditRulesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = analytics_admin.ListEventEditRulesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[analytics_admin.ListEventEditRulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[event_create_and_edit.EventEditRule]: + async def async_generator(): + async for page in self.pages: + for response in page.event_edit_rules: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListCalculatedMetricsPager: """A pager for iterating through ``list_calculated_metrics`` requests. 
diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/base.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/base.py index 9e0d10b2ba63..b920e8f88c08 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/base.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/base.py @@ -343,6 +343,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_key_event: gapic_v1.method.wrap_method( + self.create_key_event, + default_timeout=None, + client_info=client_info, + ), + self.update_key_event: gapic_v1.method.wrap_method( + self.update_key_event, + default_timeout=None, + client_info=client_info, + ), + self.get_key_event: gapic_v1.method.wrap_method( + self.get_key_event, + default_timeout=None, + client_info=client_info, + ), + self.delete_key_event: gapic_v1.method.wrap_method( + self.delete_key_event, + default_timeout=None, + client_info=client_info, + ), + self.list_key_events: gapic_v1.method.wrap_method( + self.list_key_events, + default_timeout=None, + client_info=client_info, + ), self.get_display_video360_advertiser_link: gapic_v1.method.wrap_method( self.get_display_video360_advertiser_link, default_timeout=None, @@ -653,6 +678,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_big_query_link: gapic_v1.method.wrap_method( + self.create_big_query_link, + default_timeout=None, + client_info=client_info, + ), self.get_big_query_link: gapic_v1.method.wrap_method( self.get_big_query_link, default_timeout=None, @@ -663,6 +693,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.delete_big_query_link: gapic_v1.method.wrap_method( + 
self.delete_big_query_link, + default_timeout=None, + client_info=client_info, + ), + self.update_big_query_link: gapic_v1.method.wrap_method( + self.update_big_query_link, + default_timeout=None, + client_info=client_info, + ), self.get_enhanced_measurement_settings: gapic_v1.method.wrap_method( self.get_enhanced_measurement_settings, default_timeout=60.0, @@ -738,6 +778,36 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_event_edit_rule: gapic_v1.method.wrap_method( + self.get_event_edit_rule, + default_timeout=None, + client_info=client_info, + ), + self.list_event_edit_rules: gapic_v1.method.wrap_method( + self.list_event_edit_rules, + default_timeout=None, + client_info=client_info, + ), + self.create_event_edit_rule: gapic_v1.method.wrap_method( + self.create_event_edit_rule, + default_timeout=None, + client_info=client_info, + ), + self.update_event_edit_rule: gapic_v1.method.wrap_method( + self.update_event_edit_rule, + default_timeout=None, + client_info=client_info, + ), + self.delete_event_edit_rule: gapic_v1.method.wrap_method( + self.delete_event_edit_rule, + default_timeout=None, + client_info=client_info, + ), + self.reorder_event_edit_rules: gapic_v1.method.wrap_method( + self.reorder_event_edit_rules, + default_timeout=None, + client_info=client_info, + ), self.update_data_redaction_settings: gapic_v1.method.wrap_method( self.update_data_redaction_settings, default_timeout=None, @@ -798,8 +868,8 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - self.create_subproperty: gapic_v1.method.wrap_method( - self.create_subproperty, + self.provision_subproperty: gapic_v1.method.wrap_method( + self.provision_subproperty, default_timeout=None, client_info=client_info, ), @@ -1245,6 +1315,54 @@ def list_conversion_events( ]: raise NotImplementedError() + @property + def create_key_event( + self, + ) -> Callable[ + 
[analytics_admin.CreateKeyEventRequest], + Union[resources.KeyEvent, Awaitable[resources.KeyEvent]], + ]: + raise NotImplementedError() + + @property + def update_key_event( + self, + ) -> Callable[ + [analytics_admin.UpdateKeyEventRequest], + Union[resources.KeyEvent, Awaitable[resources.KeyEvent]], + ]: + raise NotImplementedError() + + @property + def get_key_event( + self, + ) -> Callable[ + [analytics_admin.GetKeyEventRequest], + Union[resources.KeyEvent, Awaitable[resources.KeyEvent]], + ]: + raise NotImplementedError() + + @property + def delete_key_event( + self, + ) -> Callable[ + [analytics_admin.DeleteKeyEventRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_key_events( + self, + ) -> Callable[ + [analytics_admin.ListKeyEventsRequest], + Union[ + analytics_admin.ListKeyEventsResponse, + Awaitable[analytics_admin.ListKeyEventsResponse], + ], + ]: + raise NotImplementedError() + @property def get_display_video360_advertiser_link( self, @@ -1893,6 +2011,15 @@ def fetch_automated_ga4_configuration_opt_out( ]: raise NotImplementedError() + @property + def create_big_query_link( + self, + ) -> Callable[ + [analytics_admin.CreateBigQueryLinkRequest], + Union[resources.BigQueryLink, Awaitable[resources.BigQueryLink]], + ]: + raise NotImplementedError() + @property def get_big_query_link( self, @@ -1914,6 +2041,24 @@ def list_big_query_links( ]: raise NotImplementedError() + @property + def delete_big_query_link( + self, + ) -> Callable[ + [analytics_admin.DeleteBigQueryLinkRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def update_big_query_link( + self, + ) -> Callable[ + [analytics_admin.UpdateBigQueryLinkRequest], + Union[resources.BigQueryLink, Awaitable[resources.BigQueryLink]], + ]: + raise NotImplementedError() + @property def get_enhanced_measurement_settings( self, @@ -2079,6 +2224,72 @@ def 
delete_event_create_rule( ]: raise NotImplementedError() + @property + def get_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.GetEventEditRuleRequest], + Union[ + event_create_and_edit.EventEditRule, + Awaitable[event_create_and_edit.EventEditRule], + ], + ]: + raise NotImplementedError() + + @property + def list_event_edit_rules( + self, + ) -> Callable[ + [analytics_admin.ListEventEditRulesRequest], + Union[ + analytics_admin.ListEventEditRulesResponse, + Awaitable[analytics_admin.ListEventEditRulesResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.CreateEventEditRuleRequest], + Union[ + event_create_and_edit.EventEditRule, + Awaitable[event_create_and_edit.EventEditRule], + ], + ]: + raise NotImplementedError() + + @property + def update_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.UpdateEventEditRuleRequest], + Union[ + event_create_and_edit.EventEditRule, + Awaitable[event_create_and_edit.EventEditRule], + ], + ]: + raise NotImplementedError() + + @property + def delete_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.DeleteEventEditRuleRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def reorder_event_edit_rules( + self, + ) -> Callable[ + [analytics_admin.ReorderEventEditRulesRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + @property def update_data_redaction_settings( self, @@ -2207,13 +2418,13 @@ def delete_rollup_property_source_link( raise NotImplementedError() @property - def create_subproperty( + def provision_subproperty( self, ) -> Callable[ - [analytics_admin.CreateSubpropertyRequest], + [analytics_admin.ProvisionSubpropertyRequest], Union[ - analytics_admin.CreateSubpropertyResponse, - Awaitable[analytics_admin.CreateSubpropertyResponse], + analytics_admin.ProvisionSubpropertyResponse, + 
Awaitable[analytics_admin.ProvisionSubpropertyResponse], ], ]: raise NotImplementedError() diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py index 4d51d993b37b..e73db13774f3 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py @@ -1289,8 +1289,8 @@ def create_conversion_event( ]: r"""Return a callable for the create conversion event method over gRPC. - Creates a conversion event with the specified - attributes. + Deprecated: Use ``CreateKeyEvent`` instead. Creates a conversion + event with the specified attributes. Returns: Callable[[~.CreateConversionEventRequest], @@ -1318,8 +1318,8 @@ def update_conversion_event( ]: r"""Return a callable for the update conversion event method over gRPC. - Updates a conversion event with the specified - attributes. + Deprecated: Use ``UpdateKeyEvent`` instead. Updates a conversion + event with the specified attributes. Returns: Callable[[~.UpdateConversionEventRequest], @@ -1347,7 +1347,8 @@ def get_conversion_event( ]: r"""Return a callable for the get conversion event method over gRPC. - Retrieve a single conversion event. + Deprecated: Use ``GetKeyEvent`` instead. Retrieve a single + conversion event. Returns: Callable[[~.GetConversionEventRequest], @@ -1373,7 +1374,8 @@ def delete_conversion_event( ) -> Callable[[analytics_admin.DeleteConversionEventRequest], empty_pb2.Empty]: r"""Return a callable for the delete conversion event method over gRPC. - Deletes a conversion event in a property. + Deprecated: Use ``DeleteKeyEvent`` instead. Deletes a conversion + event in a property. 
Returns: Callable[[~.DeleteConversionEventRequest], @@ -1402,8 +1404,9 @@ def list_conversion_events( ]: r"""Return a callable for the list conversion events method over gRPC. - Returns a list of conversion events in the specified - parent property. + Deprecated: Use ``ListKeyEvents`` instead. Returns a list of + conversion events in the specified parent property. + Returns an empty list if no conversion events are found. Returns: @@ -1424,6 +1427,140 @@ def list_conversion_events( ) return self._stubs["list_conversion_events"] + @property + def create_key_event( + self, + ) -> Callable[[analytics_admin.CreateKeyEventRequest], resources.KeyEvent]: + r"""Return a callable for the create key event method over gRPC. + + Creates a Key Event. + + Returns: + Callable[[~.CreateKeyEventRequest], + ~.KeyEvent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_key_event" not in self._stubs: + self._stubs["create_key_event"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateKeyEvent", + request_serializer=analytics_admin.CreateKeyEventRequest.serialize, + response_deserializer=resources.KeyEvent.deserialize, + ) + return self._stubs["create_key_event"] + + @property + def update_key_event( + self, + ) -> Callable[[analytics_admin.UpdateKeyEventRequest], resources.KeyEvent]: + r"""Return a callable for the update key event method over gRPC. + + Updates a Key Event. + + Returns: + Callable[[~.UpdateKeyEventRequest], + ~.KeyEvent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_key_event" not in self._stubs: + self._stubs["update_key_event"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateKeyEvent", + request_serializer=analytics_admin.UpdateKeyEventRequest.serialize, + response_deserializer=resources.KeyEvent.deserialize, + ) + return self._stubs["update_key_event"] + + @property + def get_key_event( + self, + ) -> Callable[[analytics_admin.GetKeyEventRequest], resources.KeyEvent]: + r"""Return a callable for the get key event method over gRPC. + + Retrieve a single Key Event. + + Returns: + Callable[[~.GetKeyEventRequest], + ~.KeyEvent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_key_event" not in self._stubs: + self._stubs["get_key_event"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/GetKeyEvent", + request_serializer=analytics_admin.GetKeyEventRequest.serialize, + response_deserializer=resources.KeyEvent.deserialize, + ) + return self._stubs["get_key_event"] + + @property + def delete_key_event( + self, + ) -> Callable[[analytics_admin.DeleteKeyEventRequest], empty_pb2.Empty]: + r"""Return a callable for the delete key event method over gRPC. + + Deletes a Key Event. + + Returns: + Callable[[~.DeleteKeyEventRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_key_event" not in self._stubs: + self._stubs["delete_key_event"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteKeyEvent", + request_serializer=analytics_admin.DeleteKeyEventRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_key_event"] + + @property + def list_key_events( + self, + ) -> Callable[ + [analytics_admin.ListKeyEventsRequest], analytics_admin.ListKeyEventsResponse + ]: + r"""Return a callable for the list key events method over gRPC. + + Returns a list of Key Events in the specified parent + property. Returns an empty list if no Key Events are + found. + + Returns: + Callable[[~.ListKeyEventsRequest], + ~.ListKeyEventsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_key_events" not in self._stubs: + self._stubs["list_key_events"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/ListKeyEvents", + request_serializer=analytics_admin.ListKeyEventsRequest.serialize, + response_deserializer=analytics_admin.ListKeyEventsResponse.deserialize, + ) + return self._stubs["list_key_events"] + @property def get_display_video360_advertiser_link( self, @@ -3243,6 +3380,32 @@ def fetch_automated_ga4_configuration_opt_out( ) return self._stubs["fetch_automated_ga4_configuration_opt_out"] + @property + def create_big_query_link( + self, + ) -> Callable[[analytics_admin.CreateBigQueryLinkRequest], resources.BigQueryLink]: + r"""Return a callable for the create big query link method over gRPC. + + Creates a BigQueryLink. + + Returns: + Callable[[~.CreateBigQueryLinkRequest], + ~.BigQueryLink]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_big_query_link" not in self._stubs: + self._stubs["create_big_query_link"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateBigQueryLink", + request_serializer=analytics_admin.CreateBigQueryLinkRequest.serialize, + response_deserializer=resources.BigQueryLink.deserialize, + ) + return self._stubs["create_big_query_link"] + @property def get_big_query_link( self, @@ -3298,6 +3461,58 @@ def list_big_query_links( ) return self._stubs["list_big_query_links"] + @property + def delete_big_query_link( + self, + ) -> Callable[[analytics_admin.DeleteBigQueryLinkRequest], empty_pb2.Empty]: + r"""Return a callable for the delete big query link method over gRPC. + + Deletes a BigQueryLink on a property. + + Returns: + Callable[[~.DeleteBigQueryLinkRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_big_query_link" not in self._stubs: + self._stubs["delete_big_query_link"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteBigQueryLink", + request_serializer=analytics_admin.DeleteBigQueryLinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_big_query_link"] + + @property + def update_big_query_link( + self, + ) -> Callable[[analytics_admin.UpdateBigQueryLinkRequest], resources.BigQueryLink]: + r"""Return a callable for the update big query link method over gRPC. + + Updates a BigQueryLink. 
+ + Returns: + Callable[[~.UpdateBigQueryLinkRequest], + ~.BigQueryLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_big_query_link" not in self._stubs: + self._stubs["update_big_query_link"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateBigQueryLink", + request_serializer=analytics_admin.UpdateBigQueryLinkRequest.serialize, + response_deserializer=resources.BigQueryLink.deserialize, + ) + return self._stubs["update_big_query_link"] + @property def get_enhanced_measurement_settings( self, @@ -3738,6 +3953,174 @@ def delete_event_create_rule( ) return self._stubs["delete_event_create_rule"] + @property + def get_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.GetEventEditRuleRequest], event_create_and_edit.EventEditRule + ]: + r"""Return a callable for the get event edit rule method over gRPC. + + Lookup for a single EventEditRule. + + Returns: + Callable[[~.GetEventEditRuleRequest], + ~.EventEditRule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_event_edit_rule" not in self._stubs: + self._stubs["get_event_edit_rule"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/GetEventEditRule", + request_serializer=analytics_admin.GetEventEditRuleRequest.serialize, + response_deserializer=event_create_and_edit.EventEditRule.deserialize, + ) + return self._stubs["get_event_edit_rule"] + + @property + def list_event_edit_rules( + self, + ) -> Callable[ + [analytics_admin.ListEventEditRulesRequest], + analytics_admin.ListEventEditRulesResponse, + ]: + r"""Return a callable for the list event edit rules method over gRPC. + + Lists EventEditRules on a web data stream. + + Returns: + Callable[[~.ListEventEditRulesRequest], + ~.ListEventEditRulesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_event_edit_rules" not in self._stubs: + self._stubs["list_event_edit_rules"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/ListEventEditRules", + request_serializer=analytics_admin.ListEventEditRulesRequest.serialize, + response_deserializer=analytics_admin.ListEventEditRulesResponse.deserialize, + ) + return self._stubs["list_event_edit_rules"] + + @property + def create_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.CreateEventEditRuleRequest], + event_create_and_edit.EventEditRule, + ]: + r"""Return a callable for the create event edit rule method over gRPC. + + Creates an EventEditRule. + + Returns: + Callable[[~.CreateEventEditRuleRequest], + ~.EventEditRule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_event_edit_rule" not in self._stubs: + self._stubs["create_event_edit_rule"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateEventEditRule", + request_serializer=analytics_admin.CreateEventEditRuleRequest.serialize, + response_deserializer=event_create_and_edit.EventEditRule.deserialize, + ) + return self._stubs["create_event_edit_rule"] + + @property + def update_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.UpdateEventEditRuleRequest], + event_create_and_edit.EventEditRule, + ]: + r"""Return a callable for the update event edit rule method over gRPC. + + Updates an EventEditRule. + + Returns: + Callable[[~.UpdateEventEditRuleRequest], + ~.EventEditRule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_event_edit_rule" not in self._stubs: + self._stubs["update_event_edit_rule"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateEventEditRule", + request_serializer=analytics_admin.UpdateEventEditRuleRequest.serialize, + response_deserializer=event_create_and_edit.EventEditRule.deserialize, + ) + return self._stubs["update_event_edit_rule"] + + @property + def delete_event_edit_rule( + self, + ) -> Callable[[analytics_admin.DeleteEventEditRuleRequest], empty_pb2.Empty]: + r"""Return a callable for the delete event edit rule method over gRPC. + + Deletes an EventEditRule. + + Returns: + Callable[[~.DeleteEventEditRuleRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_event_edit_rule" not in self._stubs: + self._stubs["delete_event_edit_rule"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteEventEditRule", + request_serializer=analytics_admin.DeleteEventEditRuleRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_event_edit_rule"] + + @property + def reorder_event_edit_rules( + self, + ) -> Callable[[analytics_admin.ReorderEventEditRulesRequest], empty_pb2.Empty]: + r"""Return a callable for the reorder event edit rules method over gRPC. + + Changes the processing order of event edit rules on + the specified stream. + + Returns: + Callable[[~.ReorderEventEditRulesRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "reorder_event_edit_rules" not in self._stubs: + self._stubs["reorder_event_edit_rules"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/ReorderEventEditRules", + request_serializer=analytics_admin.ReorderEventEditRulesRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["reorder_event_edit_rules"] + @property def update_data_redaction_settings( self, @@ -4107,20 +4490,20 @@ def delete_rollup_property_source_link( return self._stubs["delete_rollup_property_source_link"] @property - def create_subproperty( + def provision_subproperty( self, ) -> Callable[ - [analytics_admin.CreateSubpropertyRequest], - analytics_admin.CreateSubpropertyResponse, + [analytics_admin.ProvisionSubpropertyRequest], + analytics_admin.ProvisionSubpropertyResponse, ]: - r"""Return a callable for the create subproperty method over gRPC. + r"""Return a callable for the provision subproperty method over gRPC. Create a subproperty and a subproperty event filter that applies to the created subproperty. Returns: - Callable[[~.CreateSubpropertyRequest], - ~.CreateSubpropertyResponse]: + Callable[[~.ProvisionSubpropertyRequest], + ~.ProvisionSubpropertyResponse]: A function that, when called, will call the underlying RPC on the server. """ @@ -4128,13 +4511,13 @@ def create_subproperty( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "create_subproperty" not in self._stubs: - self._stubs["create_subproperty"] = self.grpc_channel.unary_unary( - "/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateSubproperty", - request_serializer=analytics_admin.CreateSubpropertyRequest.serialize, - response_deserializer=analytics_admin.CreateSubpropertyResponse.deserialize, + if "provision_subproperty" not in self._stubs: + self._stubs["provision_subproperty"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/ProvisionSubproperty", + request_serializer=analytics_admin.ProvisionSubpropertyRequest.serialize, + response_deserializer=analytics_admin.ProvisionSubpropertyResponse.deserialize, ) - return self._stubs["create_subproperty"] + return self._stubs["provision_subproperty"] @property def create_subproperty_event_filter( diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py index 74e6b73f4ffd..4b19ffcc0cbf 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py @@ -1313,8 +1313,8 @@ def create_conversion_event( ]: r"""Return a callable for the create conversion event method over gRPC. - Creates a conversion event with the specified - attributes. + Deprecated: Use ``CreateKeyEvent`` instead. Creates a conversion + event with the specified attributes. Returns: Callable[[~.CreateConversionEventRequest], @@ -1343,8 +1343,8 @@ def update_conversion_event( ]: r"""Return a callable for the update conversion event method over gRPC. - Updates a conversion event with the specified - attributes. + Deprecated: Use ``UpdateKeyEvent`` instead. 
Updates a conversion + event with the specified attributes. Returns: Callable[[~.UpdateConversionEventRequest], @@ -1373,7 +1373,8 @@ def get_conversion_event( ]: r"""Return a callable for the get conversion event method over gRPC. - Retrieve a single conversion event. + Deprecated: Use ``GetKeyEvent`` instead. Retrieve a single + conversion event. Returns: Callable[[~.GetConversionEventRequest], @@ -1401,7 +1402,8 @@ def delete_conversion_event( ]: r"""Return a callable for the delete conversion event method over gRPC. - Deletes a conversion event in a property. + Deprecated: Use ``DeleteKeyEvent`` instead. Deletes a conversion + event in a property. Returns: Callable[[~.DeleteConversionEventRequest], @@ -1430,8 +1432,9 @@ def list_conversion_events( ]: r"""Return a callable for the list conversion events method over gRPC. - Returns a list of conversion events in the specified - parent property. + Deprecated: Use ``ListKeyEvents`` instead. Returns a list of + conversion events in the specified parent property. + Returns an empty list if no conversion events are found. Returns: @@ -1452,6 +1455,145 @@ def list_conversion_events( ) return self._stubs["list_conversion_events"] + @property + def create_key_event( + self, + ) -> Callable[ + [analytics_admin.CreateKeyEventRequest], Awaitable[resources.KeyEvent] + ]: + r"""Return a callable for the create key event method over gRPC. + + Creates a Key Event. + + Returns: + Callable[[~.CreateKeyEventRequest], + Awaitable[~.KeyEvent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_key_event" not in self._stubs: + self._stubs["create_key_event"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateKeyEvent", + request_serializer=analytics_admin.CreateKeyEventRequest.serialize, + response_deserializer=resources.KeyEvent.deserialize, + ) + return self._stubs["create_key_event"] + + @property + def update_key_event( + self, + ) -> Callable[ + [analytics_admin.UpdateKeyEventRequest], Awaitable[resources.KeyEvent] + ]: + r"""Return a callable for the update key event method over gRPC. + + Updates a Key Event. + + Returns: + Callable[[~.UpdateKeyEventRequest], + Awaitable[~.KeyEvent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_key_event" not in self._stubs: + self._stubs["update_key_event"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateKeyEvent", + request_serializer=analytics_admin.UpdateKeyEventRequest.serialize, + response_deserializer=resources.KeyEvent.deserialize, + ) + return self._stubs["update_key_event"] + + @property + def get_key_event( + self, + ) -> Callable[[analytics_admin.GetKeyEventRequest], Awaitable[resources.KeyEvent]]: + r"""Return a callable for the get key event method over gRPC. + + Retrieve a single Key Event. + + Returns: + Callable[[~.GetKeyEventRequest], + Awaitable[~.KeyEvent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_key_event" not in self._stubs: + self._stubs["get_key_event"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/GetKeyEvent", + request_serializer=analytics_admin.GetKeyEventRequest.serialize, + response_deserializer=resources.KeyEvent.deserialize, + ) + return self._stubs["get_key_event"] + + @property + def delete_key_event( + self, + ) -> Callable[[analytics_admin.DeleteKeyEventRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete key event method over gRPC. + + Deletes a Key Event. + + Returns: + Callable[[~.DeleteKeyEventRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_key_event" not in self._stubs: + self._stubs["delete_key_event"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteKeyEvent", + request_serializer=analytics_admin.DeleteKeyEventRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_key_event"] + + @property + def list_key_events( + self, + ) -> Callable[ + [analytics_admin.ListKeyEventsRequest], + Awaitable[analytics_admin.ListKeyEventsResponse], + ]: + r"""Return a callable for the list key events method over gRPC. + + Returns a list of Key Events in the specified parent + property. Returns an empty list if no Key Events are + found. + + Returns: + Callable[[~.ListKeyEventsRequest], + Awaitable[~.ListKeyEventsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_key_events" not in self._stubs: + self._stubs["list_key_events"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/ListKeyEvents", + request_serializer=analytics_admin.ListKeyEventsRequest.serialize, + response_deserializer=analytics_admin.ListKeyEventsResponse.deserialize, + ) + return self._stubs["list_key_events"] + @property def get_display_video360_advertiser_link( self, @@ -3319,6 +3461,34 @@ def fetch_automated_ga4_configuration_opt_out( ) return self._stubs["fetch_automated_ga4_configuration_opt_out"] + @property + def create_big_query_link( + self, + ) -> Callable[ + [analytics_admin.CreateBigQueryLinkRequest], Awaitable[resources.BigQueryLink] + ]: + r"""Return a callable for the create big query link method over gRPC. + + Creates a BigQueryLink. + + Returns: + Callable[[~.CreateBigQueryLinkRequest], + Awaitable[~.BigQueryLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_big_query_link" not in self._stubs: + self._stubs["create_big_query_link"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateBigQueryLink", + request_serializer=analytics_admin.CreateBigQueryLinkRequest.serialize, + response_deserializer=resources.BigQueryLink.deserialize, + ) + return self._stubs["create_big_query_link"] + @property def get_big_query_link( self, @@ -3376,6 +3546,62 @@ def list_big_query_links( ) return self._stubs["list_big_query_links"] + @property + def delete_big_query_link( + self, + ) -> Callable[ + [analytics_admin.DeleteBigQueryLinkRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete big query link method over gRPC. + + Deletes a BigQueryLink on a property. 
+ + Returns: + Callable[[~.DeleteBigQueryLinkRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_big_query_link" not in self._stubs: + self._stubs["delete_big_query_link"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteBigQueryLink", + request_serializer=analytics_admin.DeleteBigQueryLinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_big_query_link"] + + @property + def update_big_query_link( + self, + ) -> Callable[ + [analytics_admin.UpdateBigQueryLinkRequest], Awaitable[resources.BigQueryLink] + ]: + r"""Return a callable for the update big query link method over gRPC. + + Updates a BigQueryLink. + + Returns: + Callable[[~.UpdateBigQueryLinkRequest], + Awaitable[~.BigQueryLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_big_query_link" not in self._stubs: + self._stubs["update_big_query_link"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateBigQueryLink", + request_serializer=analytics_admin.UpdateBigQueryLinkRequest.serialize, + response_deserializer=resources.BigQueryLink.deserialize, + ) + return self._stubs["update_big_query_link"] + @property def get_enhanced_measurement_settings( self, @@ -3826,6 +4052,179 @@ def delete_event_create_rule( ) return self._stubs["delete_event_create_rule"] + @property + def get_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.GetEventEditRuleRequest], + Awaitable[event_create_and_edit.EventEditRule], + ]: + r"""Return a callable for the get event edit rule method over gRPC. + + Lookup for a single EventEditRule. + + Returns: + Callable[[~.GetEventEditRuleRequest], + Awaitable[~.EventEditRule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_event_edit_rule" not in self._stubs: + self._stubs["get_event_edit_rule"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/GetEventEditRule", + request_serializer=analytics_admin.GetEventEditRuleRequest.serialize, + response_deserializer=event_create_and_edit.EventEditRule.deserialize, + ) + return self._stubs["get_event_edit_rule"] + + @property + def list_event_edit_rules( + self, + ) -> Callable[ + [analytics_admin.ListEventEditRulesRequest], + Awaitable[analytics_admin.ListEventEditRulesResponse], + ]: + r"""Return a callable for the list event edit rules method over gRPC. + + Lists EventEditRules on a web data stream. 
+ + Returns: + Callable[[~.ListEventEditRulesRequest], + Awaitable[~.ListEventEditRulesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_event_edit_rules" not in self._stubs: + self._stubs["list_event_edit_rules"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/ListEventEditRules", + request_serializer=analytics_admin.ListEventEditRulesRequest.serialize, + response_deserializer=analytics_admin.ListEventEditRulesResponse.deserialize, + ) + return self._stubs["list_event_edit_rules"] + + @property + def create_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.CreateEventEditRuleRequest], + Awaitable[event_create_and_edit.EventEditRule], + ]: + r"""Return a callable for the create event edit rule method over gRPC. + + Creates an EventEditRule. + + Returns: + Callable[[~.CreateEventEditRuleRequest], + Awaitable[~.EventEditRule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_event_edit_rule" not in self._stubs: + self._stubs["create_event_edit_rule"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateEventEditRule", + request_serializer=analytics_admin.CreateEventEditRuleRequest.serialize, + response_deserializer=event_create_and_edit.EventEditRule.deserialize, + ) + return self._stubs["create_event_edit_rule"] + + @property + def update_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.UpdateEventEditRuleRequest], + Awaitable[event_create_and_edit.EventEditRule], + ]: + r"""Return a callable for the update event edit rule method over gRPC. + + Updates an EventEditRule. + + Returns: + Callable[[~.UpdateEventEditRuleRequest], + Awaitable[~.EventEditRule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_event_edit_rule" not in self._stubs: + self._stubs["update_event_edit_rule"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateEventEditRule", + request_serializer=analytics_admin.UpdateEventEditRuleRequest.serialize, + response_deserializer=event_create_and_edit.EventEditRule.deserialize, + ) + return self._stubs["update_event_edit_rule"] + + @property + def delete_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.DeleteEventEditRuleRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete event edit rule method over gRPC. + + Deletes an EventEditRule. + + Returns: + Callable[[~.DeleteEventEditRuleRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_event_edit_rule" not in self._stubs: + self._stubs["delete_event_edit_rule"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteEventEditRule", + request_serializer=analytics_admin.DeleteEventEditRuleRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_event_edit_rule"] + + @property + def reorder_event_edit_rules( + self, + ) -> Callable[ + [analytics_admin.ReorderEventEditRulesRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the reorder event edit rules method over gRPC. + + Changes the processing order of event edit rules on + the specified stream. + + Returns: + Callable[[~.ReorderEventEditRulesRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "reorder_event_edit_rules" not in self._stubs: + self._stubs["reorder_event_edit_rules"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/ReorderEventEditRules", + request_serializer=analytics_admin.ReorderEventEditRulesRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["reorder_event_edit_rules"] + @property def update_data_redaction_settings( self, @@ -4201,20 +4600,20 @@ def delete_rollup_property_source_link( return self._stubs["delete_rollup_property_source_link"] @property - def create_subproperty( + def provision_subproperty( self, ) -> Callable[ - [analytics_admin.CreateSubpropertyRequest], - Awaitable[analytics_admin.CreateSubpropertyResponse], + [analytics_admin.ProvisionSubpropertyRequest], + Awaitable[analytics_admin.ProvisionSubpropertyResponse], ]: - r"""Return a callable for the create subproperty method over gRPC. + r"""Return a callable for the provision subproperty method over gRPC. Create a subproperty and a subproperty event filter that applies to the created subproperty. Returns: - Callable[[~.CreateSubpropertyRequest], - Awaitable[~.CreateSubpropertyResponse]]: + Callable[[~.ProvisionSubpropertyRequest], + Awaitable[~.ProvisionSubpropertyResponse]]: A function that, when called, will call the underlying RPC on the server. """ @@ -4222,13 +4621,13 @@ def create_subproperty( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "create_subproperty" not in self._stubs: - self._stubs["create_subproperty"] = self.grpc_channel.unary_unary( - "/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateSubproperty", - request_serializer=analytics_admin.CreateSubpropertyRequest.serialize, - response_deserializer=analytics_admin.CreateSubpropertyResponse.deserialize, + if "provision_subproperty" not in self._stubs: + self._stubs["provision_subproperty"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/ProvisionSubproperty", + request_serializer=analytics_admin.ProvisionSubpropertyRequest.serialize, + response_deserializer=analytics_admin.ProvisionSubpropertyResponse.deserialize, ) - return self._stubs["create_subproperty"] + return self._stubs["provision_subproperty"] @property def create_subproperty_event_filter( @@ -4584,6 +4983,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_key_event: gapic_v1.method_async.wrap_method( + self.create_key_event, + default_timeout=None, + client_info=client_info, + ), + self.update_key_event: gapic_v1.method_async.wrap_method( + self.update_key_event, + default_timeout=None, + client_info=client_info, + ), + self.get_key_event: gapic_v1.method_async.wrap_method( + self.get_key_event, + default_timeout=None, + client_info=client_info, + ), + self.delete_key_event: gapic_v1.method_async.wrap_method( + self.delete_key_event, + default_timeout=None, + client_info=client_info, + ), + self.list_key_events: gapic_v1.method_async.wrap_method( + self.list_key_events, + default_timeout=None, + client_info=client_info, + ), self.get_display_video360_advertiser_link: gapic_v1.method_async.wrap_method( self.get_display_video360_advertiser_link, default_timeout=None, @@ -4894,6 +5318,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_big_query_link: gapic_v1.method_async.wrap_method( + 
self.create_big_query_link, + default_timeout=None, + client_info=client_info, + ), self.get_big_query_link: gapic_v1.method_async.wrap_method( self.get_big_query_link, default_timeout=None, @@ -4904,6 +5333,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.delete_big_query_link: gapic_v1.method_async.wrap_method( + self.delete_big_query_link, + default_timeout=None, + client_info=client_info, + ), + self.update_big_query_link: gapic_v1.method_async.wrap_method( + self.update_big_query_link, + default_timeout=None, + client_info=client_info, + ), self.get_enhanced_measurement_settings: gapic_v1.method_async.wrap_method( self.get_enhanced_measurement_settings, default_timeout=60.0, @@ -4979,6 +5418,36 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_event_edit_rule: gapic_v1.method_async.wrap_method( + self.get_event_edit_rule, + default_timeout=None, + client_info=client_info, + ), + self.list_event_edit_rules: gapic_v1.method_async.wrap_method( + self.list_event_edit_rules, + default_timeout=None, + client_info=client_info, + ), + self.create_event_edit_rule: gapic_v1.method_async.wrap_method( + self.create_event_edit_rule, + default_timeout=None, + client_info=client_info, + ), + self.update_event_edit_rule: gapic_v1.method_async.wrap_method( + self.update_event_edit_rule, + default_timeout=None, + client_info=client_info, + ), + self.delete_event_edit_rule: gapic_v1.method_async.wrap_method( + self.delete_event_edit_rule, + default_timeout=None, + client_info=client_info, + ), + self.reorder_event_edit_rules: gapic_v1.method_async.wrap_method( + self.reorder_event_edit_rules, + default_timeout=None, + client_info=client_info, + ), self.update_data_redaction_settings: gapic_v1.method_async.wrap_method( self.update_data_redaction_settings, default_timeout=None, @@ -5039,8 +5508,8 @@ def _prep_wrapped_messages(self, client_info): 
default_timeout=None, client_info=client_info, ), - self.create_subproperty: gapic_v1.method_async.wrap_method( - self.create_subproperty, + self.provision_subproperty: gapic_v1.method_async.wrap_method( + self.provision_subproperty, default_timeout=None, client_info=client_info, ), diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py index 6efb1ce3be4f..18bf7bf2a0c4 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py @@ -167,6 +167,14 @@ def post_create_audience(self, response): logging.log(f"Received response: {response}") return response + def pre_create_big_query_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_big_query_link(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_calculated_metric(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -247,6 +255,14 @@ def post_create_event_create_rule(self, response): logging.log(f"Received response: {response}") return response + def pre_create_event_edit_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_event_edit_rule(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_expanded_data_set(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -271,6 +287,14 @@ def post_create_google_ads_link(self, response): logging.log(f"Received response: {response}") return response + def pre_create_key_event(self, 
request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_key_event(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_measurement_protocol_secret(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -319,14 +343,6 @@ def post_create_sk_ad_network_conversion_value_schema(self, response): logging.log(f"Received response: {response}") return response - def pre_create_subproperty(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_subproperty(self, response): - logging.log(f"Received response: {response}") - return response - def pre_create_subproperty_event_filter(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -347,6 +363,10 @@ def pre_delete_ad_sense_link(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_delete_big_query_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_delete_calculated_metric(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -379,6 +399,10 @@ def pre_delete_event_create_rule(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_delete_event_edit_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_delete_expanded_data_set(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -391,6 +415,10 @@ def pre_delete_google_ads_link(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_delete_key_event(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def 
pre_delete_measurement_protocol_secret(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -587,6 +615,14 @@ def post_get_event_create_rule(self, response): logging.log(f"Received response: {response}") return response + def pre_get_event_edit_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_event_edit_rule(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_expanded_data_set(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -611,6 +647,14 @@ def post_get_google_signals_settings(self, response): logging.log(f"Received response: {response}") return response + def pre_get_key_event(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_key_event(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_measurement_protocol_secret(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -787,6 +831,14 @@ def post_list_event_create_rules(self, response): logging.log(f"Received response: {response}") return response + def pre_list_event_edit_rules(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_event_edit_rules(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_expanded_data_sets(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -811,6 +863,14 @@ def post_list_google_ads_links(self, response): logging.log(f"Received response: {response}") return response + def pre_list_key_events(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_key_events(self, response): + logging.log(f"Received response: 
{response}") + return response + def pre_list_measurement_protocol_secrets(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -867,6 +927,18 @@ def post_provision_account_ticket(self, response): logging.log(f"Received response: {response}") return response + def pre_provision_subproperty(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_provision_subproperty(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_reorder_event_edit_rules(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_run_access_report(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -923,6 +995,14 @@ def post_update_audience(self, response): logging.log(f"Received response: {response}") return response + def pre_update_big_query_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_big_query_link(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_calculated_metric(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -1011,6 +1091,14 @@ def post_update_event_create_rule(self, response): logging.log(f"Received response: {response}") return response + def pre_update_event_edit_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_event_edit_rule(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_expanded_data_set(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -1035,6 +1123,14 @@ def post_update_google_signals_settings(self, response): logging.log(f"Received response: {response}") return response + def 
pre_update_key_event(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_key_event(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_measurement_protocol_secret(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -1355,6 +1451,29 @@ def post_create_audience( """ return response + def pre_create_big_query_link( + self, + request: analytics_admin.CreateBigQueryLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.CreateBigQueryLinkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_big_query_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + + def post_create_big_query_link( + self, response: resources.BigQueryLink + ) -> resources.BigQueryLink: + """Post-rpc interceptor for create_big_query_link + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. + """ + return response + def pre_create_calculated_metric( self, request: analytics_admin.CreateCalculatedMetricRequest, @@ -1595,6 +1714,29 @@ def post_create_event_create_rule( """ return response + def pre_create_event_edit_rule( + self, + request: analytics_admin.CreateEventEditRuleRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.CreateEventEditRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_event_edit_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. 
+ """ + return request, metadata + + def post_create_event_edit_rule( + self, response: event_create_and_edit.EventEditRule + ) -> event_create_and_edit.EventEditRule: + """Post-rpc interceptor for create_event_edit_rule + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. + """ + return response + def pre_create_expanded_data_set( self, request: analytics_admin.CreateExpandedDataSetRequest, @@ -1664,6 +1806,27 @@ def post_create_google_ads_link( """ return response + def pre_create_key_event( + self, + request: analytics_admin.CreateKeyEventRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.CreateKeyEventRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_key_event + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + + def post_create_key_event(self, response: resources.KeyEvent) -> resources.KeyEvent: + """Post-rpc interceptor for create_key_event + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. + """ + return response + def pre_create_measurement_protocol_secret( self, request: analytics_admin.CreateMeasurementProtocolSecretRequest, @@ -1810,29 +1973,6 @@ def post_create_sk_ad_network_conversion_value_schema( """ return response - def pre_create_subproperty( - self, - request: analytics_admin.CreateSubpropertyRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[analytics_admin.CreateSubpropertyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_subproperty - - Override in a subclass to manipulate the request or metadata - before they are sent to the AnalyticsAdminService server. 
- """ - return request, metadata - - def post_create_subproperty( - self, response: analytics_admin.CreateSubpropertyResponse - ) -> analytics_admin.CreateSubpropertyResponse: - """Post-rpc interceptor for create_subproperty - - Override in a subclass to manipulate the response - after it is returned by the AnalyticsAdminService server but before - it is returned to user code. - """ - return response - def pre_create_subproperty_event_filter( self, request: analytics_admin.CreateSubpropertyEventFilterRequest, @@ -1894,6 +2034,18 @@ def pre_delete_ad_sense_link( """ return request, metadata + def pre_delete_big_query_link( + self, + request: analytics_admin.DeleteBigQueryLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.DeleteBigQueryLinkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_big_query_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + def pre_delete_calculated_metric( self, request: analytics_admin.DeleteCalculatedMetricRequest, @@ -2000,6 +2152,18 @@ def pre_delete_event_create_rule( """ return request, metadata + def pre_delete_event_edit_rule( + self, + request: analytics_admin.DeleteEventEditRuleRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.DeleteEventEditRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_event_edit_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. 
+ """ + return request, metadata + def pre_delete_expanded_data_set( self, request: analytics_admin.DeleteExpandedDataSetRequest, @@ -2036,6 +2200,18 @@ def pre_delete_google_ads_link( """ return request, metadata + def pre_delete_key_event( + self, + request: analytics_admin.DeleteKeyEventRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.DeleteKeyEventRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_key_event + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + def pre_delete_measurement_protocol_secret( self, request: analytics_admin.DeleteMeasurementProtocolSecretRequest, @@ -2629,6 +2805,29 @@ def post_get_event_create_rule( """ return response + def pre_get_event_edit_rule( + self, + request: analytics_admin.GetEventEditRuleRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.GetEventEditRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_event_edit_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + + def post_get_event_edit_rule( + self, response: event_create_and_edit.EventEditRule + ) -> event_create_and_edit.EventEditRule: + """Post-rpc interceptor for get_event_edit_rule + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. 
+ """ + return response + def pre_get_expanded_data_set( self, request: analytics_admin.GetExpandedDataSetRequest, @@ -2700,6 +2899,27 @@ def post_get_google_signals_settings( """ return response + def pre_get_key_event( + self, + request: analytics_admin.GetKeyEventRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.GetKeyEventRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_key_event + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + + def post_get_key_event(self, response: resources.KeyEvent) -> resources.KeyEvent: + """Post-rpc interceptor for get_key_event + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. + """ + return response + def pre_get_measurement_protocol_secret( self, request: analytics_admin.GetMeasurementProtocolSecretRequest, @@ -3220,6 +3440,29 @@ def post_list_event_create_rules( """ return response + def pre_list_event_edit_rules( + self, + request: analytics_admin.ListEventEditRulesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.ListEventEditRulesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_event_edit_rules + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + + def post_list_event_edit_rules( + self, response: analytics_admin.ListEventEditRulesResponse + ) -> analytics_admin.ListEventEditRulesResponse: + """Post-rpc interceptor for list_event_edit_rules + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. 
+ """ + return response + def pre_list_expanded_data_sets( self, request: analytics_admin.ListExpandedDataSetsRequest, @@ -3289,6 +3532,29 @@ def post_list_google_ads_links( """ return response + def pre_list_key_events( + self, + request: analytics_admin.ListKeyEventsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.ListKeyEventsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_key_events + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + + def post_list_key_events( + self, response: analytics_admin.ListKeyEventsResponse + ) -> analytics_admin.ListKeyEventsResponse: + """Post-rpc interceptor for list_key_events + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. + """ + return response + def pre_list_measurement_protocol_secrets( self, request: analytics_admin.ListMeasurementProtocolSecretsRequest, @@ -3461,6 +3727,41 @@ def post_provision_account_ticket( """ return response + def pre_provision_subproperty( + self, + request: analytics_admin.ProvisionSubpropertyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.ProvisionSubpropertyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for provision_subproperty + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + + def post_provision_subproperty( + self, response: analytics_admin.ProvisionSubpropertyResponse + ) -> analytics_admin.ProvisionSubpropertyResponse: + """Post-rpc interceptor for provision_subproperty + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. 
+ """ + return response + + def pre_reorder_event_edit_rules( + self, + request: analytics_admin.ReorderEventEditRulesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.ReorderEventEditRulesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reorder_event_edit_rules + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + def pre_run_access_report( self, request: analytics_admin.RunAccessReportRequest, @@ -3627,11 +3928,34 @@ def post_update_audience( """ return response - def pre_update_calculated_metric( + def pre_update_big_query_link( self, - request: analytics_admin.UpdateCalculatedMetricRequest, + request: analytics_admin.UpdateBigQueryLinkRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[ + ) -> Tuple[analytics_admin.UpdateBigQueryLinkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_big_query_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + + def post_update_big_query_link( + self, response: resources.BigQueryLink + ) -> resources.BigQueryLink: + """Post-rpc interceptor for update_big_query_link + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. 
+ """ + return response + + def pre_update_calculated_metric( + self, + request: analytics_admin.UpdateCalculatedMetricRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ analytics_admin.UpdateCalculatedMetricRequest, Sequence[Tuple[str, str]] ]: """Pre-rpc interceptor for update_calculated_metric @@ -3892,6 +4216,29 @@ def post_update_event_create_rule( """ return response + def pre_update_event_edit_rule( + self, + request: analytics_admin.UpdateEventEditRuleRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.UpdateEventEditRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_event_edit_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + + def post_update_event_edit_rule( + self, response: event_create_and_edit.EventEditRule + ) -> event_create_and_edit.EventEditRule: + """Post-rpc interceptor for update_event_edit_rule + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. + """ + return response + def pre_update_expanded_data_set( self, request: analytics_admin.UpdateExpandedDataSetRequest, @@ -3963,6 +4310,27 @@ def post_update_google_signals_settings( """ return response + def pre_update_key_event( + self, + request: analytics_admin.UpdateKeyEventRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_admin.UpdateKeyEventRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_key_event + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. 
+ """ + return request, metadata + + def post_update_key_event(self, response: resources.KeyEvent) -> resources.KeyEvent: + """Post-rpc interceptor for update_key_event + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. + """ + return response + def pre_update_measurement_protocol_secret( self, request: analytics_admin.UpdateMeasurementProtocolSecretRequest, @@ -5450,6 +5818,103 @@ def __call__( resp = self._interceptor.post_create_audience(resp) return resp + class _CreateBigQueryLink(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("CreateBigQueryLink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.CreateBigQueryLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.BigQueryLink: + r"""Call the create big query link method over HTTP. + + Args: + request (~.analytics_admin.CreateBigQueryLinkRequest): + The request object. Request message for + CreateBigQueryLink RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.BigQueryLink: + A link between a GA4 Property and + BigQuery project. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=properties/*}/bigQueryLinks", + "body": "bigquery_link", + }, + ] + request, metadata = self._interceptor.pre_create_big_query_link( + request, metadata + ) + pb_request = analytics_admin.CreateBigQueryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.BigQueryLink() + pb_resp = resources.BigQueryLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_big_query_link(resp) + return resp + class _CreateCalculatedMetric(AnalyticsAdminServiceRestStub): def __hash__(self): return hash("CreateCalculatedMetric") @@ -6455,6 +6920,115 @@ def __call__( resp = self._interceptor.post_create_event_create_rule(resp) return resp + class _CreateEventEditRule(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("CreateEventEditRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.CreateEventEditRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_create_and_edit.EventEditRule: + r"""Call the create event edit rule method over HTTP. + + Args: + request (~.analytics_admin.CreateEventEditRuleRequest): + The request object. Request message for + CreateEventEditRule RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.event_create_and_edit.EventEditRule: + An Event Edit Rule defines conditions + that will trigger the creation of an + entirely new event based upon matched + criteria of a source event. Additional + mutations of the parameters from the + source event can be defined. 
+ + Unlike Event Create rules, Event Edit + Rules are applied in their defined + order. + + Event Edit rules can't be used to modify + an event created from an Event Create + rule. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=properties/*/dataStreams/*}/eventEditRules", + "body": "event_edit_rule", + }, + ] + request, metadata = self._interceptor.pre_create_event_edit_rule( + request, metadata + ) + pb_request = analytics_admin.CreateEventEditRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = event_create_and_edit.EventEditRule() + pb_resp = event_create_and_edit.EventEditRule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_event_edit_rule(resp) + return resp + class _CreateExpandedDataSet(AnalyticsAdminServiceRestStub): def __hash__(self): return hash("CreateExpandedDataSet") @@ -6746,9 +7320,9 @@ def __call__( resp = self._interceptor.post_create_google_ads_link(resp) return resp - class _CreateMeasurementProtocolSecret(AnalyticsAdminServiceRestStub): + class _CreateKeyEvent(AnalyticsAdminServiceRestStub): def __hash__(self): - return hash("CreateMeasurementProtocolSecret") + return hash("CreateKeyEvent") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -6762,48 +7336,42 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: analytics_admin.CreateMeasurementProtocolSecretRequest, + request: analytics_admin.CreateKeyEventRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.MeasurementProtocolSecret: - r"""Call the create measurement - protocol secret method over HTTP. + ) -> resources.KeyEvent: + r"""Call the create key event method over HTTP. - Args: - request (~.analytics_admin.CreateMeasurementProtocolSecretRequest): - The request object. Request message for - CreateMeasurementProtocolSecret RPC - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.analytics_admin.CreateKeyEventRequest): + The request object. 
Request message for CreateKeyEvent + RPC + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - Returns: - ~.resources.MeasurementProtocolSecret: - A secret value used for sending hits - to Measurement Protocol. + Returns: + ~.resources.KeyEvent: + A key event in a Google Analytics + property. """ http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1alpha/{parent=properties/*/dataStreams/*}/measurementProtocolSecrets", - "body": "measurement_protocol_secret", + "uri": "/v1alpha/{parent=properties/*}/keyEvents", + "body": "key_event", }, ] - ( - request, - metadata, - ) = self._interceptor.pre_create_measurement_protocol_secret( + request, metadata = self._interceptor.pre_create_key_event( request, metadata ) - pb_request = analytics_admin.CreateMeasurementProtocolSecretRequest.pb( - request - ) + pb_request = analytics_admin.CreateKeyEventRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -6842,11 +7410,114 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = resources.MeasurementProtocolSecret() - pb_resp = resources.MeasurementProtocolSecret.pb(resp) + resp = resources.KeyEvent() + pb_resp = resources.KeyEvent.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_measurement_protocol_secret(resp) + resp = self._interceptor.post_create_key_event(resp) + return resp + + class _CreateMeasurementProtocolSecret(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("CreateMeasurementProtocolSecret") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.CreateMeasurementProtocolSecretRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.MeasurementProtocolSecret: + r"""Call the create measurement + protocol secret method over HTTP. + + Args: + request (~.analytics_admin.CreateMeasurementProtocolSecretRequest): + The request object. Request message for + CreateMeasurementProtocolSecret RPC + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.MeasurementProtocolSecret: + A secret value used for sending hits + to Measurement Protocol. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=properties/*/dataStreams/*}/measurementProtocolSecrets", + "body": "measurement_protocol_secret", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_create_measurement_protocol_secret( + request, metadata + ) + pb_request = analytics_admin.CreateMeasurementProtocolSecretRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + 
headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.MeasurementProtocolSecret() + pb_resp = resources.MeasurementProtocolSecret.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_measurement_protocol_secret(resp) return resp class _CreateProperty(AnalyticsAdminServiceRestStub): @@ -7350,103 +8021,6 @@ def __call__( ) return resp - class _CreateSubproperty(AnalyticsAdminServiceRestStub): - def __hash__(self): - return hash("CreateSubproperty") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: analytics_admin.CreateSubpropertyRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.CreateSubpropertyResponse: - r"""Call the create subproperty method over HTTP. - - Args: - request (~.analytics_admin.CreateSubpropertyRequest): - The request object. Request message for CreateSubproperty - RPC. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - ~.analytics_admin.CreateSubpropertyResponse: - Response message for - CreateSubproperty RPC. - - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1alpha/properties:createSubproperty", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_create_subproperty( - request, metadata - ) - pb_request = analytics_admin.CreateSubpropertyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = analytics_admin.CreateSubpropertyResponse() - pb_resp = analytics_admin.CreateSubpropertyResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_subproperty(resp) - return resp - class _CreateSubpropertyEventFilter(AnalyticsAdminServiceRestStub): def __hash__(self): return hash("CreateSubpropertyEventFilter") @@ -7775,6 +8349,82 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + class _DeleteBigQueryLink(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("DeleteBigQueryLink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.DeleteBigQueryLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete big query link method over HTTP. + + Args: + request (~.analytics_admin.DeleteBigQueryLinkRequest): + The request object. Request message for + DeleteBigQueryLink RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=properties/*/bigQueryLinks/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_big_query_link( + request, metadata + ) + pb_request = analytics_admin.DeleteBigQueryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + class _DeleteCalculatedMetric(AnalyticsAdminServiceRestStub): def __hash__(self): return hash("DeleteCalculatedMetric") @@ -8394,9 +9044,85 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DeleteExpandedDataSet(AnalyticsAdminServiceRestStub): + class _DeleteEventEditRule(AnalyticsAdminServiceRestStub): def __hash__(self): - return hash("DeleteExpandedDataSet") + return hash("DeleteEventEditRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.DeleteEventEditRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete event edit rule method over HTTP. + + Args: + request (~.analytics_admin.DeleteEventEditRuleRequest): + The request object. Request message for + DeleteEventEditRule RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=properties/*/dataStreams/*/eventEditRules/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_event_edit_rule( + request, metadata + ) + pb_request = analytics_admin.DeleteEventEditRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteExpandedDataSet(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("DeleteExpandedDataSet") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -8622,6 +9348,82 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + class _DeleteKeyEvent(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("DeleteKeyEvent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.DeleteKeyEventRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete key event method over HTTP. + + Args: + request (~.analytics_admin.DeleteKeyEventRequest): + The request object. Request message for DeleteKeyEvent + RPC + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=properties/*/keyEvents/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_key_event( + request, metadata + ) + pb_request = analytics_admin.DeleteKeyEventRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + class _DeleteMeasurementProtocolSecret(AnalyticsAdminServiceRestStub): def __hash__(self): return hash("DeleteMeasurementProtocolSecret") @@ -11061,6 +11863,108 @@ def __call__( resp = self._interceptor.post_get_event_create_rule(resp) return resp + class _GetEventEditRule(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("GetEventEditRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.GetEventEditRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_create_and_edit.EventEditRule: + r"""Call the get event edit rule method over HTTP. + + Args: + request (~.analytics_admin.GetEventEditRuleRequest): + The request object. Request message for GetEventEditRule + RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.event_create_and_edit.EventEditRule: + An Event Edit Rule defines conditions + that will trigger the creation of an + entirely new event based upon matched + criteria of a source event. Additional + mutations of the parameters from the + source event can be defined. + + Unlike Event Create rules, Event Edit + Rules are applied in their defined + order. + + Event Edit rules can't be used to modify + an event created from an Event Create + rule. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=properties/*/dataStreams/*/eventEditRules/*}", + }, + ] + request, metadata = self._interceptor.pre_get_event_edit_rule( + request, metadata + ) + pb_request = analytics_admin.GetEventEditRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = event_create_and_edit.EventEditRule() + pb_resp = event_create_and_edit.EventEditRule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_event_edit_rule(resp) + return resp + class _GetExpandedDataSet(AnalyticsAdminServiceRestStub): def __hash__(self): return hash("GetExpandedDataSet") @@ -11334,9 +12238,9 @@ def __call__( resp = self._interceptor.post_get_google_signals_settings(resp) return resp - class _GetMeasurementProtocolSecret(AnalyticsAdminServiceRestStub): + class _GetKeyEvent(AnalyticsAdminServiceRestStub): def __hash__(self): - return hash("GetMeasurementProtocolSecret") + return hash("GetKeyEvent") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -11350,42 +12254,38 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: analytics_admin.GetMeasurementProtocolSecretRequest, + request: analytics_admin.GetKeyEventRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.MeasurementProtocolSecret: - r"""Call the get measurement protocol - secret method over HTTP. + ) -> resources.KeyEvent: + r"""Call the get key event method over HTTP. - Args: - request (~.analytics_admin.GetMeasurementProtocolSecretRequest): - The request object. Request message for - GetMeasurementProtocolSecret RPC. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.analytics_admin.GetKeyEventRequest): + The request object. 
Request message for GetKeyEvent RPC + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - Returns: - ~.resources.MeasurementProtocolSecret: - A secret value used for sending hits - to Measurement Protocol. + Returns: + ~.resources.KeyEvent: + A key event in a Google Analytics + property. """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1alpha/{name=properties/*/dataStreams/*/measurementProtocolSecrets/*}", + "uri": "/v1alpha/{name=properties/*/keyEvents/*}", }, ] - request, metadata = self._interceptor.pre_get_measurement_protocol_secret( - request, metadata - ) - pb_request = analytics_admin.GetMeasurementProtocolSecretRequest.pb(request) + request, metadata = self._interceptor.pre_get_key_event(request, metadata) + pb_request = analytics_admin.GetKeyEventRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -11418,16 +12318,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = resources.MeasurementProtocolSecret() - pb_resp = resources.MeasurementProtocolSecret.pb(resp) + resp = resources.KeyEvent() + pb_resp = resources.KeyEvent.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_measurement_protocol_secret(resp) + resp = self._interceptor.post_get_key_event(resp) return resp - class _GetProperty(AnalyticsAdminServiceRestStub): + class _GetMeasurementProtocolSecret(AnalyticsAdminServiceRestStub): def __hash__(self): - return hash("GetProperty") + return hash("GetMeasurementProtocolSecret") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -11441,15 +12341,106 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: 
analytics_admin.GetPropertyRequest, + request: analytics_admin.GetMeasurementProtocolSecretRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Property: - r"""Call the get property method over HTTP. + ) -> resources.MeasurementProtocolSecret: + r"""Call the get measurement protocol + secret method over HTTP. - Args: + Args: + request (~.analytics_admin.GetMeasurementProtocolSecretRequest): + The request object. Request message for + GetMeasurementProtocolSecret RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.MeasurementProtocolSecret: + A secret value used for sending hits + to Measurement Protocol. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=properties/*/dataStreams/*/measurementProtocolSecrets/*}", + }, + ] + request, metadata = self._interceptor.pre_get_measurement_protocol_secret( + request, metadata + ) + pb_request = analytics_admin.GetMeasurementProtocolSecretRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.MeasurementProtocolSecret() + pb_resp = resources.MeasurementProtocolSecret.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_measurement_protocol_secret(resp) + return resp + + class _GetProperty(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("GetProperty") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.GetPropertyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Property: + r"""Call the get property method over HTTP. + + Args: request (~.analytics_admin.GetPropertyRequest): The request object. Request message for GetProperty RPC. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -13320,6 +14311,96 @@ def __call__( resp = self._interceptor.post_list_event_create_rules(resp) return resp + class _ListEventEditRules(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("ListEventEditRules") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.ListEventEditRulesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_admin.ListEventEditRulesResponse: + r"""Call the list event edit rules method over HTTP. + + Args: + request (~.analytics_admin.ListEventEditRulesRequest): + The request object. Request message for + ListEventEditRules RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_admin.ListEventEditRulesResponse: + Response message for + ListEventEditRules RPC. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=properties/*/dataStreams/*}/eventEditRules", + }, + ] + request, metadata = self._interceptor.pre_list_event_edit_rules( + request, metadata + ) + pb_request = analytics_admin.ListEventEditRulesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_admin.ListEventEditRulesResponse() + pb_resp = analytics_admin.ListEventEditRulesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_event_edit_rules(resp) + return resp + class _ListExpandedDataSets(AnalyticsAdminServiceRestStub): def __hash__(self): return hash("ListExpandedDataSets") @@ -13590,6 +14671,93 @@ def __call__( resp = self._interceptor.post_list_google_ads_links(resp) return resp + class _ListKeyEvents(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("ListKeyEvents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.ListKeyEventsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_admin.ListKeyEventsResponse: + r"""Call the list key events method over HTTP. + + Args: + request (~.analytics_admin.ListKeyEventsRequest): + The request object. Request message for ListKeyEvents RPC + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_admin.ListKeyEventsResponse: + Response message for ListKeyEvents + RPC. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=properties/*}/keyEvents", + }, + ] + request, metadata = self._interceptor.pre_list_key_events(request, metadata) + pb_request = analytics_admin.ListKeyEventsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_admin.ListKeyEventsResponse() + pb_resp = analytics_admin.ListKeyEventsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_key_events(resp) + return resp + class _ListMeasurementProtocolSecrets(AnalyticsAdminServiceRestStub): def __hash__(self): return hash("ListMeasurementProtocolSecrets") @@ -14102,16 +15270,198 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1alpha/{parent=properties/*}/subpropertyEventFilters", + "method": "get", + "uri": "/v1alpha/{parent=properties/*}/subpropertyEventFilters", + }, + ] + request, metadata = self._interceptor.pre_list_subproperty_event_filters( + request, metadata + ) + pb_request = analytics_admin.ListSubpropertyEventFiltersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_admin.ListSubpropertyEventFiltersResponse() + pb_resp = analytics_admin.ListSubpropertyEventFiltersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_subproperty_event_filters(resp) + return resp + + class _ProvisionAccountTicket(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("ProvisionAccountTicket") + + def __call__( + self, + request: analytics_admin.ProvisionAccountTicketRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_admin.ProvisionAccountTicketResponse: + r"""Call the provision account ticket method over HTTP. + + Args: + request (~.analytics_admin.ProvisionAccountTicketRequest): + The request object. Request message for + ProvisionAccountTicket RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_admin.ProvisionAccountTicketResponse: + Response message for + ProvisionAccountTicket RPC. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/accounts:provisionAccountTicket", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_provision_account_ticket( + request, metadata + ) + pb_request = analytics_admin.ProvisionAccountTicketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_admin.ProvisionAccountTicketResponse() + pb_resp = analytics_admin.ProvisionAccountTicketResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_provision_account_ticket(resp) + return resp + + class _ProvisionSubproperty(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("ProvisionSubproperty") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.ProvisionSubpropertyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_admin.ProvisionSubpropertyResponse: + r"""Call the provision subproperty method over HTTP. + + Args: + request (~.analytics_admin.ProvisionSubpropertyRequest): + The request object. Request message for CreateSubproperty + RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_admin.ProvisionSubpropertyResponse: + Response message for + ProvisionSubproperty RPC. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/properties:provisionSubproperty", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_subproperty_event_filters( + request, metadata = self._interceptor.pre_provision_subproperty( request, metadata ) - pb_request = analytics_admin.ListSubpropertyEventFiltersRequest.pb(request) + pb_request = analytics_admin.ProvisionSubpropertyRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -14134,6 +15484,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -14142,55 +15493,59 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = analytics_admin.ListSubpropertyEventFiltersResponse() - pb_resp = analytics_admin.ListSubpropertyEventFiltersResponse.pb(resp) + resp = analytics_admin.ProvisionSubpropertyResponse() + pb_resp = analytics_admin.ProvisionSubpropertyResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_subproperty_event_filters(resp) + resp = self._interceptor.post_provision_subproperty(resp) return resp - class _ProvisionAccountTicket(AnalyticsAdminServiceRestStub): + class _ReorderEventEditRules(AnalyticsAdminServiceRestStub): def __hash__(self): - return hash("ProvisionAccountTicket") + return hash("ReorderEventEditRules") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not 
in message_dict + } def __call__( self, - request: analytics_admin.ProvisionAccountTicketRequest, + request: analytics_admin.ReorderEventEditRulesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> analytics_admin.ProvisionAccountTicketResponse: - r"""Call the provision account ticket method over HTTP. + ): + r"""Call the reorder event edit rules method over HTTP. Args: - request (~.analytics_admin.ProvisionAccountTicketRequest): + request (~.analytics_admin.ReorderEventEditRulesRequest): The request object. Request message for - ProvisionAccountTicket RPC. + ReorderEventEditRules RPC. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - ~.analytics_admin.ProvisionAccountTicketResponse: - Response message for - ProvisionAccountTicket RPC. 
- """ http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1alpha/accounts:provisionAccountTicket", + "uri": "/v1alpha/{parent=properties/*/dataStreams/*}/eventEditRules:reorder", "body": "*", }, ] - request, metadata = self._interceptor.pre_provision_account_ticket( + request, metadata = self._interceptor.pre_reorder_event_edit_rules( request, metadata ) - pb_request = analytics_admin.ProvisionAccountTicketRequest.pb(request) + pb_request = analytics_admin.ReorderEventEditRulesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -14208,6 +15563,7 @@ def __call__( use_integers_for_enums=True, ) ) + query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" @@ -14227,14 +15583,6 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - # Return the response - resp = analytics_admin.ProvisionAccountTicketResponse() - pb_resp = analytics_admin.ProvisionAccountTicketResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_provision_account_ticket(resp) - return resp - class _RunAccessReport(AnalyticsAdminServiceRestStub): def __hash__(self): return hash("RunAccessReport") @@ -14928,6 +16276,105 @@ def __call__( resp = self._interceptor.post_update_audience(resp) return resp + class _UpdateBigQueryLink(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("UpdateBigQueryLink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.UpdateBigQueryLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + 
metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.BigQueryLink: + r"""Call the update big query link method over HTTP. + + Args: + request (~.analytics_admin.UpdateBigQueryLinkRequest): + The request object. Request message for + UpdateBigQueryLink RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.BigQueryLink: + A link between a GA4 Property and + BigQuery project. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{bigquery_link.name=properties/*/bigQueryLinks/*}", + "body": "bigquery_link", + }, + ] + request, metadata = self._interceptor.pre_update_big_query_link( + request, metadata + ) + pb_request = analytics_admin.UpdateBigQueryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.BigQueryLink() + pb_resp = resources.BigQueryLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_big_query_link(resp) + return resp + class _UpdateCalculatedMetric(AnalyticsAdminServiceRestStub): def __hash__(self): return hash("UpdateCalculatedMetric") @@ -15849,41 +17296,152 @@ def __call__( r"""Call the update enhanced measurement settings method over HTTP. - Args: - request (~.analytics_admin.UpdateEnhancedMeasurementSettingsRequest): - The request object. Request message for - UpdateEnhancedMeasurementSettings RPC. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.analytics_admin.UpdateEnhancedMeasurementSettingsRequest): + The request object. Request message for + UpdateEnhancedMeasurementSettings RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.EnhancedMeasurementSettings: + Singleton resource under a web + DataStream, configuring measurement of + additional site interactions and + content. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{enhanced_measurement_settings.name=properties/*/dataStreams/*/enhancedMeasurementSettings}", + "body": "enhanced_measurement_settings", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_update_enhanced_measurement_settings( + request, metadata + ) + pb_request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.EnhancedMeasurementSettings() + pb_resp = resources.EnhancedMeasurementSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_enhanced_measurement_settings(resp) + return resp + + class _UpdateEventCreateRule(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("UpdateEventCreateRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.UpdateEventCreateRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_create_and_edit.EventCreateRule: + r"""Call the update event create rule method over HTTP. + + Args: + request (~.analytics_admin.UpdateEventCreateRuleRequest): + The request object. Request message for + UpdateEventCreateRule RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.event_create_and_edit.EventCreateRule: + An Event Create Rule defines + conditions that will trigger the + creation of an entirely new event based + upon matched criteria of a source event. + Additional mutations of the parameters + from the source event can be defined. - Returns: - ~.resources.EnhancedMeasurementSettings: - Singleton resource under a web - DataStream, configuring measurement of - additional site interactions and - content. 
+ Unlike Event Edit rules, Event Creation + Rules have no defined order. They will + all be run independently. + + Event Edit and Event Create rules can't + be used to modify an event created from + an Event Create rule. """ http_options: List[Dict[str, str]] = [ { "method": "patch", - "uri": "/v1alpha/{enhanced_measurement_settings.name=properties/*/dataStreams/*/enhancedMeasurementSettings}", - "body": "enhanced_measurement_settings", + "uri": "/v1alpha/{event_create_rule.name=properties/*/dataStreams/*/eventCreateRules/*}", + "body": "event_create_rule", }, ] - ( - request, - metadata, - ) = self._interceptor.pre_update_enhanced_measurement_settings( + request, metadata = self._interceptor.pre_update_event_create_rule( request, metadata ) - pb_request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest.pb( - request - ) + pb_request = analytics_admin.UpdateEventCreateRuleRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -15922,16 +17480,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = resources.EnhancedMeasurementSettings() - pb_resp = resources.EnhancedMeasurementSettings.pb(resp) + resp = event_create_and_edit.EventCreateRule() + pb_resp = event_create_and_edit.EventCreateRule.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_enhanced_measurement_settings(resp) + resp = self._interceptor.post_update_event_create_rule(resp) return resp - class _UpdateEventCreateRule(AnalyticsAdminServiceRestStub): + class _UpdateEventEditRule(AnalyticsAdminServiceRestStub): def __hash__(self): - return hash("UpdateEventCreateRule") + return hash("UpdateEventEditRule") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "updateMask": {}, @@ -15947,18 +17505,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: 
analytics_admin.UpdateEventCreateRuleRequest, + request: analytics_admin.UpdateEventEditRuleRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> event_create_and_edit.EventCreateRule: - r"""Call the update event create rule method over HTTP. + ) -> event_create_and_edit.EventEditRule: + r"""Call the update event edit rule method over HTTP. Args: - request (~.analytics_admin.UpdateEventCreateRuleRequest): + request (~.analytics_admin.UpdateEventEditRuleRequest): The request object. Request message for - UpdateEventCreateRule RPC. + UpdateEventEditRule RPC. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -15966,35 +17524,35 @@ def __call__( sent along with the request as metadata. Returns: - ~.event_create_and_edit.EventCreateRule: - An Event Create Rule defines - conditions that will trigger the - creation of an entirely new event based - upon matched criteria of a source event. - Additional mutations of the parameters - from the source event can be defined. - - Unlike Event Edit rules, Event Creation - Rules have no defined order. They will - all be run independently. - - Event Edit and Event Create rules can't - be used to modify an event created from - an Event Create rule. + ~.event_create_and_edit.EventEditRule: + An Event Edit Rule defines conditions + that will trigger the creation of an + entirely new event based upon matched + criteria of a source event. Additional + mutations of the parameters from the + source event can be defined. + + Unlike Event Create rules, Event Edit + Rules are applied in their defined + order. + + Event Edit rules can't be used to modify + an event created from an Event Create + rule. 
""" http_options: List[Dict[str, str]] = [ { "method": "patch", - "uri": "/v1alpha/{event_create_rule.name=properties/*/dataStreams/*/eventCreateRules/*}", - "body": "event_create_rule", + "uri": "/v1alpha/{event_edit_rule.name=properties/*/dataStreams/*/eventEditRules/*}", + "body": "event_edit_rule", }, ] - request, metadata = self._interceptor.pre_update_event_create_rule( + request, metadata = self._interceptor.pre_update_event_edit_rule( request, metadata ) - pb_request = analytics_admin.UpdateEventCreateRuleRequest.pb(request) + pb_request = analytics_admin.UpdateEventEditRuleRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -16033,11 +17591,11 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = event_create_and_edit.EventCreateRule() - pb_resp = event_create_and_edit.EventCreateRule.pb(resp) + resp = event_create_and_edit.EventEditRule() + pb_resp = event_create_and_edit.EventEditRule.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_event_create_rule(resp) + resp = self._interceptor.post_update_event_edit_rule(resp) return resp class _UpdateExpandedDataSet(AnalyticsAdminServiceRestStub): @@ -16338,6 +17896,105 @@ def __call__( resp = self._interceptor.post_update_google_signals_settings(resp) return resp + class _UpdateKeyEvent(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("UpdateKeyEvent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.UpdateKeyEventRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), 
+ ) -> resources.KeyEvent: + r"""Call the update key event method over HTTP. + + Args: + request (~.analytics_admin.UpdateKeyEventRequest): + The request object. Request message for UpdateKeyEvent + RPC + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.KeyEvent: + A key event in a Google Analytics + property. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{key_event.name=properties/*/keyEvents/*}", + "body": "key_event", + }, + ] + request, metadata = self._interceptor.pre_update_key_event( + request, metadata + ) + pb_request = analytics_admin.UpdateKeyEventRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.KeyEvent() + pb_resp = resources.KeyEvent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_key_event(resp) + return resp + class _UpdateMeasurementProtocolSecret(AnalyticsAdminServiceRestStub): def __hash__(self): return hash("UpdateMeasurementProtocolSecret") @@ -16973,6 +18630,14 @@ def create_audience( # In C++ this would require a dynamic_cast return self._CreateAudience(self._session, self._host, self._interceptor) # type: ignore + @property + def create_big_query_link( + self, + ) -> Callable[[analytics_admin.CreateBigQueryLinkRequest], resources.BigQueryLink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBigQueryLink(self._session, self._host, self._interceptor) # type: ignore + @property def create_calculated_metric( self, @@ -17073,6 +18738,17 @@ def create_event_create_rule( # In C++ this would require a dynamic_cast return self._CreateEventCreateRule(self._session, self._host, self._interceptor) # type: ignore + @property + def create_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.CreateEventEditRuleRequest], + event_create_and_edit.EventEditRule, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateEventEditRule(self._session, self._host, self._interceptor) # type: ignore + @property def create_expanded_data_set( self, @@ -17102,6 +18778,14 @@ def create_google_ads_link( # In C++ this would require a dynamic_cast return self._CreateGoogleAdsLink(self._session, self._host, self._interceptor) # type: ignore + @property + def create_key_event( + self, + ) -> Callable[[analytics_admin.CreateKeyEventRequest], resources.KeyEvent]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateKeyEvent(self._session, self._host, self._interceptor) # type: ignore + @property def create_measurement_protocol_secret( self, @@ -17164,17 +18848,6 @@ def create_sk_ad_network_conversion_value_schema( # In C++ this would require a dynamic_cast return self._CreateSKAdNetworkConversionValueSchema(self._session, self._host, self._interceptor) # type: ignore - @property - def create_subproperty( - self, - ) -> Callable[ - [analytics_admin.CreateSubpropertyRequest], - analytics_admin.CreateSubpropertyResponse, - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateSubproperty(self._session, self._host, self._interceptor) # type: ignore - @property def create_subproperty_event_filter( self, @@ -17210,6 +18883,14 @@ def delete_ad_sense_link( # In C++ this would require a dynamic_cast return self._DeleteAdSenseLink(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_big_query_link( + self, + ) -> Callable[[analytics_admin.DeleteBigQueryLinkRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBigQueryLink(self._session, self._host, self._interceptor) # type: ignore + @property def delete_calculated_metric( self, @@ -17279,6 +18960,14 @@ def delete_event_create_rule( # In C++ this would require a dynamic_cast return self._DeleteEventCreateRule(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_event_edit_rule( + self, + ) -> Callable[[analytics_admin.DeleteEventEditRuleRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEventEditRule(self._session, self._host, self._interceptor) # type: ignore + @property def delete_expanded_data_set( self, @@ -17303,6 +18992,14 @@ def delete_google_ads_link( # In C++ this would require a dynamic_cast return self._DeleteGoogleAdsLink(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_key_event( + self, + ) -> Callable[[analytics_admin.DeleteKeyEventRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteKeyEvent(self._session, self._host, self._interceptor) # type: ignore + @property def delete_measurement_protocol_secret( self, @@ -17561,6 +19258,16 @@ def get_event_create_rule( # In C++ this would require a dynamic_cast return self._GetEventCreateRule(self._session, self._host, self._interceptor) # type: ignore + @property + def get_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.GetEventEditRuleRequest], event_create_and_edit.EventEditRule + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetEventEditRule(self._session, self._host, self._interceptor) # type: ignore + @property def get_expanded_data_set( self, @@ -17590,6 +19297,14 @@ def get_google_signals_settings( # In C++ this would require a dynamic_cast return self._GetGoogleSignalsSettings(self._session, self._host, self._interceptor) # type: ignore + @property + def get_key_event( + self, + ) -> Callable[[analytics_admin.GetKeyEventRequest], resources.KeyEvent]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetKeyEvent(self._session, self._host, self._interceptor) # type: ignore + @property def get_measurement_protocol_secret( self, @@ -17826,6 +19541,17 @@ def list_event_create_rules( # In C++ this would require a dynamic_cast return self._ListEventCreateRules(self._session, self._host, self._interceptor) # type: ignore + @property + def list_event_edit_rules( + self, + ) -> Callable[ + [analytics_admin.ListEventEditRulesRequest], + analytics_admin.ListEventEditRulesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEventEditRules(self._session, self._host, self._interceptor) # type: ignore + @property def list_expanded_data_sets( self, @@ -17859,6 +19585,16 @@ def list_google_ads_links( # In C++ this would require a dynamic_cast return self._ListGoogleAdsLinks(self._session, self._host, self._interceptor) # type: ignore + @property + def list_key_events( + self, + ) -> Callable[ + [analytics_admin.ListKeyEventsRequest], analytics_admin.ListKeyEventsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListKeyEvents(self._session, self._host, self._interceptor) # type: ignore + @property def list_measurement_protocol_secrets( self, @@ -17935,6 +19671,25 @@ def provision_account_ticket( # In C++ this would require a dynamic_cast return self._ProvisionAccountTicket(self._session, self._host, self._interceptor) # type: ignore + @property + def provision_subproperty( + self, + ) -> Callable[ + [analytics_admin.ProvisionSubpropertyRequest], + analytics_admin.ProvisionSubpropertyResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ProvisionSubproperty(self._session, self._host, self._interceptor) # type: ignore + + @property + def reorder_event_edit_rules( + self, + ) -> Callable[[analytics_admin.ReorderEventEditRulesRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ReorderEventEditRules(self._session, self._host, self._interceptor) # type: ignore + @property def run_access_report( self, @@ -18005,6 +19760,14 @@ def update_audience( # In C++ this would require a dynamic_cast return self._UpdateAudience(self._session, self._host, self._interceptor) # type: ignore + @property + def update_big_query_link( + self, + ) -> Callable[[analytics_admin.UpdateBigQueryLinkRequest], resources.BigQueryLink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateBigQueryLink(self._session, self._host, self._interceptor) # type: ignore + @property def update_calculated_metric( self, @@ -18116,6 +19879,17 @@ def update_event_create_rule( # In C++ this would require a dynamic_cast return self._UpdateEventCreateRule(self._session, self._host, self._interceptor) # type: ignore + @property + def update_event_edit_rule( + self, + ) -> Callable[ + [analytics_admin.UpdateEventEditRuleRequest], + event_create_and_edit.EventEditRule, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateEventEditRule(self._session, self._host, self._interceptor) # type: ignore + @property def update_expanded_data_set( self, @@ -18148,6 +19922,14 @@ def update_google_signals_settings( # In C++ this would require a dynamic_cast return self._UpdateGoogleSignalsSettings(self._session, self._host, self._interceptor) # type: ignore + @property + def update_key_event( + self, + ) -> Callable[[analytics_admin.UpdateKeyEventRequest], resources.KeyEvent]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateKeyEvent(self._session, self._host, self._interceptor) # type: ignore + @property def update_measurement_protocol_secret( self, diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/__init__.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/__init__.py index aa5f7ff96098..5248eee2b7f0 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/__init__.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/__init__.py @@ -53,6 +53,7 @@ CreateAccessBindingRequest, CreateAdSenseLinkRequest, CreateAudienceRequest, + CreateBigQueryLinkRequest, CreateCalculatedMetricRequest, CreateChannelGroupRequest, CreateConnectedSiteTagRequest, @@ -64,9 +65,11 @@ CreateDisplayVideo360AdvertiserLinkProposalRequest, CreateDisplayVideo360AdvertiserLinkRequest, CreateEventCreateRuleRequest, + CreateEventEditRuleRequest, CreateExpandedDataSetRequest, CreateFirebaseLinkRequest, CreateGoogleAdsLinkRequest, + CreateKeyEventRequest, CreateMeasurementProtocolSecretRequest, CreatePropertyRequest, CreateRollupPropertyRequest, @@ -75,11 +78,10 @@ CreateSearchAds360LinkRequest, CreateSKAdNetworkConversionValueSchemaRequest, CreateSubpropertyEventFilterRequest, - CreateSubpropertyRequest, - CreateSubpropertyResponse, DeleteAccessBindingRequest, DeleteAccountRequest, DeleteAdSenseLinkRequest, + DeleteBigQueryLinkRequest, DeleteCalculatedMetricRequest, DeleteChannelGroupRequest, DeleteConnectedSiteTagRequest, @@ -88,9 +90,11 @@ DeleteDisplayVideo360AdvertiserLinkProposalRequest, DeleteDisplayVideo360AdvertiserLinkRequest, DeleteEventCreateRuleRequest, + DeleteEventEditRuleRequest, DeleteExpandedDataSetRequest, DeleteFirebaseLinkRequest, DeleteGoogleAdsLinkRequest, + DeleteKeyEventRequest, DeleteMeasurementProtocolSecretRequest, DeletePropertyRequest, DeleteRollupPropertySourceLinkRequest, @@ -120,9 +124,11 @@ 
GetDisplayVideo360AdvertiserLinkRequest, GetEnhancedMeasurementSettingsRequest, GetEventCreateRuleRequest, + GetEventEditRuleRequest, GetExpandedDataSetRequest, GetGlobalSiteTagRequest, GetGoogleSignalsSettingsRequest, + GetKeyEventRequest, GetMeasurementProtocolSecretRequest, GetPropertyRequest, GetRollupPropertySourceLinkRequest, @@ -161,12 +167,16 @@ ListDisplayVideo360AdvertiserLinksResponse, ListEventCreateRulesRequest, ListEventCreateRulesResponse, + ListEventEditRulesRequest, + ListEventEditRulesResponse, ListExpandedDataSetsRequest, ListExpandedDataSetsResponse, ListFirebaseLinksRequest, ListFirebaseLinksResponse, ListGoogleAdsLinksRequest, ListGoogleAdsLinksResponse, + ListKeyEventsRequest, + ListKeyEventsResponse, ListMeasurementProtocolSecretsRequest, ListMeasurementProtocolSecretsResponse, ListPropertiesRequest, @@ -181,6 +191,9 @@ ListSubpropertyEventFiltersResponse, ProvisionAccountTicketRequest, ProvisionAccountTicketResponse, + ProvisionSubpropertyRequest, + ProvisionSubpropertyResponse, + ReorderEventEditRulesRequest, RunAccessReportRequest, RunAccessReportResponse, SearchChangeHistoryEventsRequest, @@ -191,6 +204,7 @@ UpdateAccountRequest, UpdateAttributionSettingsRequest, UpdateAudienceRequest, + UpdateBigQueryLinkRequest, UpdateCalculatedMetricRequest, UpdateChannelGroupRequest, UpdateConversionEventRequest, @@ -202,9 +216,11 @@ UpdateDisplayVideo360AdvertiserLinkRequest, UpdateEnhancedMeasurementSettingsRequest, UpdateEventCreateRuleRequest, + UpdateEventEditRuleRequest, UpdateExpandedDataSetRequest, UpdateGoogleAdsLinkRequest, UpdateGoogleSignalsSettingsRequest, + UpdateKeyEventRequest, UpdateMeasurementProtocolSecretRequest, UpdatePropertyRequest, UpdateSearchAds360LinkRequest, @@ -230,7 +246,12 @@ ChannelGroupFilterExpressionList, GroupingRule, ) -from .event_create_and_edit import EventCreateRule, MatchingCondition, ParameterMutation +from .event_create_and_edit import ( + EventCreateRule, + EventEditRule, + MatchingCondition, + 
ParameterMutation, +) from .expanded_data_set import ( ExpandedDataSet, ExpandedDataSetFilter, @@ -271,6 +292,7 @@ GoogleSignalsSettings, GoogleSignalsState, IndustryCategory, + KeyEvent, LinkProposalInitiatingProduct, LinkProposalState, LinkProposalStatusDetails, @@ -330,6 +352,7 @@ "CreateAccessBindingRequest", "CreateAdSenseLinkRequest", "CreateAudienceRequest", + "CreateBigQueryLinkRequest", "CreateCalculatedMetricRequest", "CreateChannelGroupRequest", "CreateConnectedSiteTagRequest", @@ -341,9 +364,11 @@ "CreateDisplayVideo360AdvertiserLinkProposalRequest", "CreateDisplayVideo360AdvertiserLinkRequest", "CreateEventCreateRuleRequest", + "CreateEventEditRuleRequest", "CreateExpandedDataSetRequest", "CreateFirebaseLinkRequest", "CreateGoogleAdsLinkRequest", + "CreateKeyEventRequest", "CreateMeasurementProtocolSecretRequest", "CreatePropertyRequest", "CreateRollupPropertyRequest", @@ -352,11 +377,10 @@ "CreateSearchAds360LinkRequest", "CreateSKAdNetworkConversionValueSchemaRequest", "CreateSubpropertyEventFilterRequest", - "CreateSubpropertyRequest", - "CreateSubpropertyResponse", "DeleteAccessBindingRequest", "DeleteAccountRequest", "DeleteAdSenseLinkRequest", + "DeleteBigQueryLinkRequest", "DeleteCalculatedMetricRequest", "DeleteChannelGroupRequest", "DeleteConnectedSiteTagRequest", @@ -365,9 +389,11 @@ "DeleteDisplayVideo360AdvertiserLinkProposalRequest", "DeleteDisplayVideo360AdvertiserLinkRequest", "DeleteEventCreateRuleRequest", + "DeleteEventEditRuleRequest", "DeleteExpandedDataSetRequest", "DeleteFirebaseLinkRequest", "DeleteGoogleAdsLinkRequest", + "DeleteKeyEventRequest", "DeleteMeasurementProtocolSecretRequest", "DeletePropertyRequest", "DeleteRollupPropertySourceLinkRequest", @@ -397,9 +423,11 @@ "GetDisplayVideo360AdvertiserLinkRequest", "GetEnhancedMeasurementSettingsRequest", "GetEventCreateRuleRequest", + "GetEventEditRuleRequest", "GetExpandedDataSetRequest", "GetGlobalSiteTagRequest", "GetGoogleSignalsSettingsRequest", + "GetKeyEventRequest", 
"GetMeasurementProtocolSecretRequest", "GetPropertyRequest", "GetRollupPropertySourceLinkRequest", @@ -438,12 +466,16 @@ "ListDisplayVideo360AdvertiserLinksResponse", "ListEventCreateRulesRequest", "ListEventCreateRulesResponse", + "ListEventEditRulesRequest", + "ListEventEditRulesResponse", "ListExpandedDataSetsRequest", "ListExpandedDataSetsResponse", "ListFirebaseLinksRequest", "ListFirebaseLinksResponse", "ListGoogleAdsLinksRequest", "ListGoogleAdsLinksResponse", + "ListKeyEventsRequest", + "ListKeyEventsResponse", "ListMeasurementProtocolSecretsRequest", "ListMeasurementProtocolSecretsResponse", "ListPropertiesRequest", @@ -458,6 +490,9 @@ "ListSubpropertyEventFiltersResponse", "ProvisionAccountTicketRequest", "ProvisionAccountTicketResponse", + "ProvisionSubpropertyRequest", + "ProvisionSubpropertyResponse", + "ReorderEventEditRulesRequest", "RunAccessReportRequest", "RunAccessReportResponse", "SearchChangeHistoryEventsRequest", @@ -468,6 +503,7 @@ "UpdateAccountRequest", "UpdateAttributionSettingsRequest", "UpdateAudienceRequest", + "UpdateBigQueryLinkRequest", "UpdateCalculatedMetricRequest", "UpdateChannelGroupRequest", "UpdateConversionEventRequest", @@ -479,9 +515,11 @@ "UpdateDisplayVideo360AdvertiserLinkRequest", "UpdateEnhancedMeasurementSettingsRequest", "UpdateEventCreateRuleRequest", + "UpdateEventEditRuleRequest", "UpdateExpandedDataSetRequest", "UpdateGoogleAdsLinkRequest", "UpdateGoogleSignalsSettingsRequest", + "UpdateKeyEventRequest", "UpdateMeasurementProtocolSecretRequest", "UpdatePropertyRequest", "UpdateSearchAds360LinkRequest", @@ -503,6 +541,7 @@ "ChannelGroupFilterExpressionList", "GroupingRule", "EventCreateRule", + "EventEditRule", "MatchingCondition", "ParameterMutation", "ExpandedDataSet", @@ -535,6 +574,7 @@ "GlobalSiteTag", "GoogleAdsLink", "GoogleSignalsSettings", + "KeyEvent", "LinkProposalStatusDetails", "MeasurementProtocolSecret", "PostbackWindow", diff --git 
a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py index ea94cd377823..6178315944ed 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py @@ -88,6 +88,12 @@ "DeleteConversionEventRequest", "ListConversionEventsRequest", "ListConversionEventsResponse", + "CreateKeyEventRequest", + "UpdateKeyEventRequest", + "GetKeyEventRequest", + "DeleteKeyEventRequest", + "ListKeyEventsRequest", + "ListKeyEventsResponse", "GetDisplayVideo360AdvertiserLinkRequest", "ListDisplayVideo360AdvertiserLinksRequest", "ListDisplayVideo360AdvertiserLinksResponse", @@ -171,9 +177,12 @@ "SetAutomatedGa4ConfigurationOptOutResponse", "FetchAutomatedGa4ConfigurationOptOutRequest", "FetchAutomatedGa4ConfigurationOptOutResponse", + "CreateBigQueryLinkRequest", "GetBigQueryLinkRequest", "ListBigQueryLinksRequest", "ListBigQueryLinksResponse", + "UpdateBigQueryLinkRequest", + "DeleteBigQueryLinkRequest", "GetEnhancedMeasurementSettingsRequest", "UpdateEnhancedMeasurementSettingsRequest", "GetDataRedactionSettingsRequest", @@ -196,6 +205,13 @@ "GetEventCreateRuleRequest", "ListEventCreateRulesRequest", "ListEventCreateRulesResponse", + "CreateEventEditRuleRequest", + "UpdateEventEditRuleRequest", + "DeleteEventEditRuleRequest", + "GetEventEditRuleRequest", + "ListEventEditRulesRequest", + "ListEventEditRulesResponse", + "ReorderEventEditRulesRequest", "CreateRollupPropertyRequest", "CreateRollupPropertyResponse", "GetRollupPropertySourceLinkRequest", @@ -203,8 +219,8 @@ "ListRollupPropertySourceLinksResponse", "CreateRollupPropertySourceLinkRequest", "DeleteRollupPropertySourceLinkRequest", - "CreateSubpropertyRequest", - "CreateSubpropertyResponse", + "ProvisionSubpropertyRequest", + "ProvisionSubpropertyResponse", 
"CreateSubpropertyEventFilterRequest", "GetSubpropertyEventFilterRequest", "ListSubpropertyEventFiltersRequest", @@ -768,8 +784,9 @@ class CreateFirebaseLinkRequest(proto.Message): Attributes: parent (str): - Required. Format: properties/{property_id} Example: - properties/1234 + Required. Format: properties/{property_id} + + Example: ``properties/1234`` firebase_link (google.analytics.admin_v1alpha.types.FirebaseLink): Required. The Firebase link to create. """ @@ -792,7 +809,8 @@ class DeleteFirebaseLinkRequest(proto.Message): name (str): Required. Format: properties/{property_id}/firebaseLinks/{firebase_link_id} - Example: properties/1234/firebaseLinks/5678 + + Example: ``properties/1234/firebaseLinks/5678`` """ name: str = proto.Field( @@ -806,8 +824,9 @@ class ListFirebaseLinksRequest(proto.Message): Attributes: parent (str): - Required. Format: properties/{property_id} Example: - properties/1234 + Required. Format: properties/{property_id} + + Example: ``properties/1234`` page_size (int): The maximum number of resources to return. The service may return fewer than this value, @@ -874,7 +893,8 @@ class GetGlobalSiteTagRequest(proto.Message): Required. The name of the site tag to lookup. Note that site tags are singletons and do not have unique IDs. Format: properties/{property_id}/dataStreams/{stream_id}/globalSiteTag - Example: "properties/123/dataStreams/456/globalSiteTag". + + Example: ``properties/123/dataStreams/456/globalSiteTag`` """ name: str = proto.Field( @@ -1011,9 +1031,10 @@ class GetDataSharingSettingsRequest(proto.Message): Attributes: name (str): - Required. The name of the settings to lookup. - Format: accounts/{account}/dataSharingSettings - Example: "accounts/1000/dataSharingSettings". + Required. The name of the settings to lookup. 
Format: + accounts/{account}/dataSharingSettings + + Example: ``accounts/1000/dataSharingSettings`` """ name: str = proto.Field( @@ -1121,15 +1142,16 @@ class SearchChangeHistoryEventsRequest(proto.Message): Attributes: account (str): - Required. The account resource for which to - return change history resources. Format: - accounts/{account} Example: "accounts/100". + Required. The account resource for which to return change + history resources. Format: accounts/{account} + + Example: ``accounts/100`` property (str): - Optional. Resource name for a child property. - If set, only return changes made to this - property or its child resources. Format: - properties/{propertyId} - Example: "properties/100". + Optional. Resource name for a child property. If set, only + return changes made to this property or its child resources. + Format: properties/{propertyId} + + Example: ``properties/100`` resource_type (MutableSequence[google.analytics.admin_v1alpha.types.ChangeHistoryResourceType]): Optional. If set, only return changes if they are for a resource that matches at least one of @@ -1726,6 +1748,148 @@ def raw_page(self): ) +class CreateKeyEventRequest(proto.Message): + r"""Request message for CreateKeyEvent RPC + + Attributes: + key_event (google.analytics.admin_v1alpha.types.KeyEvent): + Required. The Key Event to create. + parent (str): + Required. The resource name of the parent + property where this Key Event will be created. + Format: properties/123 + """ + + key_event: resources.KeyEvent = proto.Field( + proto.MESSAGE, + number=1, + message=resources.KeyEvent, + ) + parent: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateKeyEventRequest(proto.Message): + r"""Request message for UpdateKeyEvent RPC + + Attributes: + key_event (google.analytics.admin_v1alpha.types.KeyEvent): + Required. The Key Event to update. The ``name`` field is + used to identify the settings to be updated. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Field names must + be in snake case (e.g., "field_to_update"). Omitted fields + will not be updated. To replace the entire entity, use one + path with the string "*" to match all fields. + """ + + key_event: resources.KeyEvent = proto.Field( + proto.MESSAGE, + number=1, + message=resources.KeyEvent, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetKeyEventRequest(proto.Message): + r"""Request message for GetKeyEvent RPC + + Attributes: + name (str): + Required. The resource name of the Key Event to retrieve. + Format: properties/{property}/keyEvents/{key_event} Example: + "properties/123/keyEvents/456". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteKeyEventRequest(proto.Message): + r"""Request message for DeleteKeyEvent RPC + + Attributes: + name (str): + Required. The resource name of the Key Event to delete. + Format: properties/{property}/keyEvents/{key_event} Example: + "properties/123/keyEvents/456". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListKeyEventsRequest(proto.Message): + r"""Request message for ListKeyEvents RPC + + Attributes: + parent (str): + Required. The resource name of the parent + property. Example: 'properties/123' + page_size (int): + The maximum number of resources to return. + If unspecified, at most 50 resources will be + returned. The maximum value is 200; (higher + values will be coerced to the maximum) + page_token (str): + A page token, received from a previous ``ListKeyEvents`` + call. Provide this to retrieve the subsequent page. When + paginating, all other parameters provided to + ``ListKeyEvents`` must match the call that provided the page + token. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListKeyEventsResponse(proto.Message): + r"""Response message for ListKeyEvents RPC. + + Attributes: + key_events (MutableSequence[google.analytics.admin_v1alpha.types.KeyEvent]): + The requested Key Events + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + key_events: MutableSequence[resources.KeyEvent] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.KeyEvent, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + class GetDisplayVideo360AdvertiserLinkRequest(proto.Message): r"""Request message for GetDisplayVideo360AdvertiserLink RPC. @@ -2486,7 +2650,7 @@ class CreateCalculatedMetricRequest(proto.Message): resource name. This value should be 1-80 characters and valid characters - are `[a-zA-Z0-9_]`, no spaces allowed. calculated_metric_id + are /[a-zA-Z0-9_]/, no spaces allowed. calculated_metric_id must be unique between all calculated metrics under a property. The calculated_metric_id is used when referencing this calculated metric from external APIs, for example, @@ -3660,6 +3824,27 @@ class FetchAutomatedGa4ConfigurationOptOutResponse(proto.Message): ) +class CreateBigQueryLinkRequest(proto.Message): + r"""Request message for CreateBigQueryLink RPC. + + Attributes: + parent (str): + Required. Example format: properties/1234 + bigquery_link (google.analytics.admin_v1alpha.types.BigQueryLink): + Required. The BigQueryLink to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + bigquery_link: resources.BigQueryLink = proto.Field( + proto.MESSAGE, + number=2, + message=resources.BigQueryLink, + ) + + class GetBigQueryLinkRequest(proto.Message): r"""Request message for GetBigQueryLink RPC. @@ -3740,6 +3925,48 @@ def raw_page(self): ) +class UpdateBigQueryLinkRequest(proto.Message): + r"""Request message for UpdateBigQueryLink RPC. + + Attributes: + bigquery_link (google.analytics.admin_v1alpha.types.BigQueryLink): + Required. The settings to update. The ``name`` field is used + to identify the settings to be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Field names must + be in snake case (e.g., "field_to_update"). Omitted fields + will not be updated. To replace the entire entity, use one + path with the string "*" to match all fields. + """ + + bigquery_link: resources.BigQueryLink = proto.Field( + proto.MESSAGE, + number=1, + message=resources.BigQueryLink, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteBigQueryLinkRequest(proto.Message): + r"""Request message for DeleteBigQueryLink RPC. + + Attributes: + name (str): + Required. The BigQueryLink to delete. + Example format: + properties/1234/bigQueryLinks/5678 + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class GetEnhancedMeasurementSettingsRequest(proto.Message): r"""Request message for GetEnhancedMeasurementSettings RPC. @@ -4221,6 +4448,176 @@ def raw_page(self): ) +class CreateEventEditRuleRequest(proto.Message): + r"""Request message for CreateEventEditRule RPC. + + Attributes: + parent (str): + Required. Example format: + properties/123/dataStreams/456 + event_edit_rule (google.analytics.admin_v1alpha.types.EventEditRule): + Required. The EventEditRule to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + event_edit_rule: event_create_and_edit.EventEditRule = proto.Field( + proto.MESSAGE, + number=2, + message=event_create_and_edit.EventEditRule, + ) + + +class UpdateEventEditRuleRequest(proto.Message): + r"""Request message for UpdateEventEditRule RPC. + + Attributes: + event_edit_rule (google.analytics.admin_v1alpha.types.EventEditRule): + Required. The EventEditRule to update. The resource's + ``name`` field is used to identify the EventEditRule to be + updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. Field names must + be in snake case (e.g., "field_to_update"). Omitted fields + will not be updated. To replace the entire entity, use one + path with the string "*" to match all fields. + """ + + event_edit_rule: event_create_and_edit.EventEditRule = proto.Field( + proto.MESSAGE, + number=1, + message=event_create_and_edit.EventEditRule, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteEventEditRuleRequest(proto.Message): + r"""Request message for DeleteEventEditRule RPC. + + Attributes: + name (str): + Required. Example format: + properties/123/dataStreams/456/eventEditRules/789 + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetEventEditRuleRequest(proto.Message): + r"""Request message for GetEventEditRule RPC. + + Attributes: + name (str): + Required. The name of the EventEditRule to + get. Example format: + properties/123/dataStreams/456/eventEditRules/789 + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListEventEditRulesRequest(proto.Message): + r"""Request message for ListEventEditRules RPC. + + Attributes: + parent (str): + Required. Example format: + properties/123/dataStreams/456 + page_size (int): + Optional. The maximum number of resources to + return. 
If unspecified, at most 50 resources + will be returned. The maximum value is 200 + (higher values will be coerced to the maximum). + page_token (str): + Optional. A page token, received from a previous + ``ListEventEditRules`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListEventEditRules`` must match the call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListEventEditRulesResponse(proto.Message): + r"""Response message for ListEventEditRules RPC. + + Attributes: + event_edit_rules (MutableSequence[google.analytics.admin_v1alpha.types.EventEditRule]): + List of EventEditRules. These will be ordered + stably, but in an arbitrary order. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + event_edit_rules: MutableSequence[ + event_create_and_edit.EventEditRule + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=event_create_and_edit.EventEditRule, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ReorderEventEditRulesRequest(proto.Message): + r"""Request message for ReorderEventEditRules RPC. + + Attributes: + parent (str): + Required. Example format: + properties/123/dataStreams/456 + event_edit_rules (MutableSequence[str]): + Required. EventEditRule resource names for + the specified data stream, in the needed + processing order. All EventEditRules for the + stream must be present in the list. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + event_edit_rules: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + class CreateRollupPropertyRequest(proto.Message): r"""Request message for CreateRollupProperty RPC. @@ -4390,14 +4787,10 @@ class DeleteRollupPropertySourceLinkRequest(proto.Message): ) -class CreateSubpropertyRequest(proto.Message): +class ProvisionSubpropertyRequest(proto.Message): r"""Request message for CreateSubproperty RPC. Attributes: - parent (str): - Required. The ordinary property for which to create a - subproperty. Format: properties/property_id Example: - properties/123 subproperty (google.analytics.admin_v1alpha.types.Property): Required. The subproperty to create. subproperty_event_filter (google.analytics.admin_v1alpha.types.SubpropertyEventFilter): @@ -4405,10 +4798,6 @@ class CreateSubpropertyRequest(proto.Message): create on an ordinary property. """ - parent: str = proto.Field( - proto.STRING, - number=1, - ) subproperty: resources.Property = proto.Field( proto.MESSAGE, number=2, @@ -4423,8 +4812,8 @@ class CreateSubpropertyRequest(proto.Message): ) -class CreateSubpropertyResponse(proto.Message): - r"""Response message for CreateSubproperty RPC. +class ProvisionSubpropertyResponse(proto.Message): + r"""Response message for ProvisionSubproperty RPC. 
Attributes: subproperty (google.analytics.admin_v1alpha.types.Property): diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/audience.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/audience.py index b1f3905dd649..fad860d1132e 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/audience.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/audience.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -712,6 +713,9 @@ class Audience(proto.Message): Required. Immutable. Unordered list. Filter clauses that define the Audience. All clauses will be AND’ed together. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the Audience was + created. """ class AudienceExclusionDurationMode(proto.Enum): @@ -767,6 +771,11 @@ class AudienceExclusionDurationMode(proto.Enum): number=8, message="AudienceFilterClause", ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/channel_group.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/channel_group.py index 66494ae6dbf7..43d2c51386fb 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/channel_group.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/channel_group.py @@ -268,6 +268,15 @@ class ChannelGroup(proto.Message): is the Default Channel Group predefined by Google Analytics. Display name and grouping rules cannot be updated for this channel group. + primary (bool): + Optional. 
If true, this channel group will be used as the + default channel group for reports. Only one channel group + can be set as ``primary`` at any time. If the ``primary`` + field gets set on a channel group, it will get unset on the + previous primary channel group. + + The Google Analytics predefined channel group is the primary + by default. """ name: str = proto.Field( @@ -291,6 +300,10 @@ class ChannelGroup(proto.Message): proto.BOOL, number=5, ) + primary: bool = proto.Field( + proto.BOOL, + number=6, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/event_create_and_edit.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/event_create_and_edit.py index 6a4701ae4a98..c47f879d4ae0 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/event_create_and_edit.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/event_create_and_edit.py @@ -24,6 +24,7 @@ manifest={ "ParameterMutation", "EventCreateRule", + "EventEditRule", "MatchingCondition", }, ) @@ -130,6 +131,71 @@ class EventCreateRule(proto.Message): ) +class EventEditRule(proto.Message): + r"""An Event Edit Rule defines conditions that will trigger the + creation of an entirely new event based upon matched criteria of + a source event. Additional mutations of the parameters from the + source event can be defined. + + Unlike Event Create rules, Event Edit Rules are applied in their + defined order. + + Event Edit rules can't be used to modify an event created from + an Event Create rule. + + Attributes: + name (str): + Identifier. Resource name for this EventEditRule resource. + Format: + properties/{property}/dataStreams/{data_stream}/eventEditRules/{event_edit_rule} + display_name (str): + Required. The display name of this event edit + rule. Maximum of 255 characters. 
+ event_conditions (MutableSequence[google.analytics.admin_v1alpha.types.MatchingCondition]): + Required. Conditions on the source event must + match for this rule to be applied. Must have at + least one condition, and can have up to 10 max. + parameter_mutations (MutableSequence[google.analytics.admin_v1alpha.types.ParameterMutation]): + Required. Parameter mutations define + parameter behavior on the new event, and are + applied in order. A maximum of 20 mutations can + be applied. + processing_order (int): + Output only. The order for which this rule + will be processed. Rules with an order value + lower than this will be processed before this + rule, rules with an order value higher than this + will be processed after this rule. New event + edit rules will be assigned an order value at + the end of the order. + + This value does not apply to event create rules. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + event_conditions: MutableSequence["MatchingCondition"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="MatchingCondition", + ) + parameter_mutations: MutableSequence["ParameterMutation"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="ParameterMutation", + ) + processing_order: int = proto.Field( + proto.INT64, + number=5, + ) + + class MatchingCondition(proto.Message): r"""Defines a condition for when an Event Edit or Event Creation rule applies to an event. 
diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py index 914ae9dc615d..dee751dedb76 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py @@ -63,6 +63,7 @@ "SearchAds360Link", "LinkProposalStatusDetails", "ConversionEvent", + "KeyEvent", "GoogleSignalsSettings", "CustomDimension", "CustomMetric", @@ -268,6 +269,8 @@ class ChangeHistoryResourceType(proto.Enum): ExpandedDataSet resource CHANNEL_GROUP (22): ChannelGroup resource + BIGQUERY_LINK (23): + BigQuery link resource ENHANCED_MEASUREMENT_SETTINGS (24): EnhancedMeasurementSettings resource DATA_REDACTION_SETTINGS (25): @@ -301,6 +304,7 @@ class ChangeHistoryResourceType(proto.Enum): ATTRIBUTION_SETTINGS = 20 EXPANDED_DATA_SET = 21 CHANNEL_GROUP = 22 + BIGQUERY_LINK = 23 ENHANCED_MEASUREMENT_SETTINGS = 24 DATA_REDACTION_SETTINGS = 25 SKADNETWORK_CONVERSION_VALUE_SCHEMA = 26 @@ -474,6 +478,11 @@ class Account(proto.Message): is soft-deleted or not. Deleted accounts are excluded from List results unless specifically requested. + gmp_organization (str): + Output only. The URI for a Google Marketing Platform + organization resource. Only set when this account is + connected to a GMP organization. Format: + marketingplatformadmin.googleapis.com/organizations/{org_id} """ name: str = proto.Field( @@ -502,6 +511,10 @@ class Account(proto.Message): proto.BOOL, number=6, ) + gmp_organization: str = proto.Field( + proto.STRING, + number=7, + ) class Property(proto.Message): @@ -2147,6 +2160,119 @@ class DefaultConversionValue(proto.Message): ) +class KeyEvent(proto.Message): + r"""A key event in a Google Analytics property. + + Attributes: + name (str): + Output only. Resource name of this key event. 
Format: + properties/{property}/keyEvents/{key_event} + event_name (str): + Immutable. The event name for this key event. + Examples: 'click', 'purchase' + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this key event was + created in the property. + deletable (bool): + Output only. If set to true, this event can + be deleted. + custom (bool): + Output only. If set to true, this key event + refers to a custom event. If set to false, this + key event refers to a default event in GA. + Default events typically have special meaning in + GA. Default events are usually created for you + by the GA system, but in some cases can be + created by property admins. Custom events count + towards the maximum number of custom key events + that may be created per property. + counting_method (google.analytics.admin_v1alpha.types.KeyEvent.CountingMethod): + Required. The method by which Key Events will + be counted across multiple events within a + session. + default_value (google.analytics.admin_v1alpha.types.KeyEvent.DefaultValue): + Optional. Defines a default value/currency + for a key event. + """ + + class CountingMethod(proto.Enum): + r"""The method by which Key Events will be counted across + multiple events within a session. + + Values: + COUNTING_METHOD_UNSPECIFIED (0): + Counting method not specified. + ONCE_PER_EVENT (1): + Each Event instance is considered a Key + Event. + ONCE_PER_SESSION (2): + An Event instance is considered a Key Event + at most once per session per user. + """ + COUNTING_METHOD_UNSPECIFIED = 0 + ONCE_PER_EVENT = 1 + ONCE_PER_SESSION = 2 + + class DefaultValue(proto.Message): + r"""Defines a default value/currency for a key event. + + Attributes: + numeric_value (float): + Required. This will be used to populate the "value" + parameter for all occurrences of this Key Event (specified + by event_name) where that parameter is unset. + currency_code (str): + Required. 
When an occurrence of this Key Event (specified by + event_name) has no set currency this currency will be + applied as the default. Must be in ISO 4217 currency code + format. + + See https://en.wikipedia.org/wiki/ISO_4217 for more + information. + """ + + numeric_value: float = proto.Field( + proto.DOUBLE, + number=1, + ) + currency_code: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + event_name: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + deletable: bool = proto.Field( + proto.BOOL, + number=4, + ) + custom: bool = proto.Field( + proto.BOOL, + number=5, + ) + counting_method: CountingMethod = proto.Field( + proto.ENUM, + number=6, + enum=CountingMethod, + ) + default_value: DefaultValue = proto.Field( + proto.MESSAGE, + number=7, + message=DefaultValue, + ) + + class GoogleSignalsSettings(proto.Message): r"""Settings values for Google Signals. This is a singleton resource. @@ -2852,6 +2978,12 @@ class BigQueryLink(proto.Message): excluded_events (MutableSequence[str]): The list of event names that will be excluded from exports. + dataset_location (str): + Required. Immutable. The geographic location + where the created BigQuery dataset should + reside. See + https://cloud.google.com/bigquery/docs/locations + for supported locations. 
""" name: str = proto.Field( @@ -2891,6 +3023,10 @@ class BigQueryLink(proto.Message): proto.STRING, number=8, ) + dataset_location: str = proto.Field( + proto.STRING, + number=10, + ) class EnhancedMeasurementSettings(proto.Message): diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py index f56358e27bf1..558c8aab67c5 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.22.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/async_client.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/async_client.py index 7a6f3f1afc40..a86919c6511d 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/async_client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -262,10 +261,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnalyticsAdminServiceClient).get_transport_class, - type(AnalyticsAdminServiceClient), - ) + get_transport_class = AnalyticsAdminServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py index 89723720b9af..baf4b1ed050c 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py @@ -901,7 +901,7 @@ def __init__( Type[AnalyticsAdminServiceTransport], Callable[..., AnalyticsAdminServiceTransport], ] = ( - type(self).get_transport_class(transport) + AnalyticsAdminServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AnalyticsAdminServiceTransport], transport) ) diff --git a/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json b/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json index 4d71e33bddd5..5b91879363a3 100644 --- a/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json +++ b/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-admin", - "version": "0.22.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-analytics-admin/scripts/fixup_admin_v1alpha_keywords.py 
b/packages/google-analytics-admin/scripts/fixup_admin_v1alpha_keywords.py index 3a2bd3eeaf1a..f64a22e0acb1 100644 --- a/packages/google-analytics-admin/scripts/fixup_admin_v1alpha_keywords.py +++ b/packages/google-analytics-admin/scripts/fixup_admin_v1alpha_keywords.py @@ -52,6 +52,7 @@ class adminCallTransformer(cst.CSTTransformer): 'create_access_binding': ('parent', 'access_binding', ), 'create_ad_sense_link': ('parent', 'adsense_link', ), 'create_audience': ('parent', 'audience', ), + 'create_big_query_link': ('parent', 'bigquery_link', ), 'create_calculated_metric': ('parent', 'calculated_metric_id', 'calculated_metric', ), 'create_channel_group': ('parent', 'channel_group', ), 'create_connected_site_tag': ('connected_site_tag', 'property', ), @@ -62,20 +63,22 @@ class adminCallTransformer(cst.CSTTransformer): 'create_display_video360_advertiser_link': ('parent', 'display_video_360_advertiser_link', ), 'create_display_video360_advertiser_link_proposal': ('parent', 'display_video_360_advertiser_link_proposal', ), 'create_event_create_rule': ('parent', 'event_create_rule', ), + 'create_event_edit_rule': ('parent', 'event_edit_rule', ), 'create_expanded_data_set': ('parent', 'expanded_data_set', ), 'create_firebase_link': ('parent', 'firebase_link', ), 'create_google_ads_link': ('parent', 'google_ads_link', ), + 'create_key_event': ('key_event', 'parent', ), 'create_measurement_protocol_secret': ('parent', 'measurement_protocol_secret', ), 'create_property': ('property', ), 'create_rollup_property': ('rollup_property', 'source_properties', ), 'create_rollup_property_source_link': ('parent', 'rollup_property_source_link', ), 'create_search_ads360_link': ('parent', 'search_ads_360_link', ), 'create_sk_ad_network_conversion_value_schema': ('parent', 'skadnetwork_conversion_value_schema', ), - 'create_subproperty': ('parent', 'subproperty', 'subproperty_event_filter', ), 'create_subproperty_event_filter': ('parent', 'subproperty_event_filter', ), 
'delete_access_binding': ('name', ), 'delete_account': ('name', ), 'delete_ad_sense_link': ('name', ), + 'delete_big_query_link': ('name', ), 'delete_calculated_metric': ('name', ), 'delete_channel_group': ('name', ), 'delete_connected_site_tag': ('property', 'tag_id', ), @@ -84,9 +87,11 @@ class adminCallTransformer(cst.CSTTransformer): 'delete_display_video360_advertiser_link': ('name', ), 'delete_display_video360_advertiser_link_proposal': ('name', ), 'delete_event_create_rule': ('name', ), + 'delete_event_edit_rule': ('name', ), 'delete_expanded_data_set': ('name', ), 'delete_firebase_link': ('name', ), 'delete_google_ads_link': ('name', ), + 'delete_key_event': ('name', ), 'delete_measurement_protocol_secret': ('name', ), 'delete_property': ('name', ), 'delete_rollup_property_source_link': ('name', ), @@ -114,9 +119,11 @@ class adminCallTransformer(cst.CSTTransformer): 'get_display_video360_advertiser_link_proposal': ('name', ), 'get_enhanced_measurement_settings': ('name', ), 'get_event_create_rule': ('name', ), + 'get_event_edit_rule': ('name', ), 'get_expanded_data_set': ('name', ), 'get_global_site_tag': ('name', ), 'get_google_signals_settings': ('name', ), + 'get_key_event': ('name', ), 'get_measurement_protocol_secret': ('name', ), 'get_property': ('name', ), 'get_rollup_property_source_link': ('name', ), @@ -139,9 +146,11 @@ class adminCallTransformer(cst.CSTTransformer): 'list_display_video360_advertiser_link_proposals': ('parent', 'page_size', 'page_token', ), 'list_display_video360_advertiser_links': ('parent', 'page_size', 'page_token', ), 'list_event_create_rules': ('parent', 'page_size', 'page_token', ), + 'list_event_edit_rules': ('parent', 'page_size', 'page_token', ), 'list_expanded_data_sets': ('parent', 'page_size', 'page_token', ), 'list_firebase_links': ('parent', 'page_size', 'page_token', ), 'list_google_ads_links': ('parent', 'page_size', 'page_token', ), + 'list_key_events': ('parent', 'page_size', 'page_token', ), 
'list_measurement_protocol_secrets': ('parent', 'page_size', 'page_token', ), 'list_properties': ('filter', 'page_size', 'page_token', 'show_deleted', ), 'list_rollup_property_source_links': ('parent', 'page_size', 'page_token', ), @@ -149,6 +158,8 @@ class adminCallTransformer(cst.CSTTransformer): 'list_sk_ad_network_conversion_value_schemas': ('parent', 'page_size', 'page_token', ), 'list_subproperty_event_filters': ('parent', 'page_size', 'page_token', ), 'provision_account_ticket': ('account', 'redirect_uri', ), + 'provision_subproperty': ('subproperty', 'subproperty_event_filter', ), + 'reorder_event_edit_rules': ('parent', 'event_edit_rules', ), 'run_access_report': ('entity', 'dimensions', 'metrics', 'date_ranges', 'dimension_filter', 'metric_filter', 'offset', 'limit', 'time_zone', 'order_bys', 'return_entity_quota', 'include_all_users', 'expand_groups', ), 'search_change_history_events': ('account', 'property', 'resource_type', 'action', 'actor_email', 'earliest_change_time', 'latest_change_time', 'page_size', 'page_token', ), 'set_automated_ga4_configuration_opt_out': ('property', 'opt_out', ), @@ -156,6 +167,7 @@ class adminCallTransformer(cst.CSTTransformer): 'update_account': ('account', 'update_mask', ), 'update_attribution_settings': ('attribution_settings', 'update_mask', ), 'update_audience': ('audience', 'update_mask', ), + 'update_big_query_link': ('bigquery_link', 'update_mask', ), 'update_calculated_metric': ('calculated_metric', 'update_mask', ), 'update_channel_group': ('channel_group', 'update_mask', ), 'update_conversion_event': ('conversion_event', 'update_mask', ), @@ -167,9 +179,11 @@ class adminCallTransformer(cst.CSTTransformer): 'update_display_video360_advertiser_link': ('update_mask', 'display_video_360_advertiser_link', ), 'update_enhanced_measurement_settings': ('enhanced_measurement_settings', 'update_mask', ), 'update_event_create_rule': ('event_create_rule', 'update_mask', ), + 'update_event_edit_rule': ('event_edit_rule', 
'update_mask', ), 'update_expanded_data_set': ('expanded_data_set', 'update_mask', ), 'update_google_ads_link': ('update_mask', 'google_ads_link', ), 'update_google_signals_settings': ('google_signals_settings', 'update_mask', ), + 'update_key_event': ('key_event', 'update_mask', ), 'update_measurement_protocol_secret': ('measurement_protocol_secret', 'update_mask', ), 'update_property': ('property', 'update_mask', ), 'update_search_ads360_link': ('update_mask', 'search_ads_360_link', ), diff --git a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py index b81a44f5eed5..24e6ceb09267 100644 --- a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py +++ b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py @@ -1235,6 +1235,7 @@ def test_get_account(request_type, transport: str = "grpc"): display_name="display_name_value", region_code="region_code_value", deleted=True, + gmp_organization="gmp_organization_value", ) response = client.get_account(request) @@ -1250,6 +1251,7 @@ def test_get_account(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.region_code == "region_code_value" assert response.deleted is True + assert response.gmp_organization == "gmp_organization_value" def test_get_account_empty_call(): @@ -1352,6 +1354,7 @@ async def test_get_account_empty_call_async(): display_name="display_name_value", region_code="region_code_value", deleted=True, + gmp_organization="gmp_organization_value", ) ) response = await client.get_account() @@ -1383,22 +1386,23 @@ async def test_get_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() 
client._client._transport._wrapped_methods[ client._client._transport.get_account - ] = mock_object + ] = mock_rpc request = {} await client.get_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1423,6 +1427,7 @@ async def test_get_account_async( display_name="display_name_value", region_code="region_code_value", deleted=True, + gmp_organization="gmp_organization_value", ) ) response = await client.get_account(request) @@ -1439,6 +1444,7 @@ async def test_get_account_async( assert response.display_name == "display_name_value" assert response.region_code == "region_code_value" assert response.deleted is True + assert response.gmp_organization == "gmp_organization_value" @pytest.mark.asyncio @@ -1749,22 +1755,23 @@ async def test_list_accounts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_accounts - ] = mock_object + ] = mock_rpc request = {} await client.list_accounts(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_accounts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2154,22 +2161,23 @@ async def test_delete_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_account - ] = mock_object + ] = mock_rpc request = {} await client.delete_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2370,6 +2378,7 @@ def test_update_account(request_type, transport: str = "grpc"): display_name="display_name_value", region_code="region_code_value", deleted=True, + gmp_organization="gmp_organization_value", ) response = client.update_account(request) @@ -2385,6 +2394,7 @@ def test_update_account(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.region_code == "region_code_value" assert response.deleted is True + assert response.gmp_organization == "gmp_organization_value" def test_update_account_empty_call(): @@ -2483,6 +2493,7 @@ async def test_update_account_empty_call_async(): display_name="display_name_value", region_code="region_code_value", deleted=True, + gmp_organization="gmp_organization_value", ) ) response = await client.update_account() @@ -2514,22 +2525,23 @@ async def test_update_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with 
mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_account - ] = mock_object + ] = mock_rpc request = {} await client.update_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2554,6 +2566,7 @@ async def test_update_account_async( display_name="display_name_value", region_code="region_code_value", deleted=True, + gmp_organization="gmp_organization_value", ) ) response = await client.update_account(request) @@ -2570,6 +2583,7 @@ async def test_update_account_async( assert response.display_name == "display_name_value" assert response.region_code == "region_code_value" assert response.deleted is True + assert response.gmp_organization == "gmp_organization_value" @pytest.mark.asyncio @@ -2903,22 +2917,23 @@ async def test_provision_account_ticket_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.provision_account_ticket - ] = mock_object + ] = mock_rpc request = {} await client.provision_account_ticket(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.provision_account_ticket(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3140,22 +3155,23 @@ async def test_list_account_summaries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_account_summaries - ] = mock_object + ] = mock_rpc request = {} await client.list_account_summaries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_account_summaries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3587,22 +3603,23 @@ async def test_get_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_property - ] = mock_object + ] = mock_rpc request = {} await client.get_property(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3965,22 +3982,23 @@ async def test_list_properties_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_properties - ] = mock_object + ] = mock_rpc request = {} await client.list_properties(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_properties(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4397,22 +4415,23 @@ async def test_create_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_property - ] = mock_object + ] = mock_rpc request = {} await client.create_property(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4738,22 +4757,23 @@ async def test_delete_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_property - ] = mock_object + ] = mock_rpc request = {} await client.delete_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5134,22 +5154,23 @@ async def test_update_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_property - ] = mock_object + ] = mock_rpc request = {} await client.update_property(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5535,22 +5556,23 @@ async def test_create_firebase_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_firebase_link - ] = mock_object + ] = mock_rpc request = {} await client.create_firebase_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_firebase_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5927,22 +5949,23 @@ async def test_delete_firebase_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_firebase_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_firebase_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_firebase_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6307,22 +6330,23 @@ async def test_list_firebase_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_firebase_links - ] = mock_object + ] = mock_rpc request = {} await client.list_firebase_links(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_firebase_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6899,22 +6923,23 @@ async def test_get_global_site_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_global_site_tag - ] = mock_object + ] = mock_rpc request = {} await client.get_global_site_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_global_site_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7298,22 +7323,23 @@ async def test_create_google_ads_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_google_ads_link - ] = mock_object + ] = mock_rpc request = {} await client.create_google_ads_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7707,22 +7733,23 @@ async def test_update_google_ads_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_google_ads_link - ] = mock_object + ] = mock_rpc request = {} await client.update_google_ads_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8104,22 +8131,23 @@ async def test_delete_google_ads_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_google_ads_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_google_ads_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8485,22 +8513,23 @@ async def test_list_google_ads_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_google_ads_links - ] = mock_object + ] = mock_rpc request = {} await client.list_google_ads_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_google_ads_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9090,22 +9119,23 @@ async def test_get_data_sharing_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_sharing_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_data_sharing_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_sharing_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9494,22 +9524,23 @@ async def test_get_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.get_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9888,22 +9919,23 @@ async def test_list_measurement_protocol_secrets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_measurement_protocol_secrets - ] = mock_object + ] = mock_rpc request = {} await client.list_measurement_protocol_secrets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_measurement_protocol_secrets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10488,22 +10520,23 @@ async def test_create_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.create_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10891,22 +10924,23 @@ async def test_delete_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.delete_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11272,22 +11306,23 @@ async def test_update_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.update_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11679,22 +11714,23 @@ async def test_acknowledge_user_data_collection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.acknowledge_user_data_collection - ] = mock_object + ] = mock_rpc request = {} await client.acknowledge_user_data_collection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.acknowledge_user_data_collection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11983,22 +12019,23 @@ async def test_get_sk_ad_network_conversion_value_schema_async_use_cached_wrappe ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_sk_ad_network_conversion_value_schema - ] = mock_object + ] = mock_rpc request = {} await client.get_sk_ad_network_conversion_value_schema(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_sk_ad_network_conversion_value_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12382,22 +12419,23 @@ async def test_create_sk_ad_network_conversion_value_schema_async_use_cached_wra ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_sk_ad_network_conversion_value_schema - ] = mock_object + ] = mock_rpc request = {} await client.create_sk_ad_network_conversion_value_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_sk_ad_network_conversion_value_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12789,22 +12827,23 @@ async def test_delete_sk_ad_network_conversion_value_schema_async_use_cached_wra ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_sk_ad_network_conversion_value_schema - ] = mock_object + ] = mock_rpc request = {} await client.delete_sk_ad_network_conversion_value_schema(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_sk_ad_network_conversion_value_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13175,22 +13214,23 @@ async def test_update_sk_ad_network_conversion_value_schema_async_use_cached_wra ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_sk_ad_network_conversion_value_schema - ] = mock_object + ] = mock_rpc request = {} await client.update_sk_ad_network_conversion_value_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_sk_ad_network_conversion_value_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13589,22 +13629,23 @@ async def test_list_sk_ad_network_conversion_value_schemas_async_use_cached_wrap ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sk_ad_network_conversion_value_schemas - ] = mock_object + ] = mock_rpc request = {} await client.list_sk_ad_network_conversion_value_schemas(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sk_ad_network_conversion_value_schemas(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14201,22 +14242,23 @@ async def test_search_change_history_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_change_history_events - ] = mock_object + ] = mock_rpc request = {} await client.search_change_history_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_change_history_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14716,22 +14758,23 @@ async def test_get_google_signals_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_google_signals_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_google_signals_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15116,22 +15159,23 @@ async def test_update_google_signals_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_google_signals_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_google_signals_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15536,22 +15580,23 @@ async def test_create_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.create_conversion_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15956,22 +16001,23 @@ async def test_update_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.update_conversion_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16379,22 +16425,23 @@ async def test_get_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.get_conversion_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16771,22 +16818,23 @@ async def test_delete_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversion_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17152,22 +17200,23 @@ async def test_list_conversion_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversion_events - ] = mock_object + ] = mock_rpc request = {} await client.list_conversion_events(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversion_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17568,11 +17617,11 @@ async def test_list_conversion_events_async_pages(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, + analytics_admin.CreateKeyEventRequest, dict, ], ) -def test_get_display_video360_advertiser_link(request_type, transport: str = "grpc"): +def test_create_key_event(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17583,31 +17632,33 @@ def test_get_display_video360_advertiser_link(request_type, transport: str = "gr request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_key_event), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.DisplayVideo360AdvertiserLink( + call.return_value = resources.KeyEvent( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.KeyEvent.CountingMethod.ONCE_PER_EVENT, ) - response = client.get_display_video360_advertiser_link(request) + response = client.create_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.CreateKeyEventRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DisplayVideo360AdvertiserLink) + assert isinstance(response, resources.KeyEvent) assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.event_name == "event_name_value" + assert response.deletable is True + assert response.custom is True + assert response.counting_method == resources.KeyEvent.CountingMethod.ONCE_PER_EVENT -def test_get_display_video360_advertiser_link_empty_call(): +def test_create_key_event_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -17616,19 +17667,17 @@ def test_get_display_video360_advertiser_link_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_key_event), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_display_video360_advertiser_link() + client.create_key_event() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() + assert args[0] == analytics_admin.CreateKeyEventRequest() -def test_get_display_video360_advertiser_link_non_empty_request_with_auto_populated_field(): +def test_create_key_event_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -17639,26 +17688,24 @@ def test_get_display_video360_advertiser_link_non_empty_request_with_auto_popula # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest( - name="name_value", + request = analytics_admin.CreateKeyEventRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_key_event), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_display_video360_advertiser_link(request=request) + client.create_key_event(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkRequest( - name="name_value", + assert args[0] == analytics_admin.CreateKeyEventRequest( + parent="parent_value", ) -def test_get_display_video360_advertiser_link_use_cached_wrapped_rpc(): +def test_create_key_event_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17672,10 +17719,7 @@ def test_get_display_video360_advertiser_link_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_display_video360_advertiser_link - in client._transport._wrapped_methods - ) + assert client._transport.create_key_event in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -17683,15 +17727,15 @@ def test_get_display_video360_advertiser_link_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_display_video360_advertiser_link + client._transport.create_key_event ] = mock_rpc request = {} - client.get_display_video360_advertiser_link(request) + client.create_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_display_video360_advertiser_link(request) + client.create_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17699,7 +17743,7 @@ def test_get_display_video360_advertiser_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_empty_call_async(): +async def test_create_key_event_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -17708,25 +17752,25 @@ async def test_get_display_video360_advertiser_link_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_key_event), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLink( + resources.KeyEvent( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.KeyEvent.CountingMethod.ONCE_PER_EVENT, ) ) - response = await client.get_display_video360_advertiser_link() + response = await client.create_key_event() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() + assert args[0] == analytics_admin.CreateKeyEventRequest() @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_async_use_cached_wrapped_rpc( +async def test_create_key_event_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -17743,33 +17787,33 @@ async def test_get_display_video360_advertiser_link_async_use_cached_wrapped_rpc # Ensure method has been cached assert ( - client._client._transport.get_display_video360_advertiser_link + client._client._transport.create_key_event in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_display_video360_advertiser_link - ] = mock_object + client._client._transport.create_key_event + ] = mock_rpc request = {} - await client.get_display_video360_advertiser_link(request) + await client.create_key_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_display_video360_advertiser_link(request) + await client.create_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, +async def test_create_key_event_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.CreateKeyEventRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17781,54 +17825,54 @@ async def test_get_display_video360_advertiser_link_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_key_event), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLink( + resources.KeyEvent( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.KeyEvent.CountingMethod.ONCE_PER_EVENT, ) ) - response = await client.get_display_video360_advertiser_link(request) + response = await client.create_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.CreateKeyEventRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DisplayVideo360AdvertiserLink) + assert isinstance(response, resources.KeyEvent) assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.event_name == "event_name_value" + assert response.deletable is True + assert response.custom is True + assert response.counting_method == resources.KeyEvent.CountingMethod.ONCE_PER_EVENT @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_async_from_dict(): - await test_get_display_video360_advertiser_link_async(request_type=dict) +async def test_create_key_event_async_from_dict(): + await test_create_key_event_async(request_type=dict) -def test_get_display_video360_advertiser_link_field_headers(): +def test_create_key_event_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.CreateKeyEventRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link), "__call__" - ) as call: - call.return_value = resources.DisplayVideo360AdvertiserLink() - client.get_display_video360_advertiser_link(request) + with mock.patch.object(type(client.transport.create_key_event), "__call__") as call: + call.return_value = resources.KeyEvent() + client.create_key_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -17839,30 +17883,26 @@ def test_get_display_video360_advertiser_link_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_field_headers_async(): +async def test_create_key_event_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.CreateKeyEventRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLink() - ) - await client.get_display_video360_advertiser_link(request) + with mock.patch.object(type(client.transport.create_key_event), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyEvent()) + await client.create_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -17873,37 +17913,39 @@ async def test_get_display_video360_advertiser_link_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_display_video360_advertiser_link_flattened(): +def test_create_key_event_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_key_event), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.DisplayVideo360AdvertiserLink() + call.return_value = resources.KeyEvent() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_display_video360_advertiser_link( - name="name_value", + client.create_key_event( + parent="parent_value", + key_event=resources.KeyEvent(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].key_event + mock_val = resources.KeyEvent(name="name_value") assert arg == mock_val -def test_get_display_video360_advertiser_link_flattened_error(): +def test_create_key_event_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17911,45 +17953,46 @@ def test_get_display_video360_advertiser_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_display_video360_advertiser_link( - analytics_admin.GetDisplayVideo360AdvertiserLinkRequest(), - name="name_value", + client.create_key_event( + analytics_admin.CreateKeyEventRequest(), + parent="parent_value", + key_event=resources.KeyEvent(name="name_value"), ) @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_flattened_async(): +async def test_create_key_event_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_key_event), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.DisplayVideo360AdvertiserLink() + call.return_value = resources.KeyEvent() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLink() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyEvent()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_display_video360_advertiser_link( - name="name_value", + response = await client.create_key_event( + parent="parent_value", + key_event=resources.KeyEvent(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].key_event + mock_val = resources.KeyEvent(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_flattened_error_async(): +async def test_create_key_event_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17957,20 +18000,21 @@ async def test_get_display_video360_advertiser_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_display_video360_advertiser_link( - analytics_admin.GetDisplayVideo360AdvertiserLinkRequest(), - name="name_value", + await client.create_key_event( + analytics_admin.CreateKeyEventRequest(), + parent="parent_value", + key_event=resources.KeyEvent(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, + analytics_admin.UpdateKeyEventRequest, dict, ], ) -def test_list_display_video360_advertiser_links(request_type, transport: str = "grpc"): +def test_update_key_event(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17981,27 +18025,33 @@ def test_list_display_video360_advertiser_links(request_type, transport: str = " request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_display_video360_advertiser_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_key_event), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - next_page_token="next_page_token_value", + call.return_value = resources.KeyEvent( + name="name_value", + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.KeyEvent.CountingMethod.ONCE_PER_EVENT, ) - response = client.list_display_video360_advertiser_links(request) + response = client.update_key_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() + request = analytics_admin.UpdateKeyEventRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDisplayVideo360AdvertiserLinksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.KeyEvent) + assert response.name == "name_value" + assert response.event_name == "event_name_value" + assert response.deletable is True + assert response.custom is True + assert response.counting_method == resources.KeyEvent.CountingMethod.ONCE_PER_EVENT -def test_list_display_video360_advertiser_links_empty_call(): +def test_update_key_event_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -18010,19 +18060,17 @@ def test_list_display_video360_advertiser_links_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_display_video360_advertiser_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_key_event), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_display_video360_advertiser_links() + client.update_key_event() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() + assert args[0] == analytics_admin.UpdateKeyEventRequest() -def test_list_display_video360_advertiser_links_non_empty_request_with_auto_populated_field(): +def test_update_key_event_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -18033,28 +18081,20 @@ def test_list_display_video360_advertiser_links_non_empty_request_with_auto_popu # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest( - parent="parent_value", - page_token="page_token_value", - ) + request = analytics_admin.UpdateKeyEventRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_display_video360_advertiser_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_key_event), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_display_video360_advertiser_links(request=request) + client.update_key_event(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListDisplayVideo360AdvertiserLinksRequest( - parent="parent_value", - page_token="page_token_value", - ) + assert args[0] == analytics_admin.UpdateKeyEventRequest() -def test_list_display_video360_advertiser_links_use_cached_wrapped_rpc(): +def test_update_key_event_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18068,10 +18108,7 @@ def test_list_display_video360_advertiser_links_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_display_video360_advertiser_links - in client._transport._wrapped_methods - ) + assert client._transport.update_key_event in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -18079,15 +18116,15 @@ def test_list_display_video360_advertiser_links_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_display_video360_advertiser_links + client._transport.update_key_event ] = mock_rpc request = {} - client.list_display_video360_advertiser_links(request) + client.update_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_display_video360_advertiser_links(request) + client.update_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -18095,7 +18132,7 @@ def test_list_display_video360_advertiser_links_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_display_video360_advertiser_links_empty_call_async(): +async def test_update_key_event_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -18104,23 +18141,25 @@ async def test_list_display_video360_advertiser_links_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_display_video360_advertiser_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_key_event), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - next_page_token="next_page_token_value", + resources.KeyEvent( + name="name_value", + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.KeyEvent.CountingMethod.ONCE_PER_EVENT, ) ) - response = await client.list_display_video360_advertiser_links() + response = await client.update_key_event() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() + assert args[0] == analytics_admin.UpdateKeyEventRequest() @pytest.mark.asyncio -async def test_list_display_video360_advertiser_links_async_use_cached_wrapped_rpc( +async def test_update_key_event_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -18137,33 +18176,33 @@ async def test_list_display_video360_advertiser_links_async_use_cached_wrapped_r # Ensure method has been cached assert ( - client._client._transport.list_display_video360_advertiser_links + client._client._transport.update_key_event in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_display_video360_advertiser_links - ] = mock_object + client._client._transport.update_key_event + ] = mock_rpc request = {} - await client.list_display_video360_advertiser_links(request) + await client.update_key_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_display_video360_advertiser_links(request) + await client.update_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_display_video360_advertiser_links_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, +async def test_update_key_event_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.UpdateKeyEventRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18175,50 +18214,54 @@ async def test_list_display_video360_advertiser_links_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_display_video360_advertiser_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_key_event), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - next_page_token="next_page_token_value", + resources.KeyEvent( + name="name_value", + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.KeyEvent.CountingMethod.ONCE_PER_EVENT, ) ) - response = await client.list_display_video360_advertiser_links(request) + response = await client.update_key_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() + request = analytics_admin.UpdateKeyEventRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDisplayVideo360AdvertiserLinksAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.KeyEvent) + assert response.name == "name_value" + assert response.event_name == "event_name_value" + assert response.deletable is True + assert response.custom is True + assert response.counting_method == resources.KeyEvent.CountingMethod.ONCE_PER_EVENT @pytest.mark.asyncio -async def test_list_display_video360_advertiser_links_async_from_dict(): - await test_list_display_video360_advertiser_links_async(request_type=dict) +async def test_update_key_event_async_from_dict(): + await test_update_key_event_async(request_type=dict) -def test_list_display_video360_advertiser_links_field_headers(): +def test_update_key_event_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() + request = analytics_admin.UpdateKeyEventRequest() - request.parent = "parent_value" + request.key_event.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_display_video360_advertiser_links), "__call__" - ) as call: - call.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() - client.list_display_video360_advertiser_links(request) + with mock.patch.object(type(client.transport.update_key_event), "__call__") as call: + call.return_value = resources.KeyEvent() + client.update_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -18229,30 +18272,26 @@ def test_list_display_video360_advertiser_links_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "key_event.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_display_video360_advertiser_links_field_headers_async(): +async def test_update_key_event_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() + request = analytics_admin.UpdateKeyEventRequest() - request.parent = "parent_value" + request.key_event.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_display_video360_advertiser_links), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() - ) - await client.list_display_video360_advertiser_links(request) + with mock.patch.object(type(client.transport.update_key_event), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyEvent()) + await client.update_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -18263,37 +18302,39 @@ async def test_list_display_video360_advertiser_links_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "key_event.name=name_value", ) in kw["metadata"] -def test_list_display_video360_advertiser_links_flattened(): +def test_update_key_event_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_display_video360_advertiser_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_key_event), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() + call.return_value = resources.KeyEvent() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_display_video360_advertiser_links( - parent="parent_value", + client.update_key_event( + key_event=resources.KeyEvent(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].key_event + mock_val = resources.KeyEvent(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_list_display_video360_advertiser_links_flattened_error(): +def test_update_key_event_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -18301,45 +18342,46 @@ def test_list_display_video360_advertiser_links_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_display_video360_advertiser_links( - analytics_admin.ListDisplayVideo360AdvertiserLinksRequest(), - parent="parent_value", + client.update_key_event( + analytics_admin.UpdateKeyEventRequest(), + key_event=resources.KeyEvent(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_list_display_video360_advertiser_links_flattened_async(): +async def test_update_key_event_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_display_video360_advertiser_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_key_event), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() + call.return_value = resources.KeyEvent() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyEvent()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_display_video360_advertiser_links( - parent="parent_value", + response = await client.update_key_event( + key_event=resources.KeyEvent(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].key_event + mock_val = resources.KeyEvent(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_list_display_video360_advertiser_links_flattened_error_async(): +async def test_update_key_event_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -18347,314 +18389,105 @@ async def test_list_display_video360_advertiser_links_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_display_video360_advertiser_links( - analytics_admin.ListDisplayVideo360AdvertiserLinksRequest(), - parent="parent_value", + await client.update_key_event( + analytics_admin.UpdateKeyEventRequest(), + key_event=resources.KeyEvent(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_display_video360_advertiser_links_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetKeyEventRequest, + dict, + ], +) +def test_get_key_event(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_display_video360_advertiser_links), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[], - next_page_token="def", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - ], - ), - RuntimeError, + with mock.patch.object(type(client.transport.get_key_event), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.KeyEvent( + name="name_value", + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.KeyEvent.CountingMethod.ONCE_PER_EVENT, ) + response = client.get_key_event(request) - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_display_video360_advertiser_links( - request={}, retry=retry, timeout=timeout - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.GetKeyEventRequest() + assert args[0] == request - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.KeyEvent) + assert response.name == "name_value" + assert response.event_name == "event_name_value" + assert response.deletable is True + assert response.custom is True + assert response.counting_method == resources.KeyEvent.CountingMethod.ONCE_PER_EVENT - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, resources.DisplayVideo360AdvertiserLink) for i in results + +def test_get_key_event_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_key_event), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client.get_key_event() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetKeyEventRequest() -def test_list_display_video360_advertiser_links_pages(transport_name: str = "grpc"): +def test_get_key_event_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.GetKeyEventRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_display_video360_advertiser_links), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[], - next_page_token="def", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - ], - ), - RuntimeError, - ) - pages = list(client.list_display_video360_advertiser_links(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_display_video360_advertiser_links_async_pager(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_display_video360_advertiser_links), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[], - next_page_token="def", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_display_video360_advertiser_links( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all( - isinstance(i, resources.DisplayVideo360AdvertiserLink) for i in responses - ) - - -@pytest.mark.asyncio -async def test_list_display_video360_advertiser_links_async_pages(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_display_video360_advertiser_links), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[], - next_page_token="def", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_display_video360_advertiser_links(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, - dict, - ], -) -def test_create_display_video360_advertiser_link(request_type, transport: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = resources.DisplayVideo360AdvertiserLink( - name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", - ) - response = client.create_display_video360_advertiser_link(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.DisplayVideo360AdvertiserLink) - assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" - - -def test_create_display_video360_advertiser_link_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_display_video360_advertiser_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() - - -def test_create_display_video360_advertiser_link_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest( - parent="parent_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_key_event), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_display_video360_advertiser_link(request=request) + client.get_key_event(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest( - parent="parent_value", + assert args[0] == analytics_admin.GetKeyEventRequest( + name="name_value", ) -def test_create_display_video360_advertiser_link_use_cached_wrapped_rpc(): +def test_get_key_event_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18668,26 +18501,21 @@ def test_create_display_video360_advertiser_link_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_display_video360_advertiser_link - in client._transport._wrapped_methods - ) + assert client._transport.get_key_event in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_display_video360_advertiser_link - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_key_event] = mock_rpc request = {} - client.create_display_video360_advertiser_link(request) + client.get_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_display_video360_advertiser_link(request) + client.get_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -18695,7 +18523,7 @@ def test_create_display_video360_advertiser_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_empty_call_async(): +async def test_get_key_event_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -18704,25 +18532,25 @@ async def test_create_display_video360_advertiser_link_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_key_event), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLink( + resources.KeyEvent( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.KeyEvent.CountingMethod.ONCE_PER_EVENT, ) ) - response = await client.create_display_video360_advertiser_link() + response = await client.get_key_event() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() + assert args[0] == analytics_admin.GetKeyEventRequest() @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_async_use_cached_wrapped_rpc( +async def test_get_key_event_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -18739,33 +18567,33 @@ async def test_create_display_video360_advertiser_link_async_use_cached_wrapped_ # Ensure method has been cached assert ( - client._client._transport.create_display_video360_advertiser_link + client._client._transport.get_key_event in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_display_video360_advertiser_link - ] = mock_object + client._client._transport.get_key_event + ] = mock_rpc request = {} - await client.create_display_video360_advertiser_link(request) + await client.get_key_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_display_video360_advertiser_link(request) + await client.get_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, +async def test_get_key_event_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.GetKeyEventRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18777,54 +18605,54 @@ async def test_create_display_video360_advertiser_link_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_key_event), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLink( + resources.KeyEvent( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.KeyEvent.CountingMethod.ONCE_PER_EVENT, ) ) - response = await client.create_display_video360_advertiser_link(request) + response = await client.get_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.GetKeyEventRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DisplayVideo360AdvertiserLink) + assert isinstance(response, resources.KeyEvent) assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.event_name == "event_name_value" + assert response.deletable is True + assert response.custom is True + assert response.counting_method == resources.KeyEvent.CountingMethod.ONCE_PER_EVENT @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_async_from_dict(): - await test_create_display_video360_advertiser_link_async(request_type=dict) +async def test_get_key_event_async_from_dict(): + await test_get_key_event_async(request_type=dict) -def test_create_display_video360_advertiser_link_field_headers(): +def test_get_key_event_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.GetKeyEventRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link), "__call__" - ) as call: - call.return_value = resources.DisplayVideo360AdvertiserLink() - client.create_display_video360_advertiser_link(request) + with mock.patch.object(type(client.transport.get_key_event), "__call__") as call: + call.return_value = resources.KeyEvent() + client.get_key_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -18835,30 +18663,26 @@ def test_create_display_video360_advertiser_link_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_field_headers_async(): +async def test_get_key_event_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.GetKeyEventRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLink() - ) - await client.create_display_video360_advertiser_link(request) + with mock.patch.object(type(client.transport.get_key_event), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyEvent()) + await client.get_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -18869,43 +18693,35 @@ async def test_create_display_video360_advertiser_link_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_display_video360_advertiser_link_flattened(): +def test_get_key_event_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_key_event), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.DisplayVideo360AdvertiserLink() + call.return_value = resources.KeyEvent() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_display_video360_advertiser_link( - parent="parent_value", - display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( - name="name_value" - ), + client.get_key_event( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].display_video_360_advertiser_link - mock_val = resources.DisplayVideo360AdvertiserLink(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_display_video360_advertiser_link_flattened_error(): +def test_get_key_event_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -18913,54 +18729,41 @@ def test_create_display_video360_advertiser_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_display_video360_advertiser_link( - analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest(), - parent="parent_value", - display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( - name="name_value" - ), + client.get_key_event( + analytics_admin.GetKeyEventRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_flattened_async(): +async def test_get_key_event_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_key_event), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.DisplayVideo360AdvertiserLink() + call.return_value = resources.KeyEvent() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLink() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyEvent()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_display_video360_advertiser_link( - parent="parent_value", - display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( - name="name_value" - ), + response = await client.get_key_event( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].display_video_360_advertiser_link - mock_val = resources.DisplayVideo360AdvertiserLink(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_flattened_error_async(): +async def test_get_key_event_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -18968,23 +18771,20 @@ async def test_create_display_video360_advertiser_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_display_video360_advertiser_link( - analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest(), - parent="parent_value", - display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( - name="name_value" - ), + await client.get_key_event( + analytics_admin.GetKeyEventRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, + analytics_admin.DeleteKeyEventRequest, dict, ], ) -def test_delete_display_video360_advertiser_link(request_type, transport: str = "grpc"): +def test_delete_key_event(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18995,24 +18795,22 @@ def test_delete_display_video360_advertiser_link(request_type, transport: str = request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_key_event), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_display_video360_advertiser_link(request) + response = client.delete_key_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.DeleteKeyEventRequest() assert args[0] == request # Establish that the response is the type that we expect. assert response is None -def test_delete_display_video360_advertiser_link_empty_call(): +def test_delete_key_event_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceClient( @@ -19021,19 +18819,17 @@ def test_delete_display_video360_advertiser_link_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_key_event), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_display_video360_advertiser_link() + client.delete_key_event() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() + assert args[0] == analytics_admin.DeleteKeyEventRequest() -def test_delete_display_video360_advertiser_link_non_empty_request_with_auto_populated_field(): +def test_delete_key_event_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -19044,26 +18840,24 @@ def test_delete_display_video360_advertiser_link_non_empty_request_with_auto_pop # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest( + request = analytics_admin.DeleteKeyEventRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_key_event), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_display_video360_advertiser_link(request=request) + client.delete_key_event(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest( + assert args[0] == analytics_admin.DeleteKeyEventRequest( name="name_value", ) -def test_delete_display_video360_advertiser_link_use_cached_wrapped_rpc(): +def test_delete_key_event_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19077,10 +18871,7 @@ def test_delete_display_video360_advertiser_link_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_display_video360_advertiser_link - in client._transport._wrapped_methods - ) + assert client._transport.delete_key_event in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -19088,15 +18879,15 @@ def test_delete_display_video360_advertiser_link_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_display_video360_advertiser_link + client._transport.delete_key_event ] = mock_rpc request = {} - client.delete_display_video360_advertiser_link(request) + client.delete_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_display_video360_advertiser_link(request) + client.delete_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -19104,7 +18895,7 @@ def test_delete_display_video360_advertiser_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_empty_call_async(): +async def test_delete_key_event_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -19113,19 +18904,17 @@ async def test_delete_display_video360_advertiser_link_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_key_event), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_display_video360_advertiser_link() + response = await client.delete_key_event() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() + assert args[0] == analytics_admin.DeleteKeyEventRequest() @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_async_use_cached_wrapped_rpc( +async def test_delete_key_event_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -19142,33 +18931,33 @@ async def test_delete_display_video360_advertiser_link_async_use_cached_wrapped_ # Ensure method has been cached assert ( - client._client._transport.delete_display_video360_advertiser_link + client._client._transport.delete_key_event in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_display_video360_advertiser_link - ] = mock_object + client._client._transport.delete_key_event + ] = mock_rpc request = {} - await client.delete_display_video360_advertiser_link(request) + await client.delete_key_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_display_video360_advertiser_link(request) + await client.delete_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, +async def test_delete_key_event_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.DeleteKeyEventRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19180,17 +18969,15 @@ async def test_delete_display_video360_advertiser_link_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_key_event), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_display_video360_advertiser_link(request) + response = await client.delete_key_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.DeleteKeyEventRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -19198,27 +18985,25 @@ async def test_delete_display_video360_advertiser_link_async( @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_async_from_dict(): - await test_delete_display_video360_advertiser_link_async(request_type=dict) +async def test_delete_key_event_async_from_dict(): + await test_delete_key_event_async(request_type=dict) -def test_delete_display_video360_advertiser_link_field_headers(): +def test_delete_key_event_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.DeleteKeyEventRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_key_event), "__call__") as call: call.return_value = None - client.delete_display_video360_advertiser_link(request) + client.delete_key_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -19234,23 +19019,21 @@ def test_delete_display_video360_advertiser_link_field_headers(): @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_field_headers_async(): +async def test_delete_key_event_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.DeleteKeyEventRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_key_event), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_display_video360_advertiser_link(request) + await client.delete_key_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -19265,20 +19048,18 @@ async def test_delete_display_video360_advertiser_link_field_headers_async(): ) in kw["metadata"] -def test_delete_display_video360_advertiser_link_flattened(): +def test_delete_key_event_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_key_event), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_display_video360_advertiser_link( + client.delete_key_event( name="name_value", ) @@ -19291,7 +19072,7 @@ def test_delete_display_video360_advertiser_link_flattened(): assert arg == mock_val -def test_delete_display_video360_advertiser_link_flattened_error(): +def test_delete_key_event_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -19299,29 +19080,27 @@ def test_delete_display_video360_advertiser_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_display_video360_advertiser_link( - analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest(), + client.delete_key_event( + analytics_admin.DeleteKeyEventRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_flattened_async(): +async def test_delete_key_event_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_key_event), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_display_video360_advertiser_link( + response = await client.delete_key_event( name="name_value", ) @@ -19335,7 +19114,7 @@ async def test_delete_display_video360_advertiser_link_flattened_async(): @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_flattened_error_async(): +async def test_delete_key_event_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -19343,8 +19122,8 @@ async def test_delete_display_video360_advertiser_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_display_video360_advertiser_link( - analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest(), + await client.delete_key_event( + analytics_admin.DeleteKeyEventRequest(), name="name_value", ) @@ -19352,11 +19131,11 @@ async def test_delete_display_video360_advertiser_link_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, + analytics_admin.ListKeyEventsRequest, dict, ], ) -def test_update_display_video360_advertiser_link(request_type, transport: str = "grpc"): +def test_list_key_events(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19367,31 +19146,25 @@ def test_update_display_video360_advertiser_link(request_type, transport: str = request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_key_events), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.DisplayVideo360AdvertiserLink( - name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + call.return_value = analytics_admin.ListKeyEventsResponse( + next_page_token="next_page_token_value", ) - response = client.update_display_video360_advertiser_link(request) + response = client.list_key_events(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.ListKeyEventsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DisplayVideo360AdvertiserLink) - assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert isinstance(response, pagers.ListKeyEventsPager) + assert response.next_page_token == "next_page_token_value" -def test_update_display_video360_advertiser_link_empty_call(): +def test_list_key_events_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -19400,19 +19173,17 @@ def test_update_display_video360_advertiser_link_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_key_events), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_display_video360_advertiser_link() + client.list_key_events() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() + assert args[0] == analytics_admin.ListKeyEventsRequest() -def test_update_display_video360_advertiser_link_non_empty_request_with_auto_populated_field(): +def test_list_key_events_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AnalyticsAdminServiceClient( @@ -19423,22 +19194,26 @@ def test_update_display_video360_advertiser_link_non_empty_request_with_auto_pop # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.ListKeyEventsRequest( + parent="parent_value", + page_token="page_token_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_key_events), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_display_video360_advertiser_link(request=request) + client.list_key_events(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() + assert args[0] == analytics_admin.ListKeyEventsRequest( + parent="parent_value", + page_token="page_token_value", + ) -def test_update_display_video360_advertiser_link_use_cached_wrapped_rpc(): +def test_list_key_events_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19452,26 +19227,21 @@ def test_update_display_video360_advertiser_link_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_display_video360_advertiser_link - in client._transport._wrapped_methods - ) + assert client._transport.list_key_events in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() 
mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_display_video360_advertiser_link - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_key_events] = mock_rpc request = {} - client.update_display_video360_advertiser_link(request) + client.list_key_events(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_display_video360_advertiser_link(request) + client.list_key_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -19479,7 +19249,7 @@ def test_update_display_video360_advertiser_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_display_video360_advertiser_link_empty_call_async(): +async def test_list_key_events_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -19488,25 +19258,21 @@ async def test_update_display_video360_advertiser_link_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_key_events), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLink( - name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + analytics_admin.ListKeyEventsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_display_video360_advertiser_link() + response = await client.list_key_events() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() + assert args[0] == analytics_admin.ListKeyEventsRequest() @pytest.mark.asyncio -async def test_update_display_video360_advertiser_link_async_use_cached_wrapped_rpc( +async def test_list_key_events_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -19523,33 +19289,33 @@ async def test_update_display_video360_advertiser_link_async_use_cached_wrapped_ # Ensure method has been cached assert ( - client._client._transport.update_display_video360_advertiser_link + client._client._transport.list_key_events in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_display_video360_advertiser_link - ] = mock_object + client._client._transport.list_key_events + ] = mock_rpc request = {} - await client.update_display_video360_advertiser_link(request) + await client.list_key_events(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_display_video360_advertiser_link(request) + await client.list_key_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_display_video360_advertiser_link_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, +async def test_list_key_events_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.ListKeyEventsRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19561,54 +19327,46 @@ async def test_update_display_video360_advertiser_link_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_key_events), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLink( - name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + analytics_admin.ListKeyEventsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_display_video360_advertiser_link(request) + response = await client.list_key_events(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.ListKeyEventsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DisplayVideo360AdvertiserLink) - assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert isinstance(response, pagers.ListKeyEventsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_update_display_video360_advertiser_link_async_from_dict(): - await test_update_display_video360_advertiser_link_async(request_type=dict) +async def test_list_key_events_async_from_dict(): + await test_list_key_events_async(request_type=dict) -def test_update_display_video360_advertiser_link_field_headers(): +def test_list_key_events_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.ListKeyEventsRequest() - request.display_video_360_advertiser_link.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_display_video360_advertiser_link), "__call__" - ) as call: - call.return_value = resources.DisplayVideo360AdvertiserLink() - client.update_display_video360_advertiser_link(request) + with mock.patch.object(type(client.transport.list_key_events), "__call__") as call: + call.return_value = analytics_admin.ListKeyEventsResponse() + client.list_key_events(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -19619,30 +19377,28 @@ def test_update_display_video360_advertiser_link_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "display_video_360_advertiser_link.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_display_video360_advertiser_link_field_headers_async(): +async def test_list_key_events_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.ListKeyEventsRequest() - request.display_video_360_advertiser_link.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_key_events), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLink() + analytics_admin.ListKeyEventsResponse() ) - await client.update_display_video360_advertiser_link(request) + await client.list_key_events(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -19653,43 +19409,35 @@ async def test_update_display_video360_advertiser_link_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "display_video_360_advertiser_link.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_display_video360_advertiser_link_flattened(): +def test_list_key_events_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_key_events), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.DisplayVideo360AdvertiserLink() + call.return_value = analytics_admin.ListKeyEventsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_display_video360_advertiser_link( - display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_key_events( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].display_video_360_advertiser_link - mock_val = resources.DisplayVideo360AdvertiserLink(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_display_video360_advertiser_link_flattened_error(): +def test_list_key_events_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -19697,54 +19445,43 @@ def test_update_display_video360_advertiser_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_display_video360_advertiser_link( - analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest(), - display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_key_events( + analytics_admin.ListKeyEventsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_display_video360_advertiser_link_flattened_async(): +async def test_list_key_events_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_display_video360_advertiser_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_key_events), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.DisplayVideo360AdvertiserLink() + call.return_value = analytics_admin.ListKeyEventsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLink() + analytics_admin.ListKeyEventsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_display_video360_advertiser_link( - display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_key_events( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].display_video_360_advertiser_link - mock_val = resources.DisplayVideo360AdvertiserLink(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_display_video360_advertiser_link_flattened_error_async(): +async def test_list_key_events_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -19752,25 +19489,214 @@ async def test_update_display_video360_advertiser_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_display_video360_advertiser_link( - analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest(), - display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( - name="name_value" + await client.list_key_events( + analytics_admin.ListKeyEventsRequest(), + parent="parent_value", + ) + + +def test_list_key_events_pager(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_key_events), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + resources.KeyEvent(), + resources.KeyEvent(), + ], + next_page_token="abc", ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + analytics_admin.ListKeyEventsResponse( + key_events=[], + next_page_token="def", + ), + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + ], + next_page_token="ghi", + ), + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + resources.KeyEvent(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_key_events(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.KeyEvent) for i in results) + + +def test_list_key_events_pages(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_key_events), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + resources.KeyEvent(), + resources.KeyEvent(), + ], + next_page_token="abc", + ), + analytics_admin.ListKeyEventsResponse( + key_events=[], + next_page_token="def", + ), + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + ], + next_page_token="ghi", + ), + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + resources.KeyEvent(), + ], + ), + RuntimeError, + ) + pages = list(client.list_key_events(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_key_events_async_pager(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_key_events), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + resources.KeyEvent(), + resources.KeyEvent(), + ], + next_page_token="abc", + ), + analytics_admin.ListKeyEventsResponse( + key_events=[], + next_page_token="def", + ), + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + ], + next_page_token="ghi", + ), + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + resources.KeyEvent(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_key_events( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.KeyEvent) for i in responses) + + +@pytest.mark.asyncio +async def test_list_key_events_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_key_events), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + resources.KeyEvent(), + resources.KeyEvent(), + ], + next_page_token="abc", + ), + analytics_admin.ListKeyEventsResponse( + key_events=[], + next_page_token="def", + ), + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + ], + next_page_token="ghi", + ), + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + resources.KeyEvent(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_key_events(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, + analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, dict, ], ) -def test_get_display_video360_advertiser_link_proposal( - request_type, transport: str = "grpc" -): +def test_get_display_video360_advertiser_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19782,32 +19708,30 @@ def test_get_display_video360_advertiser_link_proposal( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" + type(client.transport.get_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.DisplayVideo360AdvertiserLinkProposal( + call.return_value = resources.DisplayVideo360AdvertiserLink( name="name_value", advertiser_id="advertiser_id_value", advertiser_display_name="advertiser_display_name_value", - validation_email="validation_email_value", ) - response = client.get_display_video360_advertiser_link_proposal(request) + response = client.get_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) + assert isinstance(response, resources.DisplayVideo360AdvertiserLink) assert response.name == "name_value" assert response.advertiser_id == "advertiser_id_value" assert response.advertiser_display_name == "advertiser_display_name_value" - assert response.validation_email == "validation_email_value" -def test_get_display_video360_advertiser_link_proposal_empty_call(): +def test_get_display_video360_advertiser_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -19817,20 +19741,18 @@ def test_get_display_video360_advertiser_link_proposal_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" + type(client.transport.get_display_video360_advertiser_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_display_video360_advertiser_link_proposal() + client.get_display_video360_advertiser_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert ( - args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() - ) + assert args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() -def test_get_display_video360_advertiser_link_proposal_non_empty_request_with_auto_populated_field(): +def test_get_display_video360_advertiser_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -19841,28 +19763,26 @@ def test_get_display_video360_advertiser_link_proposal_non_empty_request_with_au # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest( + request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" + type(client.transport.get_display_video360_advertiser_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_display_video360_advertiser_link_proposal(request=request) + client.get_display_video360_advertiser_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest( + assert args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkRequest( name="name_value", ) -def test_get_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc(): +def test_get_display_video360_advertiser_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19877,7 +19797,7 @@ def test_get_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_display_video360_advertiser_link_proposal + client._transport.get_display_video360_advertiser_link in client._transport._wrapped_methods ) @@ -19887,15 +19807,15 @@ def test_get_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_display_video360_advertiser_link_proposal + client._transport.get_display_video360_advertiser_link ] = mock_rpc request = {} - client.get_display_video360_advertiser_link_proposal(request) + client.get_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_display_video360_advertiser_link_proposal(request) + client.get_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -19903,7 +19823,7 @@ def test_get_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_proposal_empty_call_async(): +async def test_get_display_video360_advertiser_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -19913,27 +19833,24 @@ async def test_get_display_video360_advertiser_link_proposal_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" + type(client.transport.get_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLinkProposal( + resources.DisplayVideo360AdvertiserLink( name="name_value", advertiser_id="advertiser_id_value", advertiser_display_name="advertiser_display_name_value", - validation_email="validation_email_value", ) ) - response = await client.get_display_video360_advertiser_link_proposal() + response = await client.get_display_video360_advertiser_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert ( - args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() - ) + assert args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_proposal_async_use_cached_wrapped_rpc( +async def test_get_display_video360_advertiser_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -19950,33 +19867,34 @@ async def test_get_display_video360_advertiser_link_proposal_async_use_cached_wr # Ensure method has been cached assert ( - client._client._transport.get_display_video360_advertiser_link_proposal + client._client._transport.get_display_video360_advertiser_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_display_video360_advertiser_link_proposal - ] = mock_object + client._client._transport.get_display_video360_advertiser_link + ] = mock_rpc request = {} - await client.get_display_video360_advertiser_link_proposal(request) + await client.get_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_display_video360_advertiser_link_proposal(request) + await client.get_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_proposal_async( +async def test_get_display_video360_advertiser_link_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, + request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19989,55 +19907,53 @@ async def test_get_display_video360_advertiser_link_proposal_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" + type(client.transport.get_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLinkProposal( + resources.DisplayVideo360AdvertiserLink( name="name_value", advertiser_id="advertiser_id_value", advertiser_display_name="advertiser_display_name_value", - validation_email="validation_email_value", ) ) - response = await client.get_display_video360_advertiser_link_proposal(request) + response = await client.get_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) + assert isinstance(response, resources.DisplayVideo360AdvertiserLink) assert response.name == "name_value" assert response.advertiser_id == "advertiser_id_value" assert response.advertiser_display_name == "advertiser_display_name_value" - assert response.validation_email == "validation_email_value" @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_proposal_async_from_dict(): - await test_get_display_video360_advertiser_link_proposal_async(request_type=dict) +async def test_get_display_video360_advertiser_link_async_from_dict(): + await test_get_display_video360_advertiser_link_async(request_type=dict) -def test_get_display_video360_advertiser_link_proposal_field_headers(): +def test_get_display_video360_advertiser_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" + type(client.transport.get_display_video360_advertiser_link), "__call__" ) as call: - call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() - client.get_display_video360_advertiser_link_proposal(request) + call.return_value = resources.DisplayVideo360AdvertiserLink() + client.get_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -20053,25 +19969,25 @@ def test_get_display_video360_advertiser_link_proposal_field_headers(): @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_proposal_field_headers_async(): +async def test_get_display_video360_advertiser_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" + type(client.transport.get_display_video360_advertiser_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLinkProposal() + resources.DisplayVideo360AdvertiserLink() ) - await client.get_display_video360_advertiser_link_proposal(request) + await client.get_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -20086,20 +20002,20 @@ async def test_get_display_video360_advertiser_link_proposal_field_headers_async ) in kw["metadata"] -def test_get_display_video360_advertiser_link_proposal_flattened(): +def test_get_display_video360_advertiser_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" + type(client.transport.get_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() + call.return_value = resources.DisplayVideo360AdvertiserLink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_display_video360_advertiser_link_proposal( + client.get_display_video360_advertiser_link( name="name_value", ) @@ -20112,7 +20028,7 @@ def test_get_display_video360_advertiser_link_proposal_flattened(): assert arg == mock_val -def test_get_display_video360_advertiser_link_proposal_flattened_error(): +def test_get_display_video360_advertiser_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -20120,31 +20036,31 @@ def test_get_display_video360_advertiser_link_proposal_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_display_video360_advertiser_link_proposal( - analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest(), + client.get_display_video360_advertiser_link( + analytics_admin.GetDisplayVideo360AdvertiserLinkRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_proposal_flattened_async(): +async def test_get_display_video360_advertiser_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" + type(client.transport.get_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() + call.return_value = resources.DisplayVideo360AdvertiserLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLinkProposal() + resources.DisplayVideo360AdvertiserLink() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_display_video360_advertiser_link_proposal( + response = await client.get_display_video360_advertiser_link( name="name_value", ) @@ -20158,7 +20074,7 @@ async def test_get_display_video360_advertiser_link_proposal_flattened_async(): @pytest.mark.asyncio -async def test_get_display_video360_advertiser_link_proposal_flattened_error_async(): +async def test_get_display_video360_advertiser_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -20166,8 +20082,8 @@ async def test_get_display_video360_advertiser_link_proposal_flattened_error_asy # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_display_video360_advertiser_link_proposal( - analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest(), + await client.get_display_video360_advertiser_link( + analytics_admin.GetDisplayVideo360AdvertiserLinkRequest(), name="name_value", ) @@ -20175,13 +20091,11 @@ async def test_get_display_video360_advertiser_link_proposal_flattened_error_asy @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, + analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, dict, ], ) -def test_list_display_video360_advertiser_link_proposals( - request_type, transport: str = "grpc" -): +def test_list_display_video360_advertiser_links(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20193,29 +20107,26 @@ def test_list_display_video360_advertiser_link_proposals( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_display_video360_advertiser_link_proposals), - "__call__", + type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - next_page_token="next_page_token_value", - ) + call.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + next_page_token="next_page_token_value", ) - response = client.list_display_video360_advertiser_link_proposals(request) + response = client.list_display_video360_advertiser_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() + request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDisplayVideo360AdvertiserLinkProposalsPager) + assert isinstance(response, pagers.ListDisplayVideo360AdvertiserLinksPager) assert response.next_page_token == "next_page_token_value" -def test_list_display_video360_advertiser_link_proposals_empty_call(): +def test_list_display_video360_advertiser_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -20225,22 +20136,18 @@ def test_list_display_video360_advertiser_link_proposals_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_display_video360_advertiser_link_proposals), - "__call__", + type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_display_video360_advertiser_link_proposals() + client.list_display_video360_advertiser_links() call.assert_called() _, args, _ = call.mock_calls[0] - assert ( - args[0] - == analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() - ) + assert args[0] == analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() -def test_list_display_video360_advertiser_link_proposals_non_empty_request_with_auto_populated_field(): +def test_list_display_video360_advertiser_links_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -20251,31 +20158,28 @@ def test_list_display_video360_advertiser_link_proposals_non_empty_request_with_ # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest( + request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest( parent="parent_value", page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_display_video360_advertiser_link_proposals), - "__call__", + type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_display_video360_advertiser_link_proposals(request=request) + client.list_display_video360_advertiser_links(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest( + assert args[0] == analytics_admin.ListDisplayVideo360AdvertiserLinksRequest( parent="parent_value", page_token="page_token_value", ) -def test_list_display_video360_advertiser_link_proposals_use_cached_wrapped_rpc(): +def test_list_display_video360_advertiser_links_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20290,7 +20194,7 @@ def test_list_display_video360_advertiser_link_proposals_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._transport.list_display_video360_advertiser_link_proposals + client._transport.list_display_video360_advertiser_links in client._transport._wrapped_methods ) @@ -20300,15 +20204,15 @@ def test_list_display_video360_advertiser_link_proposals_use_cached_wrapped_rpc( "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_display_video360_advertiser_link_proposals + client._transport.list_display_video360_advertiser_links ] = mock_rpc request = {} - client.list_display_video360_advertiser_link_proposals(request) + client.list_display_video360_advertiser_links(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_display_video360_advertiser_link_proposals(request) + client.list_display_video360_advertiser_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -20316,7 +20220,7 @@ def test_list_display_video360_advertiser_link_proposals_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_display_video360_advertiser_link_proposals_empty_call_async(): +async def test_list_display_video360_advertiser_links_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -20326,26 +20230,22 @@ async def test_list_display_video360_advertiser_link_proposals_empty_call_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_display_video360_advertiser_link_proposals), - "__call__", + type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_display_video360_advertiser_link_proposals() + response = await client.list_display_video360_advertiser_links() call.assert_called() _, args, _ = call.mock_calls[0] - assert ( - args[0] - == analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() - ) + assert args[0] == analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() @pytest.mark.asyncio -async def test_list_display_video360_advertiser_link_proposals_async_use_cached_wrapped_rpc( +async def test_list_display_video360_advertiser_links_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -20362,33 +20262,34 @@ async def test_list_display_video360_advertiser_link_proposals_async_use_cached_ # Ensure method has been cached assert ( - client._client._transport.list_display_video360_advertiser_link_proposals + client._client._transport.list_display_video360_advertiser_links in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_display_video360_advertiser_link_proposals - ] = mock_object + client._client._transport.list_display_video360_advertiser_links + ] = mock_rpc request = {} - await client.list_display_video360_advertiser_link_proposals(request) + await client.list_display_video360_advertiser_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_display_video360_advertiser_link_proposals(request) + await client.list_display_video360_advertiser_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_display_video360_advertiser_link_proposals_async( +async def test_list_display_video360_advertiser_links_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, + request_type=analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20401,55 +20302,49 @@ async def test_list_display_video360_advertiser_link_proposals_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_display_video360_advertiser_link_proposals), - "__call__", + type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_display_video360_advertiser_link_proposals(request) + response = await client.list_display_video360_advertiser_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() + request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance( - response, pagers.ListDisplayVideo360AdvertiserLinkProposalsAsyncPager - ) + assert isinstance(response, pagers.ListDisplayVideo360AdvertiserLinksAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_list_display_video360_advertiser_link_proposals_async_from_dict(): - await test_list_display_video360_advertiser_link_proposals_async(request_type=dict) +async def test_list_display_video360_advertiser_links_async_from_dict(): + await test_list_display_video360_advertiser_links_async(request_type=dict) -def test_list_display_video360_advertiser_link_proposals_field_headers(): +def test_list_display_video360_advertiser_links_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() + request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_display_video360_advertiser_link_proposals), - "__call__", + type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: - call.return_value = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() - ) - client.list_display_video360_advertiser_link_proposals(request) + call.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() + client.list_display_video360_advertiser_links(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -20465,26 +20360,25 @@ def test_list_display_video360_advertiser_link_proposals_field_headers(): @pytest.mark.asyncio -async def test_list_display_video360_advertiser_link_proposals_field_headers_async(): +async def test_list_display_video360_advertiser_links_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() + request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_display_video360_advertiser_link_proposals), - "__call__", + type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() ) - await client.list_display_video360_advertiser_link_proposals(request) + await client.list_display_video360_advertiser_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -20499,23 +20393,20 @@ async def test_list_display_video360_advertiser_link_proposals_field_headers_asy ) in kw["metadata"] -def test_list_display_video360_advertiser_link_proposals_flattened(): +def test_list_display_video360_advertiser_links_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_display_video360_advertiser_link_proposals), - "__call__", + type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() - ) + call.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_display_video360_advertiser_link_proposals( + client.list_display_video360_advertiser_links( parent="parent_value", ) @@ -20528,7 +20419,7 @@ def test_list_display_video360_advertiser_link_proposals_flattened(): assert arg == mock_val -def test_list_display_video360_advertiser_link_proposals_flattened_error(): +def test_list_display_video360_advertiser_links_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -20536,34 +20427,31 @@ def test_list_display_video360_advertiser_link_proposals_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_display_video360_advertiser_link_proposals( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest(), + client.list_display_video360_advertiser_links( + analytics_admin.ListDisplayVideo360AdvertiserLinksRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_display_video360_advertiser_link_proposals_flattened_async(): +async def test_list_display_video360_advertiser_links_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_display_video360_advertiser_link_proposals), - "__call__", + type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() - ) + call.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_display_video360_advertiser_link_proposals( + response = await client.list_display_video360_advertiser_links( parent="parent_value", ) @@ -20577,7 +20465,7 @@ async def test_list_display_video360_advertiser_link_proposals_flattened_async() @pytest.mark.asyncio -async def test_list_display_video360_advertiser_link_proposals_flattened_error_async(): +async def test_list_display_video360_advertiser_links_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -20585,15 +20473,13 @@ async def test_list_display_video360_advertiser_link_proposals_flattened_error_a # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_display_video360_advertiser_link_proposals( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest(), + await client.list_display_video360_advertiser_links( + analytics_admin.ListDisplayVideo360AdvertiserLinksRequest(), parent="parent_value", ) -def test_list_display_video360_advertiser_link_proposals_pager( - transport_name: str = "grpc", -): +def test_list_display_video360_advertiser_links_pager(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -20601,33 +20487,32 @@ def test_list_display_video360_advertiser_link_proposals_pager( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_display_video360_advertiser_link_proposals), - "__call__", + type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), ], next_page_token="abc", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[], + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[], next_page_token="def", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), ], next_page_token="ghi", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), ], ), RuntimeError, @@ -20639,7 +20524,7 @@ def test_list_display_video360_advertiser_link_proposals_pager( expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_display_video360_advertiser_link_proposals( + pager = client.list_display_video360_advertiser_links( request={}, retry=retry, timeout=timeout ) @@ -20650,14 +20535,11 @@ def 
test_list_display_video360_advertiser_link_proposals_pager( results = list(pager) assert len(results) == 6 assert all( - isinstance(i, resources.DisplayVideo360AdvertiserLinkProposal) - for i in results + isinstance(i, resources.DisplayVideo360AdvertiserLink) for i in results ) -def test_list_display_video360_advertiser_link_proposals_pages( - transport_name: str = "grpc", -): +def test_list_display_video360_advertiser_links_pages(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -20665,85 +20547,82 @@ def test_list_display_video360_advertiser_link_proposals_pages( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_display_video360_advertiser_link_proposals), - "__call__", + type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), ], next_page_token="abc", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[], + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[], next_page_token="def", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), + 
analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), ], next_page_token="ghi", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), ], ), RuntimeError, ) - pages = list( - client.list_display_video360_advertiser_link_proposals(request={}).pages - ) + pages = list(client.list_display_video360_advertiser_links(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_display_video360_advertiser_link_proposals_async_pager(): +async def test_list_display_video360_advertiser_links_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_display_video360_advertiser_link_proposals), + type(client.transport.list_display_video360_advertiser_links), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), ], next_page_token="abc", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[], + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[], next_page_token="def", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), ], next_page_token="ghi", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), ], ), RuntimeError, ) - async_pager = await client.list_display_video360_advertiser_link_proposals( + async_pager = await client.list_display_video360_advertiser_links( request={}, ) assert async_pager.next_page_token == "abc" @@ -20753,47 +20632,46 @@ async def test_list_display_video360_advertiser_link_proposals_async_pager(): assert len(responses) == 6 assert all( - isinstance(i, 
resources.DisplayVideo360AdvertiserLinkProposal) - for i in responses + isinstance(i, resources.DisplayVideo360AdvertiserLink) for i in responses ) @pytest.mark.asyncio -async def test_list_display_video360_advertiser_link_proposals_async_pages(): +async def test_list_display_video360_advertiser_links_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_display_video360_advertiser_link_proposals), + type(client.transport.list_display_video360_advertiser_links), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), ], next_page_token="abc", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[], + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[], next_page_token="def", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), ], next_page_token="ghi", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - 
display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), ], ), RuntimeError, @@ -20802,7 +20680,7 @@ async def test_list_display_video360_advertiser_link_proposals_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_display_video360_advertiser_link_proposals(request={}) + await client.list_display_video360_advertiser_links(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -20812,13 +20690,11 @@ async def test_list_display_video360_advertiser_link_proposals_async_pages(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, + analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, dict, ], ) -def test_create_display_video360_advertiser_link_proposal( - request_type, transport: str = "grpc" -): +def test_create_display_video360_advertiser_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20830,33 +20706,30 @@ def test_create_display_video360_advertiser_link_proposal( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.DisplayVideo360AdvertiserLinkProposal( + call.return_value = resources.DisplayVideo360AdvertiserLink( name="name_value", advertiser_id="advertiser_id_value", advertiser_display_name="advertiser_display_name_value", - validation_email="validation_email_value", ) - response = client.create_display_video360_advertiser_link_proposal(request) + response = client.create_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) + assert isinstance(response, resources.DisplayVideo360AdvertiserLink) assert response.name == "name_value" assert response.advertiser_id == "advertiser_id_value" assert response.advertiser_display_name == "advertiser_display_name_value" - assert response.validation_email == "validation_email_value" -def test_create_display_video360_advertiser_link_proposal_empty_call(): +def test_create_display_video360_advertiser_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -20866,22 +20739,18 @@ def test_create_display_video360_advertiser_link_proposal_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_display_video360_advertiser_link_proposal() + client.create_display_video360_advertiser_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert ( - args[0] - == analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() - ) + assert args[0] == analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() -def test_create_display_video360_advertiser_link_proposal_non_empty_request_with_auto_populated_field(): +def test_create_display_video360_advertiser_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -20892,29 +20761,26 @@ def test_create_display_video360_advertiser_link_proposal_non_empty_request_with # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest( + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest( parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_display_video360_advertiser_link_proposal(request=request) + client.create_display_video360_advertiser_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest( + assert args[0] == analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest( parent="parent_value", ) -def test_create_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc(): +def test_create_display_video360_advertiser_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20929,7 +20795,7 @@ def test_create_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc # Ensure method has been cached assert ( - client._transport.create_display_video360_advertiser_link_proposal + client._transport.create_display_video360_advertiser_link in client._transport._wrapped_methods ) @@ -20939,15 +20805,15 @@ def test_create_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_display_video360_advertiser_link_proposal + client._transport.create_display_video360_advertiser_link ] = mock_rpc request = {} - client.create_display_video360_advertiser_link_proposal(request) + client.create_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_display_video360_advertiser_link_proposal(request) + client.create_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -20955,7 +20821,7 @@ def test_create_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_proposal_empty_call_async(): +async def test_create_display_video360_advertiser_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -20965,29 +20831,24 @@ async def test_create_display_video360_advertiser_link_proposal_empty_call_async # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLinkProposal( + resources.DisplayVideo360AdvertiserLink( name="name_value", advertiser_id="advertiser_id_value", advertiser_display_name="advertiser_display_name_value", - validation_email="validation_email_value", ) ) - response = await client.create_display_video360_advertiser_link_proposal() + response = await client.create_display_video360_advertiser_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert ( - args[0] - == analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() - ) + assert args[0] == analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_proposal_async_use_cached_wrapped_rpc( +async def test_create_display_video360_advertiser_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -21004,33 +20865,34 @@ async def test_create_display_video360_advertiser_link_proposal_async_use_cached # Ensure method has been cached assert ( - client._client._transport.create_display_video360_advertiser_link_proposal + client._client._transport.create_display_video360_advertiser_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_display_video360_advertiser_link_proposal - ] = mock_object + client._client._transport.create_display_video360_advertiser_link + ] = mock_rpc request = {} - await client.create_display_video360_advertiser_link_proposal(request) + await client.create_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_display_video360_advertiser_link_proposal(request) + await client.create_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_proposal_async( +async def test_create_display_video360_advertiser_link_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, + request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21043,59 +20905,53 @@ async def test_create_display_video360_advertiser_link_proposal_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLinkProposal( + resources.DisplayVideo360AdvertiserLink( name="name_value", advertiser_id="advertiser_id_value", advertiser_display_name="advertiser_display_name_value", - validation_email="validation_email_value", ) ) - response = await client.create_display_video360_advertiser_link_proposal( - request - ) + response = await client.create_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) + assert isinstance(response, resources.DisplayVideo360AdvertiserLink) assert response.name == "name_value" assert response.advertiser_id == "advertiser_id_value" assert response.advertiser_display_name == "advertiser_display_name_value" - assert response.validation_email == "validation_email_value" @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_proposal_async_from_dict(): - await test_create_display_video360_advertiser_link_proposal_async(request_type=dict) +async def test_create_display_video360_advertiser_link_async_from_dict(): + await test_create_display_video360_advertiser_link_async(request_type=dict) -def test_create_display_video360_advertiser_link_proposal_field_headers(): +def test_create_display_video360_advertiser_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: - call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() - client.create_display_video360_advertiser_link_proposal(request) + call.return_value = resources.DisplayVideo360AdvertiserLink() + client.create_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -21111,26 +20967,25 @@ def test_create_display_video360_advertiser_link_proposal_field_headers(): @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_proposal_field_headers_async(): +async def test_create_display_video360_advertiser_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLinkProposal() + resources.DisplayVideo360AdvertiserLink() ) - await client.create_display_video360_advertiser_link_proposal(request) + await client.create_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -21145,23 +21000,22 @@ async def test_create_display_video360_advertiser_link_proposal_field_headers_as ) in kw["metadata"] -def test_create_display_video360_advertiser_link_proposal_flattened(): +def test_create_display_video360_advertiser_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() + call.return_value = resources.DisplayVideo360AdvertiserLink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_display_video360_advertiser_link_proposal( + client.create_display_video360_advertiser_link( parent="parent_value", - display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal( + display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( name="name_value" ), ) @@ -21173,12 +21027,12 @@ def test_create_display_video360_advertiser_link_proposal_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].display_video_360_advertiser_link_proposal - mock_val = resources.DisplayVideo360AdvertiserLinkProposal(name="name_value") + arg = args[0].display_video_360_advertiser_link + mock_val = resources.DisplayVideo360AdvertiserLink(name="name_value") assert arg == mock_val -def test_create_display_video360_advertiser_link_proposal_flattened_error(): +def test_create_display_video360_advertiser_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -21186,37 +21040,36 @@ def 
test_create_display_video360_advertiser_link_proposal_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_display_video360_advertiser_link_proposal( - analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest(), + client.create_display_video360_advertiser_link( + analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest(), parent="parent_value", - display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal( + display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( name="name_value" ), ) @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_proposal_flattened_async(): +async def test_create_display_video360_advertiser_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() + call.return_value = resources.DisplayVideo360AdvertiserLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DisplayVideo360AdvertiserLinkProposal() + resources.DisplayVideo360AdvertiserLink() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_display_video360_advertiser_link_proposal( + response = await client.create_display_video360_advertiser_link( parent="parent_value", - display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal( + display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( name="name_value" ), ) @@ -21228,13 +21081,13 @@ async def test_create_display_video360_advertiser_link_proposal_flattened_async( arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].display_video_360_advertiser_link_proposal - mock_val = resources.DisplayVideo360AdvertiserLinkProposal(name="name_value") + arg = args[0].display_video_360_advertiser_link + mock_val = resources.DisplayVideo360AdvertiserLink(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_create_display_video360_advertiser_link_proposal_flattened_error_async(): +async def test_create_display_video360_advertiser_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -21242,10 +21095,10 @@ async def test_create_display_video360_advertiser_link_proposal_flattened_error_ # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_display_video360_advertiser_link_proposal( - analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest(), + await client.create_display_video360_advertiser_link( + analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest(), parent="parent_value", - display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal( + display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( name="name_value" ), ) @@ -21254,13 +21107,11 @@ async def test_create_display_video360_advertiser_link_proposal_flattened_error_ @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, + analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, dict, ], ) -def test_delete_display_video360_advertiser_link_proposal( - request_type, transport: str = "grpc" -): +def test_delete_display_video360_advertiser_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21272,24 +21123,23 @@ def test_delete_display_video360_advertiser_link_proposal( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_display_video360_advertiser_link_proposal(request) + response = client.delete_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. assert response is None -def test_delete_display_video360_advertiser_link_proposal_empty_call(): +def test_delete_display_video360_advertiser_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -21299,22 +21149,18 @@ def test_delete_display_video360_advertiser_link_proposal_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_display_video360_advertiser_link_proposal() + client.delete_display_video360_advertiser_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert ( - args[0] - == analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() - ) + assert args[0] == analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() -def test_delete_display_video360_advertiser_link_proposal_non_empty_request_with_auto_populated_field(): +def test_delete_display_video360_advertiser_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AnalyticsAdminServiceClient( @@ -21325,29 +21171,26 @@ def test_delete_display_video360_advertiser_link_proposal_non_empty_request_with # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest( + request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_display_video360_advertiser_link_proposal(request=request) + client.delete_display_video360_advertiser_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest( + assert args[0] == analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest( name="name_value", ) -def test_delete_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc(): +def test_delete_display_video360_advertiser_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21362,7 +21205,7 @@ def test_delete_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc # Ensure method has been cached assert ( - client._transport.delete_display_video360_advertiser_link_proposal + client._transport.delete_display_video360_advertiser_link in client._transport._wrapped_methods ) @@ -21372,15 +21215,15 @@ def 
test_delete_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_display_video360_advertiser_link_proposal + client._transport.delete_display_video360_advertiser_link ] = mock_rpc request = {} - client.delete_display_video360_advertiser_link_proposal(request) + client.delete_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_display_video360_advertiser_link_proposal(request) + client.delete_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -21388,7 +21231,7 @@ def test_delete_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_proposal_empty_call_async(): +async def test_delete_display_video360_advertiser_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -21398,22 +21241,18 @@ async def test_delete_display_video360_advertiser_link_proposal_empty_call_async # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_display_video360_advertiser_link_proposal() + response = await client.delete_display_video360_advertiser_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert ( - args[0] - == analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() - ) + assert args[0] == analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_proposal_async_use_cached_wrapped_rpc( +async def test_delete_display_video360_advertiser_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -21430,33 +21269,34 @@ async def test_delete_display_video360_advertiser_link_proposal_async_use_cached # Ensure method has been cached assert ( - client._client._transport.delete_display_video360_advertiser_link_proposal + client._client._transport.delete_display_video360_advertiser_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_display_video360_advertiser_link_proposal - ] = mock_object + client._client._transport.delete_display_video360_advertiser_link + ] = mock_rpc request = {} - await client.delete_display_video360_advertiser_link_proposal(request) + await client.delete_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_display_video360_advertiser_link_proposal(request) + await client.delete_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_proposal_async( +async def test_delete_display_video360_advertiser_link_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, + request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21469,19 +21309,16 @@ async def test_delete_display_video360_advertiser_link_proposal_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_display_video360_advertiser_link_proposal( - request - ) + response = await client.delete_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -21489,28 +21326,27 @@ async def test_delete_display_video360_advertiser_link_proposal_async( @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_proposal_async_from_dict(): - await test_delete_display_video360_advertiser_link_proposal_async(request_type=dict) +async def test_delete_display_video360_advertiser_link_async_from_dict(): + await test_delete_display_video360_advertiser_link_async(request_type=dict) -def test_delete_display_video360_advertiser_link_proposal_field_headers(): +def test_delete_display_video360_advertiser_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: call.return_value = None - client.delete_display_video360_advertiser_link_proposal(request) + client.delete_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -21526,24 +21362,23 @@ def test_delete_display_video360_advertiser_link_proposal_field_headers(): @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_proposal_field_headers_async(): +async def test_delete_display_video360_advertiser_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_display_video360_advertiser_link_proposal(request) + await client.delete_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -21558,21 +21393,20 @@ async def test_delete_display_video360_advertiser_link_proposal_field_headers_as ) in kw["metadata"] -def test_delete_display_video360_advertiser_link_proposal_flattened(): +def test_delete_display_video360_advertiser_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_display_video360_advertiser_link_proposal( + client.delete_display_video360_advertiser_link( name="name_value", ) @@ -21585,7 +21419,7 @@ def test_delete_display_video360_advertiser_link_proposal_flattened(): assert arg == mock_val -def test_delete_display_video360_advertiser_link_proposal_flattened_error(): +def test_delete_display_video360_advertiser_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -21593,22 +21427,21 @@ def test_delete_display_video360_advertiser_link_proposal_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_display_video360_advertiser_link_proposal( - analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest(), + client.delete_display_video360_advertiser_link( + analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_proposal_flattened_async(): +async def test_delete_display_video360_advertiser_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -21616,7 +21449,7 @@ async def test_delete_display_video360_advertiser_link_proposal_flattened_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.delete_display_video360_advertiser_link_proposal( + response = await client.delete_display_video360_advertiser_link( name="name_value", ) @@ -21630,7 +21463,7 @@ async def test_delete_display_video360_advertiser_link_proposal_flattened_async( @pytest.mark.asyncio -async def test_delete_display_video360_advertiser_link_proposal_flattened_error_async(): +async def test_delete_display_video360_advertiser_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -21638,8 +21471,8 @@ async def test_delete_display_video360_advertiser_link_proposal_flattened_error_ # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_display_video360_advertiser_link_proposal( - analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest(), + await client.delete_display_video360_advertiser_link( + analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest(), name="name_value", ) @@ -21647,13 +21480,11 @@ async def test_delete_display_video360_advertiser_link_proposal_flattened_error_ @pytest.mark.parametrize( "request_type", [ - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, + analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, dict, ], ) -def test_approve_display_video360_advertiser_link_proposal( - request_type, transport: str = "grpc" -): +def test_update_display_video360_advertiser_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21665,28 +21496,30 @@ def test_approve_display_video360_advertiser_link_proposal( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.approve_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = ( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() + call.return_value = resources.DisplayVideo360AdvertiserLink( + name="name_value", + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) - response = client.approve_display_video360_advertiser_link_proposal(request) + response = client.update_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance( - response, analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse - ) + assert isinstance(response, resources.DisplayVideo360AdvertiserLink) + assert response.name == "name_value" + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" -def test_approve_display_video360_advertiser_link_proposal_empty_call(): +def test_update_display_video360_advertiser_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -21696,22 +21529,18 @@ def test_approve_display_video360_advertiser_link_proposal_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.approve_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.approve_display_video360_advertiser_link_proposal() + client.update_display_video360_advertiser_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert ( - args[0] - == analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() - ) + assert args[0] == analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() -def test_approve_display_video360_advertiser_link_proposal_non_empty_request_with_auto_populated_field(): +def test_update_display_video360_advertiser_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -21722,29 +21551,22 @@ def test_approve_display_video360_advertiser_link_proposal_non_empty_request_wit # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest( - name="name_value", - ) + request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.approve_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.approve_display_video360_advertiser_link_proposal(request=request) + client.update_display_video360_advertiser_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest( - name="name_value", - ) + assert args[0] == analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() -def test_approve_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc(): +def test_update_display_video360_advertiser_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21759,7 +21581,7 @@ def test_approve_display_video360_advertiser_link_proposal_use_cached_wrapped_rp # Ensure method has been cached assert ( - client._transport.approve_display_video360_advertiser_link_proposal + client._transport.update_display_video360_advertiser_link in client._transport._wrapped_methods ) @@ -21769,15 +21591,15 @@ def test_approve_display_video360_advertiser_link_proposal_use_cached_wrapped_rp "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.approve_display_video360_advertiser_link_proposal + client._transport.update_display_video360_advertiser_link ] = mock_rpc request = {} - client.approve_display_video360_advertiser_link_proposal(request) + client.update_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.approve_display_video360_advertiser_link_proposal(request) + client.update_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -21785,7 +21607,7 @@ def test_approve_display_video360_advertiser_link_proposal_use_cached_wrapped_rp @pytest.mark.asyncio -async def test_approve_display_video360_advertiser_link_proposal_empty_call_async(): +async def test_update_display_video360_advertiser_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -21795,24 +21617,24 @@ async def test_approve_display_video360_advertiser_link_proposal_empty_call_asyn # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.approve_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() + resources.DisplayVideo360AdvertiserLink( + name="name_value", + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", + ) ) - response = await client.approve_display_video360_advertiser_link_proposal() + response = await client.update_display_video360_advertiser_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert ( - args[0] - == analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() - ) + assert args[0] == analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() @pytest.mark.asyncio -async def test_approve_display_video360_advertiser_link_proposal_async_use_cached_wrapped_rpc( +async def test_update_display_video360_advertiser_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -21829,33 +21651,34 @@ async def test_approve_display_video360_advertiser_link_proposal_async_use_cache # Ensure method has been cached assert ( - client._client._transport.approve_display_video360_advertiser_link_proposal + client._client._transport.update_display_video360_advertiser_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.approve_display_video360_advertiser_link_proposal - ] = mock_object + client._client._transport.update_display_video360_advertiser_link + ] = mock_rpc request = {} - await client.approve_display_video360_advertiser_link_proposal(request) + await client.update_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.approve_display_video360_advertiser_link_proposal(request) + await client.update_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_approve_display_video360_advertiser_link_proposal_async( +async def test_update_display_video360_advertiser_link_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, + request_type=analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21868,56 +21691,53 @@ async def test_approve_display_video360_advertiser_link_proposal_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.approve_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() - ) - response = await client.approve_display_video360_advertiser_link_proposal( - request + resources.DisplayVideo360AdvertiserLink( + name="name_value", + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", + ) ) + response = await client.update_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance( - response, analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse - ) + assert isinstance(response, resources.DisplayVideo360AdvertiserLink) + assert response.name == "name_value" + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" @pytest.mark.asyncio -async def test_approve_display_video360_advertiser_link_proposal_async_from_dict(): - await test_approve_display_video360_advertiser_link_proposal_async( - request_type=dict - ) +async def test_update_display_video360_advertiser_link_async_from_dict(): + await test_update_display_video360_advertiser_link_async(request_type=dict) -def test_approve_display_video360_advertiser_link_proposal_field_headers(): +def test_update_display_video360_advertiser_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() - request.name = "name_value" + request.display_video_360_advertiser_link.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.approve_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: - call.return_value = ( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() - ) - client.approve_display_video360_advertiser_link_proposal(request) + call.return_value = resources.DisplayVideo360AdvertiserLink() + client.update_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -21928,31 +21748,30 @@ def test_approve_display_video360_advertiser_link_proposal_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "display_video_360_advertiser_link.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_approve_display_video360_advertiser_link_proposal_field_headers_async(): +async def test_update_display_video360_advertiser_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() - request.name = "name_value" + request.display_video_360_advertiser_link.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.approve_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() + resources.DisplayVideo360AdvertiserLink() ) - await client.approve_display_video360_advertiser_link_proposal(request) + await client.update_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -21963,18 +21782,122 @@ async def test_approve_display_video360_advertiser_link_proposal_field_headers_a _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "display_video_360_advertiser_link.name=name_value", ) in kw["metadata"] +def test_update_display_video360_advertiser_link_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_display_video360_advertiser_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.DisplayVideo360AdvertiserLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_display_video360_advertiser_link( + display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].display_video_360_advertiser_link + mock_val = resources.DisplayVideo360AdvertiserLink(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_display_video360_advertiser_link_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_display_video360_advertiser_link( + analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest(), + display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_display_video360_advertiser_link_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_display_video360_advertiser_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.DisplayVideo360AdvertiserLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.DisplayVideo360AdvertiserLink() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_display_video360_advertiser_link( + display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].display_video_360_advertiser_link + mock_val = resources.DisplayVideo360AdvertiserLink(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_display_video360_advertiser_link_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_display_video360_advertiser_link( + analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest(), + display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + @pytest.mark.parametrize( "request_type", [ - analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, + analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, dict, ], ) -def test_cancel_display_video360_advertiser_link_proposal( +def test_get_display_video360_advertiser_link_proposal( request_type, transport: str = "grpc" ): client = AnalyticsAdminServiceClient( @@ -21988,8 +21911,7 @@ def test_cancel_display_video360_advertiser_link_proposal( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = resources.DisplayVideo360AdvertiserLinkProposal( @@ -21998,12 +21920,12 @@ def test_cancel_display_video360_advertiser_link_proposal( advertiser_display_name="advertiser_display_name_value", validation_email="validation_email_value", ) - response = client.cancel_display_video360_advertiser_link_proposal(request) + response = client.get_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -22014,7 +21936,7 @@ def test_cancel_display_video360_advertiser_link_proposal( assert response.validation_email == "validation_email_value" -def test_cancel_display_video360_advertiser_link_proposal_empty_call(): +def test_get_display_video360_advertiser_link_proposal_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -22024,22 +21946,20 @@ def test_cancel_display_video360_advertiser_link_proposal_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.cancel_display_video360_advertiser_link_proposal() + client.get_display_video360_advertiser_link_proposal() call.assert_called() _, args, _ = call.mock_calls[0] assert ( - args[0] - == analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() + args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() ) -def test_cancel_display_video360_advertiser_link_proposal_non_empty_request_with_auto_populated_field(): +def test_get_display_video360_advertiser_link_proposal_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -22050,29 +21970,28 @@ def test_cancel_display_video360_advertiser_link_proposal_non_empty_request_with # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest( + request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.cancel_display_video360_advertiser_link_proposal(request=request) + client.get_display_video360_advertiser_link_proposal(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[ 0 - ] == analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest( + ] == analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest( name="name_value", ) -def test_cancel_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc(): +def test_get_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22087,7 +22006,7 @@ def test_cancel_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc # Ensure method has been cached assert ( - client._transport.cancel_display_video360_advertiser_link_proposal + client._transport.get_display_video360_advertiser_link_proposal in client._transport._wrapped_methods ) @@ -22097,15 +22016,15 @@ def test_cancel_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.cancel_display_video360_advertiser_link_proposal + client._transport.get_display_video360_advertiser_link_proposal ] = mock_rpc request = {} - client.cancel_display_video360_advertiser_link_proposal(request) + client.get_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.cancel_display_video360_advertiser_link_proposal(request) + client.get_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -22113,7 +22032,7 @@ def test_cancel_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc @pytest.mark.asyncio -async def test_cancel_display_video360_advertiser_link_proposal_empty_call_async(): +async def test_get_display_video360_advertiser_link_proposal_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -22123,8 +22042,7 @@ async def test_cancel_display_video360_advertiser_link_proposal_empty_call_async # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -22135,17 +22053,16 @@ async def test_cancel_display_video360_advertiser_link_proposal_empty_call_async validation_email="validation_email_value", ) ) - response = await client.cancel_display_video360_advertiser_link_proposal() + response = await client.get_display_video360_advertiser_link_proposal() call.assert_called() _, args, _ = call.mock_calls[0] assert ( - args[0] - == analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() + args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() ) @pytest.mark.asyncio -async def test_cancel_display_video360_advertiser_link_proposal_async_use_cached_wrapped_rpc( +async def test_get_display_video360_advertiser_link_proposal_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -22162,33 +22079,34 @@ async def test_cancel_display_video360_advertiser_link_proposal_async_use_cached # Ensure method has been cached assert ( - client._client._transport.cancel_display_video360_advertiser_link_proposal + client._client._transport.get_display_video360_advertiser_link_proposal in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.cancel_display_video360_advertiser_link_proposal - ] = mock_object + client._client._transport.get_display_video360_advertiser_link_proposal + ] = mock_rpc request = {} - await client.cancel_display_video360_advertiser_link_proposal(request) + await client.get_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.cancel_display_video360_advertiser_link_proposal(request) + await client.get_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_cancel_display_video360_advertiser_link_proposal_async( +async def test_get_display_video360_advertiser_link_proposal_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, + request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22201,8 +22119,7 @@ async def test_cancel_display_video360_advertiser_link_proposal_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -22213,14 +22130,12 @@ async def test_cancel_display_video360_advertiser_link_proposal_async( validation_email="validation_email_value", ) ) - response = await client.cancel_display_video360_advertiser_link_proposal( - request - ) + response = await client.get_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -22232,28 +22147,27 @@ async def test_cancel_display_video360_advertiser_link_proposal_async( @pytest.mark.asyncio -async def test_cancel_display_video360_advertiser_link_proposal_async_from_dict(): - await test_cancel_display_video360_advertiser_link_proposal_async(request_type=dict) +async def test_get_display_video360_advertiser_link_proposal_async_from_dict(): + await test_get_display_video360_advertiser_link_proposal_async(request_type=dict) -def test_cancel_display_video360_advertiser_link_proposal_field_headers(): +def test_get_display_video360_advertiser_link_proposal_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() - client.cancel_display_video360_advertiser_link_proposal(request) + client.get_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -22269,26 +22183,25 @@ def test_cancel_display_video360_advertiser_link_proposal_field_headers(): @pytest.mark.asyncio -async def test_cancel_display_video360_advertiser_link_proposal_field_headers_async(): +async def test_get_display_video360_advertiser_link_proposal_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_display_video360_advertiser_link_proposal), - "__call__", + type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DisplayVideo360AdvertiserLinkProposal() ) - await client.cancel_display_video360_advertiser_link_proposal(request) + await client.get_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -22303,14 +22216,102 @@ async def test_cancel_display_video360_advertiser_link_proposal_field_headers_as ) in kw["metadata"] +def test_get_display_video360_advertiser_link_proposal_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_display_video360_advertiser_link_proposal( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_display_video360_advertiser_link_proposal_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_display_video360_advertiser_link_proposal( + analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_display_video360_advertiser_link_proposal_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.DisplayVideo360AdvertiserLinkProposal() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_display_video360_advertiser_link_proposal( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_display_video360_advertiser_link_proposal_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_display_video360_advertiser_link_proposal( + analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest(), + name="name_value", + ) + + @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateCustomDimensionRequest, + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, dict, ], ) -def test_create_custom_dimension(request_type, transport: str = "grpc"): +def test_list_display_video360_advertiser_link_proposals( + request_type, transport: str = "grpc" +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22322,36 +22323,29 @@ def test_create_custom_dimension(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_dimension), "__call__" + type(client.transport.list_display_video360_advertiser_link_proposals), + "__call__", ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.CustomDimension( - name="name_value", - parameter_name="parameter_name_value", - display_name="display_name_value", - description="description_value", - scope=resources.CustomDimension.DimensionScope.EVENT, - disallow_ads_personalization=True, + call.return_value = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + next_page_token="next_page_token_value", + ) ) - response = client.create_custom_dimension(request) + response = client.list_display_video360_advertiser_link_proposals(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateCustomDimensionRequest() + request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CustomDimension) - assert response.name == "name_value" - assert response.parameter_name == "parameter_name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.scope == resources.CustomDimension.DimensionScope.EVENT - assert response.disallow_ads_personalization is True + assert isinstance(response, pagers.ListDisplayVideo360AdvertiserLinkProposalsPager) + assert response.next_page_token == "next_page_token_value" -def test_create_custom_dimension_empty_call(): +def test_list_display_video360_advertiser_link_proposals_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -22361,18 +22355,22 @@ def test_create_custom_dimension_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_custom_dimension), "__call__" + type(client.transport.list_display_video360_advertiser_link_proposals), + "__call__", ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_custom_dimension() + client.list_display_video360_advertiser_link_proposals() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateCustomDimensionRequest() + assert ( + args[0] + == analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() + ) -def test_create_custom_dimension_non_empty_request_with_auto_populated_field(): +def test_list_display_video360_advertiser_link_proposals_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -22383,26 +22381,31 @@ def test_create_custom_dimension_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.CreateCustomDimensionRequest( + request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest( parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_dimension), "__call__" + type(client.transport.list_display_video360_advertiser_link_proposals), + "__call__", ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_custom_dimension(request=request) + client.list_display_video360_advertiser_link_proposals(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateCustomDimensionRequest( + assert args[ + 0 + ] == analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest( parent="parent_value", + page_token="page_token_value", ) -def test_create_custom_dimension_use_cached_wrapped_rpc(): +def test_list_display_video360_advertiser_link_proposals_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22417,7 +22420,7 @@ def test_create_custom_dimension_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_custom_dimension + client._transport.list_display_video360_advertiser_link_proposals in client._transport._wrapped_methods ) @@ -22427,15 +22430,15 @@ def test_create_custom_dimension_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_custom_dimension + client._transport.list_display_video360_advertiser_link_proposals ] = mock_rpc request = {} - client.create_custom_dimension(request) + client.list_display_video360_advertiser_link_proposals(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_custom_dimension(request) + client.list_display_video360_advertiser_link_proposals(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -22443,7 +22446,7 @@ def test_create_custom_dimension_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_custom_dimension_empty_call_async(): +async def test_list_display_video360_advertiser_link_proposals_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -22453,27 +22456,26 @@ async def test_create_custom_dimension_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_dimension), "__call__" + type(client.transport.list_display_video360_advertiser_link_proposals), + "__call__", ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomDimension( - name="name_value", - parameter_name="parameter_name_value", - display_name="display_name_value", - description="description_value", - scope=resources.CustomDimension.DimensionScope.EVENT, - disallow_ads_personalization=True, + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.create_custom_dimension() + response = await client.list_display_video360_advertiser_link_proposals() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateCustomDimensionRequest() + assert ( + args[0] + == analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() + ) @pytest.mark.asyncio -async def test_create_custom_dimension_async_use_cached_wrapped_rpc( +async def test_list_display_video360_advertiser_link_proposals_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -22490,33 +22492,34 @@ async def test_create_custom_dimension_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_custom_dimension + client._client._transport.list_display_video360_advertiser_link_proposals in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_custom_dimension - ] = mock_object + client._client._transport.list_display_video360_advertiser_link_proposals + ] = mock_rpc request = {} - await client.create_custom_dimension(request) + await client.list_display_video360_advertiser_link_proposals(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_custom_dimension(request) + await client.list_display_video360_advertiser_link_proposals(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_custom_dimension_async( +async def test_list_display_video360_advertiser_link_proposals_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateCustomDimensionRequest, + request_type=analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22529,59 +22532,55 @@ async def test_create_custom_dimension_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_dimension), "__call__" + type(client.transport.list_display_video360_advertiser_link_proposals), + "__call__", ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomDimension( - name="name_value", - parameter_name="parameter_name_value", - display_name="display_name_value", - description="description_value", - scope=resources.CustomDimension.DimensionScope.EVENT, - disallow_ads_personalization=True, + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.create_custom_dimension(request) + response = await client.list_display_video360_advertiser_link_proposals(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateCustomDimensionRequest() + request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CustomDimension) - assert response.name == "name_value" - assert response.parameter_name == "parameter_name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.scope == resources.CustomDimension.DimensionScope.EVENT - assert response.disallow_ads_personalization is True + assert isinstance( + response, pagers.ListDisplayVideo360AdvertiserLinkProposalsAsyncPager + ) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_create_custom_dimension_async_from_dict(): - await test_create_custom_dimension_async(request_type=dict) +async def test_list_display_video360_advertiser_link_proposals_async_from_dict(): + await test_list_display_video360_advertiser_link_proposals_async(request_type=dict) -def test_create_custom_dimension_field_headers(): +def test_list_display_video360_advertiser_link_proposals_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateCustomDimensionRequest() + request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_custom_dimension), "__call__" + type(client.transport.list_display_video360_advertiser_link_proposals), + "__call__", ) as call: - call.return_value = resources.CustomDimension() - client.create_custom_dimension(request) + call.return_value = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() + ) + client.list_display_video360_advertiser_link_proposals(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -22597,25 +22596,26 @@ def test_create_custom_dimension_field_headers(): @pytest.mark.asyncio -async def test_create_custom_dimension_field_headers_async(): +async def test_list_display_video360_advertiser_link_proposals_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateCustomDimensionRequest() + request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_dimension), "__call__" + type(client.transport.list_display_video360_advertiser_link_proposals), + "__call__", ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomDimension() + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() ) - await client.create_custom_dimension(request) + await client.list_display_video360_advertiser_link_proposals(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -22630,22 +22630,24 @@ async def test_create_custom_dimension_field_headers_async(): ) in kw["metadata"] -def test_create_custom_dimension_flattened(): +def test_list_display_video360_advertiser_link_proposals_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_dimension), "__call__" + type(client.transport.list_display_video360_advertiser_link_proposals), + "__call__", ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CustomDimension() + call.return_value = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_custom_dimension( + client.list_display_video360_advertiser_link_proposals( parent="parent_value", - custom_dimension=resources.CustomDimension(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -22655,12 +22657,9 @@ def test_create_custom_dimension_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].custom_dimension - mock_val = resources.CustomDimension(name="name_value") - assert arg == mock_val -def test_create_custom_dimension_flattened_error(): +def test_list_display_video360_advertiser_link_proposals_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -22668,34 +22667,35 @@ def test_create_custom_dimension_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_custom_dimension( - analytics_admin.CreateCustomDimensionRequest(), + client.list_display_video360_advertiser_link_proposals( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest(), parent="parent_value", - custom_dimension=resources.CustomDimension(name="name_value"), ) @pytest.mark.asyncio -async def test_create_custom_dimension_flattened_async(): +async def test_list_display_video360_advertiser_link_proposals_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_dimension), "__call__" + type(client.transport.list_display_video360_advertiser_link_proposals), + "__call__", ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CustomDimension() + call.return_value = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() + ) call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomDimension() + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_custom_dimension( + response = await client.list_display_video360_advertiser_link_proposals( parent="parent_value", - custom_dimension=resources.CustomDimension(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -22705,13 +22705,10 @@ async def test_create_custom_dimension_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].custom_dimension - mock_val = resources.CustomDimension(name="name_value") - assert arg == mock_val @pytest.mark.asyncio -async def test_create_custom_dimension_flattened_error_async(): +async def test_list_display_video360_advertiser_link_proposals_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -22719,21 +22716,240 @@ async def test_create_custom_dimension_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_custom_dimension( - analytics_admin.CreateCustomDimensionRequest(), + await client.list_display_video360_advertiser_link_proposals( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest(), parent="parent_value", - custom_dimension=resources.CustomDimension(name="name_value"), ) +def test_list_display_video360_advertiser_link_proposals_pager( + transport_name: str = "grpc", +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_display_video360_advertiser_link_proposals), + "__call__", + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), + ], + next_page_token="abc", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[], + next_page_token="def", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + ], + next_page_token="ghi", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_display_video360_advertiser_link_proposals( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, resources.DisplayVideo360AdvertiserLinkProposal) + for i in results + ) + + +def test_list_display_video360_advertiser_link_proposals_pages( + transport_name: str = "grpc", +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_display_video360_advertiser_link_proposals), + "__call__", + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), + ], + next_page_token="abc", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[], + next_page_token="def", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + ], + next_page_token="ghi", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), + ], + ), + RuntimeError, + ) + pages = list( + client.list_display_video360_advertiser_link_proposals(request={}).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_display_video360_advertiser_link_proposals_async_pager(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_display_video360_advertiser_link_proposals), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), + ], + next_page_token="abc", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[], + next_page_token="def", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + ], + next_page_token="ghi", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_display_video360_advertiser_link_proposals( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, resources.DisplayVideo360AdvertiserLinkProposal) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_display_video360_advertiser_link_proposals_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_display_video360_advertiser_link_proposals), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), + ], + next_page_token="abc", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[], + next_page_token="def", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + ], + next_page_token="ghi", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_display_video360_advertiser_link_proposals(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateCustomDimensionRequest, + analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, dict, ], ) -def test_update_custom_dimension(request_type, transport: str = "grpc"): +def test_create_display_video360_advertiser_link_proposal( + request_type, transport: str = "grpc" +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22745,36 +22961,33 @@ def test_update_custom_dimension(request_type, transport: str = "grpc"): # Mock the actual call within 
the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_dimension), "__call__" + type(client.transport.create_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CustomDimension( + call.return_value = resources.DisplayVideo360AdvertiserLinkProposal( name="name_value", - parameter_name="parameter_name_value", - display_name="display_name_value", - description="description_value", - scope=resources.CustomDimension.DimensionScope.EVENT, - disallow_ads_personalization=True, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", + validation_email="validation_email_value", ) - response = client.update_custom_dimension(request) + response = client.create_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateCustomDimensionRequest() + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CustomDimension) + assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) assert response.name == "name_value" - assert response.parameter_name == "parameter_name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.scope == resources.CustomDimension.DimensionScope.EVENT - assert response.disallow_ads_personalization is True + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.validation_email == "validation_email_value" -def test_update_custom_dimension_empty_call(): +def test_create_display_video360_advertiser_link_proposal_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -22784,18 +22997,22 @@ def test_update_custom_dimension_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_dimension), "__call__" + type(client.transport.create_display_video360_advertiser_link_proposal), + "__call__", ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_custom_dimension() + client.create_display_video360_advertiser_link_proposal() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateCustomDimensionRequest() + assert ( + args[0] + == analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() + ) -def test_update_custom_dimension_non_empty_request_with_auto_populated_field(): +def test_create_display_video360_advertiser_link_proposal_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -22806,22 +23023,29 @@ def test_update_custom_dimension_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.UpdateCustomDimensionRequest() + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest( + parent="parent_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_dimension), "__call__" + type(client.transport.create_display_video360_advertiser_link_proposal), + "__call__", ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_custom_dimension(request=request) + client.create_display_video360_advertiser_link_proposal(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateCustomDimensionRequest() + assert args[ + 0 + ] == analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest( + parent="parent_value", + ) -def test_update_custom_dimension_use_cached_wrapped_rpc(): +def test_create_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22836,7 +23060,7 @@ def test_update_custom_dimension_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_custom_dimension + client._transport.create_display_video360_advertiser_link_proposal in client._transport._wrapped_methods ) @@ -22846,15 +23070,15 @@ def test_update_custom_dimension_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_custom_dimension + client._transport.create_display_video360_advertiser_link_proposal ] = mock_rpc request = {} - client.update_custom_dimension(request) + client.create_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_custom_dimension(request) + client.create_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -22862,7 +23086,7 @@ def test_update_custom_dimension_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_custom_dimension_empty_call_async(): +async def test_create_display_video360_advertiser_link_proposal_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -22872,27 +23096,29 @@ async def test_update_custom_dimension_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_dimension), "__call__" + type(client.transport.create_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomDimension( + resources.DisplayVideo360AdvertiserLinkProposal( name="name_value", - parameter_name="parameter_name_value", - display_name="display_name_value", - description="description_value", - scope=resources.CustomDimension.DimensionScope.EVENT, - disallow_ads_personalization=True, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", + validation_email="validation_email_value", ) ) - response = await client.update_custom_dimension() + response = await client.create_display_video360_advertiser_link_proposal() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateCustomDimensionRequest() + assert ( + args[0] + == analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() + ) @pytest.mark.asyncio -async def test_update_custom_dimension_async_use_cached_wrapped_rpc( +async def test_create_display_video360_advertiser_link_proposal_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -22909,33 +23135,34 @@ async def test_update_custom_dimension_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_custom_dimension + client._client._transport.create_display_video360_advertiser_link_proposal in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_custom_dimension - ] = mock_object + client._client._transport.create_display_video360_advertiser_link_proposal + ] = mock_rpc request = {} - await client.update_custom_dimension(request) + await client.create_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub 
method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_custom_dimension(request) + await client.create_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_custom_dimension_async( +async def test_create_display_video360_advertiser_link_proposal_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateCustomDimensionRequest, + request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22948,59 +23175,59 @@ async def test_update_custom_dimension_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_dimension), "__call__" + type(client.transport.create_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomDimension( + resources.DisplayVideo360AdvertiserLinkProposal( name="name_value", - parameter_name="parameter_name_value", - display_name="display_name_value", - description="description_value", - scope=resources.CustomDimension.DimensionScope.EVENT, - disallow_ads_personalization=True, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", + validation_email="validation_email_value", ) ) - response = await client.update_custom_dimension(request) + response = await client.create_display_video360_advertiser_link_proposal( + request + ) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateCustomDimensionRequest() + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CustomDimension) + assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) assert response.name == "name_value" - assert response.parameter_name == "parameter_name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.scope == resources.CustomDimension.DimensionScope.EVENT - assert response.disallow_ads_personalization is True + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.validation_email == "validation_email_value" @pytest.mark.asyncio -async def test_update_custom_dimension_async_from_dict(): - await test_update_custom_dimension_async(request_type=dict) +async def test_create_display_video360_advertiser_link_proposal_async_from_dict(): + await test_create_display_video360_advertiser_link_proposal_async(request_type=dict) -def test_update_custom_dimension_field_headers(): +def test_create_display_video360_advertiser_link_proposal_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateCustomDimensionRequest() + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() - request.custom_dimension.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_custom_dimension), "__call__" + type(client.transport.create_display_video360_advertiser_link_proposal), + "__call__", ) as call: - call.return_value = resources.CustomDimension() - client.update_custom_dimension(request) + call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() + client.create_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -23011,30 +23238,31 @@ def test_update_custom_dimension_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "custom_dimension.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_custom_dimension_field_headers_async(): +async def test_create_display_video360_advertiser_link_proposal_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateCustomDimensionRequest() + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() - request.custom_dimension.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_dimension), "__call__" + type(client.transport.create_display_video360_advertiser_link_proposal), + "__call__", ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomDimension() + resources.DisplayVideo360AdvertiserLinkProposal() ) - await client.update_custom_dimension(request) + await client.create_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -23045,41 +23273,44 @@ async def test_update_custom_dimension_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "custom_dimension.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_custom_dimension_flattened(): +def test_create_display_video360_advertiser_link_proposal_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_dimension), "__call__" + type(client.transport.create_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CustomDimension() + call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_custom_dimension( - custom_dimension=resources.CustomDimension(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_display_video360_advertiser_link_proposal( + parent="parent_value", + display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal( + name="name_value" + ), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].custom_dimension - mock_val = resources.CustomDimension(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].display_video_360_advertiser_link_proposal + mock_val = resources.DisplayVideo360AdvertiserLinkProposal(name="name_value") assert arg == mock_val -def test_update_custom_dimension_flattened_error(): +def test_create_display_video360_advertiser_link_proposal_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -23087,50 +23318,55 @@ def test_update_custom_dimension_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_custom_dimension( - analytics_admin.UpdateCustomDimensionRequest(), - custom_dimension=resources.CustomDimension(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_display_video360_advertiser_link_proposal( + analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest(), + parent="parent_value", + display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal( + name="name_value" + ), ) @pytest.mark.asyncio -async def test_update_custom_dimension_flattened_async(): +async def test_create_display_video360_advertiser_link_proposal_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_dimension), "__call__" + type(client.transport.create_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.CustomDimension() + call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomDimension() + resources.DisplayVideo360AdvertiserLinkProposal() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_custom_dimension( - custom_dimension=resources.CustomDimension(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.create_display_video360_advertiser_link_proposal( + parent="parent_value", + display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal( + name="name_value" + ), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].custom_dimension - mock_val = resources.CustomDimension(name="name_value") + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].display_video_360_advertiser_link_proposal + mock_val = resources.DisplayVideo360AdvertiserLinkProposal(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_update_custom_dimension_flattened_error_async(): +async def test_create_display_video360_advertiser_link_proposal_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -23138,21 +23374,25 @@ async def test_update_custom_dimension_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_custom_dimension( - analytics_admin.UpdateCustomDimensionRequest(), - custom_dimension=resources.CustomDimension(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.create_display_video360_advertiser_link_proposal( + analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest(), + parent="parent_value", + display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal( + name="name_value" + ), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListCustomDimensionsRequest, + analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, dict, ], ) -def test_list_custom_dimensions(request_type, transport: str = "grpc"): +def test_delete_display_video360_advertiser_link_proposal( + request_type, transport: str = "grpc" +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23164,26 +23404,24 @@ def test_list_custom_dimensions(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_dimensions), "__call__" + type(client.transport.delete_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListCustomDimensionsResponse( - next_page_token="next_page_token_value", - ) - response = client.list_custom_dimensions(request) + call.return_value = None + response = client.delete_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListCustomDimensionsRequest() + request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCustomDimensionsPager) - assert response.next_page_token == "next_page_token_value" + assert response is None -def test_list_custom_dimensions_empty_call(): +def test_delete_display_video360_advertiser_link_proposal_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -23193,18 +23431,22 @@ def test_list_custom_dimensions_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_dimensions), "__call__" + type(client.transport.delete_display_video360_advertiser_link_proposal), + "__call__", ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_custom_dimensions() + client.delete_display_video360_advertiser_link_proposal() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListCustomDimensionsRequest() + assert ( + args[0] + == analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() + ) -def test_list_custom_dimensions_non_empty_request_with_auto_populated_field(): +def test_delete_display_video360_advertiser_link_proposal_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AnalyticsAdminServiceClient( @@ -23215,28 +23457,29 @@ def test_list_custom_dimensions_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListCustomDimensionsRequest( - parent="parent_value", - page_token="page_token_value", + request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_dimensions), "__call__" + type(client.transport.delete_display_video360_advertiser_link_proposal), + "__call__", ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_custom_dimensions(request=request) + client.delete_display_video360_advertiser_link_proposal(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListCustomDimensionsRequest( - parent="parent_value", - page_token="page_token_value", + assert args[ + 0 + ] == analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest( + name="name_value", ) -def test_list_custom_dimensions_use_cached_wrapped_rpc(): +def test_delete_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23251,7 +23494,7 @@ def test_list_custom_dimensions_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_custom_dimensions + client._transport.delete_display_video360_advertiser_link_proposal in client._transport._wrapped_methods ) @@ -23261,15 +23504,15 @@ def 
test_list_custom_dimensions_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_custom_dimensions + client._transport.delete_display_video360_advertiser_link_proposal ] = mock_rpc request = {} - client.list_custom_dimensions(request) + client.delete_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_custom_dimensions(request) + client.delete_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -23277,7 +23520,7 @@ def test_list_custom_dimensions_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_custom_dimensions_empty_call_async(): +async def test_delete_display_video360_advertiser_link_proposal_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -23287,22 +23530,22 @@ async def test_list_custom_dimensions_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_dimensions), "__call__" + type(client.transport.delete_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListCustomDimensionsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_custom_dimensions() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_display_video360_advertiser_link_proposal() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListCustomDimensionsRequest() + assert ( + args[0] + == analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() + ) @pytest.mark.asyncio -async def test_list_custom_dimensions_async_use_cached_wrapped_rpc( +async def test_delete_display_video360_advertiser_link_proposal_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -23319,33 +23562,34 @@ async def test_list_custom_dimensions_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_custom_dimensions + client._client._transport.delete_display_video360_advertiser_link_proposal in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_custom_dimensions - ] = mock_object + client._client._transport.delete_display_video360_advertiser_link_proposal + ] = mock_rpc request = {} - await client.list_custom_dimensions(request) + await client.delete_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_custom_dimensions(request) + await client.delete_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_custom_dimensions_async( +async def test_delete_display_video360_advertiser_link_proposal_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ListCustomDimensionsRequest, + request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23358,49 +23602,48 @@ async def test_list_custom_dimensions_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_dimensions), "__call__" + type(client.transport.delete_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListCustomDimensionsResponse( - next_page_token="next_page_token_value", - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_display_video360_advertiser_link_proposal( + request ) - response = await client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListCustomDimensionsRequest() + request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListCustomDimensionsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert response is None @pytest.mark.asyncio -async def test_list_custom_dimensions_async_from_dict(): - await test_list_custom_dimensions_async(request_type=dict) +async def test_delete_display_video360_advertiser_link_proposal_async_from_dict(): + await test_delete_display_video360_advertiser_link_proposal_async(request_type=dict) -def test_list_custom_dimensions_field_headers(): +def test_delete_display_video360_advertiser_link_proposal_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListCustomDimensionsRequest() + request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_dimensions), "__call__" + type(client.transport.delete_display_video360_advertiser_link_proposal), + "__call__", ) as call: - call.return_value = analytics_admin.ListCustomDimensionsResponse() - client.list_custom_dimensions(request) + call.return_value = None + client.delete_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -23411,30 +23654,29 @@ def test_list_custom_dimensions_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_custom_dimensions_field_headers_async(): +async def test_delete_display_video360_advertiser_link_proposal_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListCustomDimensionsRequest() + request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_dimensions), "__call__" + type(client.transport.delete_display_video360_advertiser_link_proposal), + "__call__", ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListCustomDimensionsResponse() - ) - await client.list_custom_dimensions(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -23445,37 +23687,38 @@ async def test_list_custom_dimensions_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_custom_dimensions_flattened(): +def test_delete_display_video360_advertiser_link_proposal_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_custom_dimensions), "__call__" + type(client.transport.delete_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListCustomDimensionsResponse() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_custom_dimensions( - parent="parent_value", + client.delete_display_video360_advertiser_link_proposal( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_custom_dimensions_flattened_error(): +def test_delete_display_video360_advertiser_link_proposal_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -23483,45 +23726,44 @@ def test_list_custom_dimensions_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_custom_dimensions( - analytics_admin.ListCustomDimensionsRequest(), - parent="parent_value", + client.delete_display_video360_advertiser_link_proposal( + analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_custom_dimensions_flattened_async(): +async def test_delete_display_video360_advertiser_link_proposal_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_custom_dimensions), "__call__" + type(client.transport.delete_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListCustomDimensionsResponse() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListCustomDimensionsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_custom_dimensions( - parent="parent_value", + response = await client.delete_display_video360_advertiser_link_proposal( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_custom_dimensions_flattened_error_async(): +async def test_delete_display_video360_advertiser_link_proposal_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -23529,222 +23771,22 @@ async def test_list_custom_dimensions_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_custom_dimensions( - analytics_admin.ListCustomDimensionsRequest(), - parent="parent_value", - ) - - -def test_list_custom_dimensions_pager(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_custom_dimensions), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - resources.CustomDimension(), - resources.CustomDimension(), - ], - next_page_token="abc", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[], - next_page_token="def", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - ], - next_page_token="ghi", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - resources.CustomDimension(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_custom_dimensions(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.CustomDimension) for i in results) - - -def test_list_custom_dimensions_pages(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_custom_dimensions), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - resources.CustomDimension(), - resources.CustomDimension(), - ], - next_page_token="abc", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[], - next_page_token="def", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - ], - next_page_token="ghi", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - resources.CustomDimension(), - ], - ), - RuntimeError, - ) - pages = list(client.list_custom_dimensions(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_custom_dimensions_async_pager(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_custom_dimensions), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - resources.CustomDimension(), - resources.CustomDimension(), - ], - next_page_token="abc", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[], - next_page_token="def", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - ], - next_page_token="ghi", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - resources.CustomDimension(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_custom_dimensions( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.CustomDimension) for i in responses) - - -@pytest.mark.asyncio -async def test_list_custom_dimensions_async_pages(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_custom_dimensions), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - resources.CustomDimension(), - resources.CustomDimension(), - ], - next_page_token="abc", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[], - next_page_token="def", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - ], - next_page_token="ghi", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - resources.CustomDimension(), - ], - ), - RuntimeError, + await client.delete_display_video360_advertiser_link_proposal( + analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_custom_dimensions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_admin.ArchiveCustomDimensionRequest, + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, dict, ], ) -def test_archive_custom_dimension(request_type, transport: str = "grpc"): +def test_approve_display_video360_advertiser_link_proposal( + request_type, transport: str = "grpc" +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23756,23 +23798,28 @@ def test_archive_custom_dimension(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.archive_custom_dimension), "__call__" + type(client.transport.approve_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.archive_custom_dimension(request) + call.return_value = ( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() + ) + response = client.approve_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ArchiveCustomDimensionRequest() + request = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance( + response, analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse + ) -def test_archive_custom_dimension_empty_call(): +def test_approve_display_video360_advertiser_link_proposal_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -23782,18 +23829,22 @@ def test_archive_custom_dimension_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_dimension), "__call__" + type(client.transport.approve_display_video360_advertiser_link_proposal), + "__call__", ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.archive_custom_dimension() + client.approve_display_video360_advertiser_link_proposal() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ArchiveCustomDimensionRequest() + assert ( + args[0] + == analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() + ) -def test_archive_custom_dimension_non_empty_request_with_auto_populated_field(): +def test_approve_display_video360_advertiser_link_proposal_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -23804,26 +23855,29 @@ def test_archive_custom_dimension_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ArchiveCustomDimensionRequest( + request = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_dimension), "__call__" + type(client.transport.approve_display_video360_advertiser_link_proposal), + "__call__", ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.archive_custom_dimension(request=request) + client.approve_display_video360_advertiser_link_proposal(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ArchiveCustomDimensionRequest( + assert args[ + 0 + ] == analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest( name="name_value", ) -def test_archive_custom_dimension_use_cached_wrapped_rpc(): +def test_approve_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23838,7 +23892,7 @@ def test_archive_custom_dimension_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.archive_custom_dimension + client._transport.approve_display_video360_advertiser_link_proposal in client._transport._wrapped_methods ) @@ -23848,15 +23902,15 @@ def test_archive_custom_dimension_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.archive_custom_dimension + client._transport.approve_display_video360_advertiser_link_proposal ] = mock_rpc request = {} - client.archive_custom_dimension(request) + client.approve_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.archive_custom_dimension(request) + client.approve_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -23864,7 +23918,7 @@ def test_archive_custom_dimension_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_archive_custom_dimension_empty_call_async(): +async def test_approve_display_video360_advertiser_link_proposal_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -23874,18 +23928,24 @@ async def test_archive_custom_dimension_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_dimension), "__call__" - ) as call: + type(client.transport.approve_display_video360_advertiser_link_proposal), + "__call__", + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.archive_custom_dimension() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() + ) + response = await client.approve_display_video360_advertiser_link_proposal() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ArchiveCustomDimensionRequest() + assert ( + args[0] + == analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() + ) @pytest.mark.asyncio -async def test_archive_custom_dimension_async_use_cached_wrapped_rpc( +async def test_approve_display_video360_advertiser_link_proposal_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -23902,33 +23962,34 @@ async def test_archive_custom_dimension_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.archive_custom_dimension + client._client._transport.approve_display_video360_advertiser_link_proposal in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.archive_custom_dimension - ] = mock_object + client._client._transport.approve_display_video360_advertiser_link_proposal + ] = mock_rpc request = {} - await client.archive_custom_dimension(request) + await client.approve_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.archive_custom_dimension(request) + await client.approve_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_archive_custom_dimension_async( +async def test_approve_display_video360_advertiser_link_proposal_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ArchiveCustomDimensionRequest, + request_type=analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23941,44 +24002,56 @@ async def test_archive_custom_dimension_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_dimension), "__call__" + type(client.transport.approve_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.archive_custom_dimension(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() + ) + response = await client.approve_display_video360_advertiser_link_proposal( + request + ) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ArchiveCustomDimensionRequest() + request = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance( + response, analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse + ) @pytest.mark.asyncio -async def test_archive_custom_dimension_async_from_dict(): - await test_archive_custom_dimension_async(request_type=dict) +async def test_approve_display_video360_advertiser_link_proposal_async_from_dict(): + await test_approve_display_video360_advertiser_link_proposal_async( + request_type=dict + ) -def test_archive_custom_dimension_field_headers(): +def test_approve_display_video360_advertiser_link_proposal_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ArchiveCustomDimensionRequest() + request = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_dimension), "__call__" + type(client.transport.approve_display_video360_advertiser_link_proposal), + "__call__", ) as call: - call.return_value = None - client.archive_custom_dimension(request) + call.return_value = ( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() + ) + client.approve_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -23994,23 +24067,26 @@ def test_archive_custom_dimension_field_headers(): @pytest.mark.asyncio -async def test_archive_custom_dimension_field_headers_async(): +async def test_approve_display_video360_advertiser_link_proposal_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = analytics_admin.ArchiveCustomDimensionRequest() + request = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_dimension), "__call__" + type(client.transport.approve_display_video360_advertiser_link_proposal), + "__call__", ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.archive_custom_dimension(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() + ) + await client.approve_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -24025,98 +24101,16 @@ async def test_archive_custom_dimension_field_headers_async(): ) in kw["metadata"] -def test_archive_custom_dimension_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.archive_custom_dimension), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.archive_custom_dimension( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_archive_custom_dimension_flattened_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.archive_custom_dimension( - analytics_admin.ArchiveCustomDimensionRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_archive_custom_dimension_flattened_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.archive_custom_dimension), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.archive_custom_dimension( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_archive_custom_dimension_flattened_error_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.archive_custom_dimension( - analytics_admin.ArchiveCustomDimensionRequest(), - name="name_value", - ) - - @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetCustomDimensionRequest, + analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, dict, ], ) -def test_get_custom_dimension(request_type, transport: str = "grpc"): +def test_cancel_display_video360_advertiser_link_proposal( + request_type, transport: str = "grpc" +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24128,36 +24122,33 @@ def test_get_custom_dimension(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_custom_dimension), "__call__" + type(client.transport.cancel_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CustomDimension( + call.return_value = resources.DisplayVideo360AdvertiserLinkProposal( name="name_value", - parameter_name="parameter_name_value", - display_name="display_name_value", - description="description_value", - scope=resources.CustomDimension.DimensionScope.EVENT, - disallow_ads_personalization=True, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", + validation_email="validation_email_value", ) - response = client.get_custom_dimension(request) + response = client.cancel_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetCustomDimensionRequest() + request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CustomDimension) + assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) assert response.name == "name_value" - assert response.parameter_name == "parameter_name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.scope == resources.CustomDimension.DimensionScope.EVENT - assert response.disallow_ads_personalization is True + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.validation_email == "validation_email_value" -def test_get_custom_dimension_empty_call(): +def test_cancel_display_video360_advertiser_link_proposal_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -24167,18 +24158,22 @@ def test_get_custom_dimension_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_custom_dimension), "__call__" + type(client.transport.cancel_display_video360_advertiser_link_proposal), + "__call__", ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_custom_dimension() + client.cancel_display_video360_advertiser_link_proposal() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetCustomDimensionRequest() + assert ( + args[0] + == analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() + ) -def test_get_custom_dimension_non_empty_request_with_auto_populated_field(): +def test_cancel_display_video360_advertiser_link_proposal_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -24189,26 +24184,29 @@ def test_get_custom_dimension_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetCustomDimensionRequest( + request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_custom_dimension), "__call__" + type(client.transport.cancel_display_video360_advertiser_link_proposal), + "__call__", ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_custom_dimension(request=request) + client.cancel_display_video360_advertiser_link_proposal(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetCustomDimensionRequest( + assert args[ + 0 + ] == analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest( name="name_value", ) -def test_get_custom_dimension_use_cached_wrapped_rpc(): +def test_cancel_display_video360_advertiser_link_proposal_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24223,7 +24221,8 @@ def test_get_custom_dimension_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_custom_dimension in client._transport._wrapped_methods + client._transport.cancel_display_video360_advertiser_link_proposal + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -24232,15 +24231,15 @@ def test_get_custom_dimension_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_custom_dimension + client._transport.cancel_display_video360_advertiser_link_proposal ] = mock_rpc request = {} - client.get_custom_dimension(request) + client.cancel_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_custom_dimension(request) + client.cancel_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -24248,7 +24247,7 @@ def test_get_custom_dimension_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_custom_dimension_empty_call_async(): +async def test_cancel_display_video360_advertiser_link_proposal_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -24258,27 +24257,29 @@ async def test_get_custom_dimension_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_custom_dimension), "__call__" + type(client.transport.cancel_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomDimension( + resources.DisplayVideo360AdvertiserLinkProposal( name="name_value", - parameter_name="parameter_name_value", - display_name="display_name_value", - description="description_value", - scope=resources.CustomDimension.DimensionScope.EVENT, - disallow_ads_personalization=True, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", + validation_email="validation_email_value", ) ) - response = await client.get_custom_dimension() + response = await client.cancel_display_video360_advertiser_link_proposal() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetCustomDimensionRequest() + assert ( + args[0] + == analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() + ) @pytest.mark.asyncio -async def test_get_custom_dimension_async_use_cached_wrapped_rpc( +async def test_cancel_display_video360_advertiser_link_proposal_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -24295,33 +24296,34 @@ async def test_get_custom_dimension_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_custom_dimension + client._client._transport.cancel_display_video360_advertiser_link_proposal in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_custom_dimension - ] = mock_object + client._client._transport.cancel_display_video360_advertiser_link_proposal + ] = mock_rpc request = {} - await client.get_custom_dimension(request) + await client.cancel_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_custom_dimension(request) + await client.cancel_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_custom_dimension_async( +async def test_cancel_display_video360_advertiser_link_proposal_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.GetCustomDimensionRequest, + request_type=analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24334,59 +24336,59 @@ async def test_get_custom_dimension_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_custom_dimension), "__call__" + type(client.transport.cancel_display_video360_advertiser_link_proposal), + "__call__", ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomDimension( + resources.DisplayVideo360AdvertiserLinkProposal( name="name_value", - parameter_name="parameter_name_value", - display_name="display_name_value", - description="description_value", - scope=resources.CustomDimension.DimensionScope.EVENT, - disallow_ads_personalization=True, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", + validation_email="validation_email_value", ) ) - response = await client.get_custom_dimension(request) + response = await client.cancel_display_video360_advertiser_link_proposal( + request + ) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetCustomDimensionRequest() + request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CustomDimension) + assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) assert response.name == "name_value" - assert response.parameter_name == "parameter_name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.scope == resources.CustomDimension.DimensionScope.EVENT - assert response.disallow_ads_personalization is True + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.validation_email == "validation_email_value" @pytest.mark.asyncio -async def test_get_custom_dimension_async_from_dict(): - await test_get_custom_dimension_async(request_type=dict) +async def test_cancel_display_video360_advertiser_link_proposal_async_from_dict(): + await test_cancel_display_video360_advertiser_link_proposal_async(request_type=dict) -def test_get_custom_dimension_field_headers(): +def test_cancel_display_video360_advertiser_link_proposal_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetCustomDimensionRequest() + request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_custom_dimension), "__call__" + type(client.transport.cancel_display_video360_advertiser_link_proposal), + "__call__", ) as call: - call.return_value = resources.CustomDimension() - client.get_custom_dimension(request) + call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() + client.cancel_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -24402,25 +24404,26 @@ def test_get_custom_dimension_field_headers(): @pytest.mark.asyncio -async def test_get_custom_dimension_field_headers_async(): +async def test_cancel_display_video360_advertiser_link_proposal_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetCustomDimensionRequest() + request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_custom_dimension), "__call__" + type(client.transport.cancel_display_video360_advertiser_link_proposal), + "__call__", ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomDimension() + resources.DisplayVideo360AdvertiserLinkProposal() ) - await client.get_custom_dimension(request) + await client.cancel_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -24435,100 +24438,14 @@ async def test_get_custom_dimension_field_headers_async(): ) in kw["metadata"] -def test_get_custom_dimension_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_custom_dimension), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = resources.CustomDimension() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_custom_dimension( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_custom_dimension_flattened_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_custom_dimension( - analytics_admin.GetCustomDimensionRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_custom_dimension_flattened_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_custom_dimension), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.CustomDimension() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomDimension() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_custom_dimension( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_custom_dimension_flattened_error_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_custom_dimension( - analytics_admin.GetCustomDimensionRequest(), - name="name_value", - ) - - @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateCustomMetricRequest, + analytics_admin.CreateCustomDimensionRequest, dict, ], ) -def test_create_custom_metric(request_type, transport: str = "grpc"): +def test_create_custom_dimension(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24540,42 +24457,36 @@ def test_create_custom_metric(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_metric), "__call__" + type(client.transport.create_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.CustomMetric( + call.return_value = resources.CustomDimension( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", - measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, - scope=resources.CustomMetric.MetricScope.EVENT, - restricted_metric_type=[ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ], + scope=resources.CustomDimension.DimensionScope.EVENT, + disallow_ads_personalization=True, ) - response = client.create_custom_metric(request) + response = client.create_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateCustomMetricRequest() + request = analytics_admin.CreateCustomDimensionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CustomMetric) + assert isinstance(response, resources.CustomDimension) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD - assert response.scope == resources.CustomMetric.MetricScope.EVENT - assert response.restricted_metric_type == [ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ] + assert response.scope == resources.CustomDimension.DimensionScope.EVENT + assert response.disallow_ads_personalization is True -def test_create_custom_metric_empty_call(): +def test_create_custom_dimension_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceClient( @@ -24585,18 +24496,18 @@ def test_create_custom_metric_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_metric), "__call__" + type(client.transport.create_custom_dimension), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_custom_metric() + client.create_custom_dimension() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateCustomMetricRequest() + assert args[0] == analytics_admin.CreateCustomDimensionRequest() -def test_create_custom_metric_non_empty_request_with_auto_populated_field(): +def test_create_custom_dimension_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -24607,26 +24518,26 @@ def test_create_custom_metric_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.CreateCustomMetricRequest( + request = analytics_admin.CreateCustomDimensionRequest( parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_metric), "__call__" + type(client.transport.create_custom_dimension), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_custom_metric(request=request) + client.create_custom_dimension(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateCustomMetricRequest( + assert args[0] == analytics_admin.CreateCustomDimensionRequest( parent="parent_value", ) -def test_create_custom_metric_use_cached_wrapped_rpc(): +def test_create_custom_dimension_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24641,7 +24552,8 @@ def test_create_custom_metric_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_custom_metric in client._transport._wrapped_methods + client._transport.create_custom_dimension + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -24650,15 +24562,15 @@ def test_create_custom_metric_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_custom_metric + client._transport.create_custom_dimension ] = mock_rpc request = {} - client.create_custom_metric(request) + client.create_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_custom_metric(request) + client.create_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -24666,7 +24578,7 @@ def test_create_custom_metric_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_custom_metric_empty_call_async(): +async def test_create_custom_dimension_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -24676,30 +24588,27 @@ async def test_create_custom_metric_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_metric), "__call__" + type(client.transport.create_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomMetric( + resources.CustomDimension( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", - measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, - scope=resources.CustomMetric.MetricScope.EVENT, - restricted_metric_type=[ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ], + scope=resources.CustomDimension.DimensionScope.EVENT, + disallow_ads_personalization=True, ) ) - response = await client.create_custom_metric() + response = await client.create_custom_dimension() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateCustomMetricRequest() + assert args[0] == analytics_admin.CreateCustomDimensionRequest() @pytest.mark.asyncio -async def test_create_custom_metric_async_use_cached_wrapped_rpc( +async def test_create_custom_dimension_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -24716,33 +24625,34 @@ async def test_create_custom_metric_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_custom_metric + client._client._transport.create_custom_dimension in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - 
client._client._transport.create_custom_metric - ] = mock_object + client._client._transport.create_custom_dimension + ] = mock_rpc request = {} - await client.create_custom_metric(request) + await client.create_custom_dimension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_custom_metric(request) + await client.create_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_custom_metric_async( +async def test_create_custom_dimension_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateCustomMetricRequest, + request_type=analytics_admin.CreateCustomDimensionRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24755,65 +24665,59 @@ async def test_create_custom_metric_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_metric), "__call__" + type(client.transport.create_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomMetric( + resources.CustomDimension( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", - measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, - scope=resources.CustomMetric.MetricScope.EVENT, - restricted_metric_type=[ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ], + scope=resources.CustomDimension.DimensionScope.EVENT, + disallow_ads_personalization=True, ) ) - response = await client.create_custom_metric(request) + response = await client.create_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateCustomMetricRequest() + request = analytics_admin.CreateCustomDimensionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CustomMetric) + assert isinstance(response, resources.CustomDimension) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD - assert response.scope == resources.CustomMetric.MetricScope.EVENT - assert response.restricted_metric_type == [ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ] + assert response.scope == resources.CustomDimension.DimensionScope.EVENT + assert response.disallow_ads_personalization is True @pytest.mark.asyncio -async def test_create_custom_metric_async_from_dict(): - await test_create_custom_metric_async(request_type=dict) +async def test_create_custom_dimension_async_from_dict(): + await test_create_custom_dimension_async(request_type=dict) -def test_create_custom_metric_field_headers(): +def 
test_create_custom_dimension_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateCustomMetricRequest() + request = analytics_admin.CreateCustomDimensionRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_metric), "__call__" + type(client.transport.create_custom_dimension), "__call__" ) as call: - call.return_value = resources.CustomMetric() - client.create_custom_metric(request) + call.return_value = resources.CustomDimension() + client.create_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -24829,25 +24733,25 @@ def test_create_custom_metric_field_headers(): @pytest.mark.asyncio -async def test_create_custom_metric_field_headers_async(): +async def test_create_custom_dimension_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateCustomMetricRequest() + request = analytics_admin.CreateCustomDimensionRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_metric), "__call__" + type(client.transport.create_custom_dimension), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomMetric() + resources.CustomDimension() ) - await client.create_custom_metric(request) + await client.create_custom_dimension(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -24862,22 +24766,22 @@ async def test_create_custom_metric_field_headers_async(): ) in kw["metadata"] -def test_create_custom_metric_flattened(): +def test_create_custom_dimension_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_metric), "__call__" + type(client.transport.create_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CustomMetric() + call.return_value = resources.CustomDimension() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_custom_metric( + client.create_custom_dimension( parent="parent_value", - custom_metric=resources.CustomMetric(name="name_value"), + custom_dimension=resources.CustomDimension(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -24887,12 +24791,12 @@ def test_create_custom_metric_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].custom_metric - mock_val = resources.CustomMetric(name="name_value") + arg = args[0].custom_dimension + mock_val = resources.CustomDimension(name="name_value") assert arg == mock_val -def test_create_custom_metric_flattened_error(): +def test_create_custom_dimension_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -24900,34 +24804,34 @@ def test_create_custom_metric_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_custom_metric( - analytics_admin.CreateCustomMetricRequest(), + client.create_custom_dimension( + analytics_admin.CreateCustomDimensionRequest(), parent="parent_value", - custom_metric=resources.CustomMetric(name="name_value"), + custom_dimension=resources.CustomDimension(name="name_value"), ) @pytest.mark.asyncio -async def test_create_custom_metric_flattened_async(): +async def test_create_custom_dimension_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_custom_metric), "__call__" + type(client.transport.create_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CustomMetric() + call.return_value = resources.CustomDimension() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomMetric() + resources.CustomDimension() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_custom_metric( + response = await client.create_custom_dimension( parent="parent_value", - custom_metric=resources.CustomMetric(name="name_value"), + custom_dimension=resources.CustomDimension(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -24937,13 +24841,13 @@ async def test_create_custom_metric_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].custom_metric - mock_val = resources.CustomMetric(name="name_value") + arg = args[0].custom_dimension + mock_val = resources.CustomDimension(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_create_custom_metric_flattened_error_async(): +async def test_create_custom_dimension_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -24951,21 +24855,21 @@ async def test_create_custom_metric_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_custom_metric( - analytics_admin.CreateCustomMetricRequest(), + await client.create_custom_dimension( + analytics_admin.CreateCustomDimensionRequest(), parent="parent_value", - custom_metric=resources.CustomMetric(name="name_value"), + custom_dimension=resources.CustomDimension(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateCustomMetricRequest, + analytics_admin.UpdateCustomDimensionRequest, dict, ], ) -def test_update_custom_metric(request_type, transport: str = "grpc"): +def test_update_custom_dimension(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24977,42 +24881,36 @@ def test_update_custom_metric(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_metric), "__call__" + type(client.transport.update_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CustomMetric( + call.return_value = resources.CustomDimension( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", - measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, - scope=resources.CustomMetric.MetricScope.EVENT, - restricted_metric_type=[ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ], + scope=resources.CustomDimension.DimensionScope.EVENT, + disallow_ads_personalization=True, ) - response = client.update_custom_metric(request) + response = client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateCustomMetricRequest() + request = analytics_admin.UpdateCustomDimensionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CustomMetric) + assert isinstance(response, resources.CustomDimension) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD - assert response.scope == resources.CustomMetric.MetricScope.EVENT - assert response.restricted_metric_type == [ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ] + assert response.scope == resources.CustomDimension.DimensionScope.EVENT + assert response.disallow_ads_personalization is True -def test_update_custom_metric_empty_call(): +def test_update_custom_dimension_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -25022,18 +24920,18 @@ def test_update_custom_metric_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_metric), "__call__" + type(client.transport.update_custom_dimension), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_custom_metric() + client.update_custom_dimension() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateCustomMetricRequest() + assert args[0] == analytics_admin.UpdateCustomDimensionRequest() -def test_update_custom_metric_non_empty_request_with_auto_populated_field(): +def test_update_custom_dimension_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -25044,22 +24942,22 @@ def test_update_custom_metric_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.UpdateCustomMetricRequest() + request = analytics_admin.UpdateCustomDimensionRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_metric), "__call__" + type(client.transport.update_custom_dimension), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_custom_metric(request=request) + client.update_custom_dimension(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateCustomMetricRequest() + assert args[0] == analytics_admin.UpdateCustomDimensionRequest() -def test_update_custom_metric_use_cached_wrapped_rpc(): +def test_update_custom_dimension_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25074,7 +24972,8 @@ def test_update_custom_metric_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_custom_metric in client._transport._wrapped_methods + client._transport.update_custom_dimension + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -25083,15 +24982,15 @@ def test_update_custom_metric_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_custom_metric + client._transport.update_custom_dimension ] = mock_rpc request = {} - client.update_custom_metric(request) + client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_custom_metric(request) + client.update_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -25099,7 +24998,7 @@ def test_update_custom_metric_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_custom_metric_empty_call_async(): +async def test_update_custom_dimension_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -25109,30 +25008,27 @@ async def test_update_custom_metric_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_metric), "__call__" + type(client.transport.update_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomMetric( + resources.CustomDimension( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", - measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, - scope=resources.CustomMetric.MetricScope.EVENT, - restricted_metric_type=[ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ], + scope=resources.CustomDimension.DimensionScope.EVENT, + disallow_ads_personalization=True, ) ) - response = await client.update_custom_metric() + response = await client.update_custom_dimension() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateCustomMetricRequest() + assert args[0] == analytics_admin.UpdateCustomDimensionRequest() @pytest.mark.asyncio -async def test_update_custom_metric_async_use_cached_wrapped_rpc( +async def test_update_custom_dimension_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -25149,33 +25045,34 @@ async def test_update_custom_metric_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_custom_metric + client._client._transport.update_custom_dimension in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - 
client._client._transport.update_custom_metric - ] = mock_object + client._client._transport.update_custom_dimension + ] = mock_rpc request = {} - await client.update_custom_metric(request) + await client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_custom_metric(request) + await client.update_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_custom_metric_async( +async def test_update_custom_dimension_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateCustomMetricRequest, + request_type=analytics_admin.UpdateCustomDimensionRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25188,65 +25085,59 @@ async def test_update_custom_metric_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_metric), "__call__" + type(client.transport.update_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomMetric( + resources.CustomDimension( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", - measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, - scope=resources.CustomMetric.MetricScope.EVENT, - restricted_metric_type=[ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ], + scope=resources.CustomDimension.DimensionScope.EVENT, + disallow_ads_personalization=True, ) ) - response = await client.update_custom_metric(request) + response = await client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateCustomMetricRequest() + request = analytics_admin.UpdateCustomDimensionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CustomMetric) + assert isinstance(response, resources.CustomDimension) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD - assert response.scope == resources.CustomMetric.MetricScope.EVENT - assert response.restricted_metric_type == [ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ] + assert response.scope == resources.CustomDimension.DimensionScope.EVENT + assert response.disallow_ads_personalization is True @pytest.mark.asyncio -async def test_update_custom_metric_async_from_dict(): - await test_update_custom_metric_async(request_type=dict) +async def test_update_custom_dimension_async_from_dict(): + await test_update_custom_dimension_async(request_type=dict) -def test_update_custom_metric_field_headers(): +def 
test_update_custom_dimension_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateCustomMetricRequest() + request = analytics_admin.UpdateCustomDimensionRequest() - request.custom_metric.name = "name_value" + request.custom_dimension.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_metric), "__call__" + type(client.transport.update_custom_dimension), "__call__" ) as call: - call.return_value = resources.CustomMetric() - client.update_custom_metric(request) + call.return_value = resources.CustomDimension() + client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -25257,30 +25148,30 @@ def test_update_custom_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "custom_metric.name=name_value", + "custom_dimension.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_custom_metric_field_headers_async(): +async def test_update_custom_dimension_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateCustomMetricRequest() + request = analytics_admin.UpdateCustomDimensionRequest() - request.custom_metric.name = "name_value" + request.custom_dimension.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_custom_metric), "__call__" + type(client.transport.update_custom_dimension), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomMetric() + resources.CustomDimension() ) - await client.update_custom_metric(request) + await client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -25291,25 +25182,25 @@ async def test_update_custom_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "custom_metric.name=name_value", + "custom_dimension.name=name_value", ) in kw["metadata"] -def test_update_custom_metric_flattened(): +def test_update_custom_dimension_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_metric), "__call__" + type(client.transport.update_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CustomMetric() + call.return_value = resources.CustomDimension() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_custom_metric( - custom_metric=resources.CustomMetric(name="name_value"), + client.update_custom_dimension( + custom_dimension=resources.CustomDimension(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -25317,15 +25208,15 @@ def test_update_custom_metric_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].custom_metric - mock_val = resources.CustomMetric(name="name_value") + arg = args[0].custom_dimension + mock_val = resources.CustomDimension(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_update_custom_metric_flattened_error(): +def test_update_custom_dimension_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -25333,33 +25224,33 @@ def test_update_custom_metric_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_custom_metric( - analytics_admin.UpdateCustomMetricRequest(), - custom_metric=resources.CustomMetric(name="name_value"), + client.update_custom_dimension( + analytics_admin.UpdateCustomDimensionRequest(), + custom_dimension=resources.CustomDimension(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_update_custom_metric_flattened_async(): +async def test_update_custom_dimension_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_custom_metric), "__call__" + type(client.transport.update_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CustomMetric() + call.return_value = resources.CustomDimension() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomMetric() + resources.CustomDimension() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.update_custom_metric( - custom_metric=resources.CustomMetric(name="name_value"), + response = await client.update_custom_dimension( + custom_dimension=resources.CustomDimension(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -25367,8 +25258,8 @@ async def test_update_custom_metric_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].custom_metric - mock_val = resources.CustomMetric(name="name_value") + arg = args[0].custom_dimension + mock_val = resources.CustomDimension(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -25376,7 +25267,7 @@ async def test_update_custom_metric_flattened_async(): @pytest.mark.asyncio -async def test_update_custom_metric_flattened_error_async(): +async def test_update_custom_dimension_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -25384,9 +25275,9 @@ async def test_update_custom_metric_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_custom_metric( - analytics_admin.UpdateCustomMetricRequest(), - custom_metric=resources.CustomMetric(name="name_value"), + await client.update_custom_dimension( + analytics_admin.UpdateCustomDimensionRequest(), + custom_dimension=resources.CustomDimension(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -25394,11 +25285,11 @@ async def test_update_custom_metric_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListCustomMetricsRequest, + analytics_admin.ListCustomDimensionsRequest, dict, ], ) -def test_list_custom_metrics(request_type, transport: str = "grpc"): +def test_list_custom_dimensions(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25410,26 +25301,26 @@ def test_list_custom_metrics(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_metrics), "__call__" + type(client.transport.list_custom_dimensions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListCustomMetricsResponse( + call.return_value = analytics_admin.ListCustomDimensionsResponse( next_page_token="next_page_token_value", ) - response = client.list_custom_metrics(request) + response = client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListCustomMetricsRequest() + request = analytics_admin.ListCustomDimensionsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListCustomMetricsPager) + assert isinstance(response, pagers.ListCustomDimensionsPager) assert response.next_page_token == "next_page_token_value" -def test_list_custom_metrics_empty_call(): +def test_list_custom_dimensions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -25439,18 +25330,18 @@ def test_list_custom_metrics_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_metrics), "__call__" + type(client.transport.list_custom_dimensions), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_custom_metrics() + client.list_custom_dimensions() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListCustomMetricsRequest() + assert args[0] == analytics_admin.ListCustomDimensionsRequest() -def test_list_custom_metrics_non_empty_request_with_auto_populated_field(): +def test_list_custom_dimensions_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -25461,28 +25352,28 @@ def test_list_custom_metrics_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListCustomMetricsRequest( + request = analytics_admin.ListCustomDimensionsRequest( parent="parent_value", page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_custom_metrics), "__call__" + type(client.transport.list_custom_dimensions), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_custom_metrics(request=request) + client.list_custom_dimensions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListCustomMetricsRequest( + assert args[0] == analytics_admin.ListCustomDimensionsRequest( parent="parent_value", page_token="page_token_value", ) -def test_list_custom_metrics_use_cached_wrapped_rpc(): +def test_list_custom_dimensions_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25497,7 +25388,8 @@ def test_list_custom_metrics_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_custom_metrics in client._transport._wrapped_methods + client._transport.list_custom_dimensions + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -25506,15 +25398,15 @@ def test_list_custom_metrics_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_custom_metrics + client._transport.list_custom_dimensions ] = mock_rpc request = {} - client.list_custom_metrics(request) + client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_custom_metrics(request) + client.list_custom_dimensions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -25522,7 +25414,7 @@ def test_list_custom_metrics_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_custom_metrics_empty_call_async(): +async def test_list_custom_dimensions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -25532,22 +25424,22 @@ async def test_list_custom_metrics_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_metrics), "__call__" + type(client.transport.list_custom_dimensions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListCustomMetricsResponse( + analytics_admin.ListCustomDimensionsResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_custom_metrics() + response = await client.list_custom_dimensions() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListCustomMetricsRequest() + assert args[0] == analytics_admin.ListCustomDimensionsRequest() @pytest.mark.asyncio -async def test_list_custom_metrics_async_use_cached_wrapped_rpc( +async def test_list_custom_dimensions_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -25564,33 +25456,34 @@ async def test_list_custom_metrics_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_custom_metrics + client._client._transport.list_custom_dimensions in client._client._transport._wrapped_methods ) # Replace cached 
wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_custom_metrics - ] = mock_object + client._client._transport.list_custom_dimensions + ] = mock_rpc request = {} - await client.list_custom_metrics(request) + await client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_custom_metrics(request) + await client.list_custom_dimensions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_custom_metrics_async( +async def test_list_custom_dimensions_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ListCustomMetricsRequest, + request_type=analytics_admin.ListCustomDimensionsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25603,49 +25496,49 @@ async def test_list_custom_metrics_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_metrics), "__call__" + type(client.transport.list_custom_dimensions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListCustomMetricsResponse( + analytics_admin.ListCustomDimensionsResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_custom_metrics(request) + response = await client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListCustomMetricsRequest() + request = analytics_admin.ListCustomDimensionsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCustomMetricsAsyncPager) + assert isinstance(response, pagers.ListCustomDimensionsAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_list_custom_metrics_async_from_dict(): - await test_list_custom_metrics_async(request_type=dict) +async def test_list_custom_dimensions_async_from_dict(): + await test_list_custom_dimensions_async(request_type=dict) -def test_list_custom_metrics_field_headers(): +def test_list_custom_dimensions_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListCustomMetricsRequest() + request = analytics_admin.ListCustomDimensionsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_metrics), "__call__" + type(client.transport.list_custom_dimensions), "__call__" ) as call: - call.return_value = analytics_admin.ListCustomMetricsResponse() - client.list_custom_metrics(request) + call.return_value = analytics_admin.ListCustomDimensionsResponse() + client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -25661,25 +25554,25 @@ def test_list_custom_metrics_field_headers(): @pytest.mark.asyncio -async def test_list_custom_metrics_field_headers_async(): +async def test_list_custom_dimensions_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListCustomMetricsRequest() + request = analytics_admin.ListCustomDimensionsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_metrics), "__call__" + type(client.transport.list_custom_dimensions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListCustomMetricsResponse() + analytics_admin.ListCustomDimensionsResponse() ) - await client.list_custom_metrics(request) + await client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -25694,20 +25587,20 @@ async def test_list_custom_metrics_field_headers_async(): ) in kw["metadata"] -def test_list_custom_metrics_flattened(): +def test_list_custom_dimensions_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_metrics), "__call__" + type(client.transport.list_custom_dimensions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListCustomMetricsResponse() + call.return_value = analytics_admin.ListCustomDimensionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_custom_metrics( + client.list_custom_dimensions( parent="parent_value", ) @@ -25720,7 +25613,7 @@ def test_list_custom_metrics_flattened(): assert arg == mock_val -def test_list_custom_metrics_flattened_error(): +def test_list_custom_dimensions_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -25728,31 +25621,31 @@ def test_list_custom_metrics_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_custom_metrics( - analytics_admin.ListCustomMetricsRequest(), + client.list_custom_dimensions( + analytics_admin.ListCustomDimensionsRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_custom_metrics_flattened_async(): +async def test_list_custom_dimensions_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_metrics), "__call__" + type(client.transport.list_custom_dimensions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListCustomMetricsResponse() + call.return_value = analytics_admin.ListCustomDimensionsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListCustomMetricsResponse() + analytics_admin.ListCustomDimensionsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_custom_metrics( + response = await client.list_custom_dimensions( parent="parent_value", ) @@ -25766,7 +25659,7 @@ async def test_list_custom_metrics_flattened_async(): @pytest.mark.asyncio -async def test_list_custom_metrics_flattened_error_async(): +async def test_list_custom_dimensions_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -25774,13 +25667,13 @@ async def test_list_custom_metrics_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_custom_metrics( - analytics_admin.ListCustomMetricsRequest(), + await client.list_custom_dimensions( + analytics_admin.ListCustomDimensionsRequest(), parent="parent_value", ) -def test_list_custom_metrics_pager(transport_name: str = "grpc"): +def test_list_custom_dimensions_pager(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -25788,32 +25681,32 @@ def test_list_custom_metrics_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_metrics), "__call__" + type(client.transport.list_custom_dimensions), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), - resources.CustomMetric(), - resources.CustomMetric(), + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), + resources.CustomDimension(), + resources.CustomDimension(), ], next_page_token="abc", ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[], + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[], next_page_token="def", ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), ], next_page_token="ghi", ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), - resources.CustomMetric(), + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), + resources.CustomDimension(), ], ), RuntimeError, @@ -25825,7 +25718,7 @@ def test_list_custom_metrics_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_custom_metrics(request={}, retry=retry, timeout=timeout) + pager = client.list_custom_dimensions(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -25833,10 +25726,10 @@ def test_list_custom_metrics_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, resources.CustomMetric) for i in results) + assert all(isinstance(i, resources.CustomDimension) for i in results) -def test_list_custom_metrics_pages(transport_name: str = "grpc"): +def test_list_custom_dimensions_pages(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -25844,82 
+25737,82 @@ def test_list_custom_metrics_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_custom_metrics), "__call__" + type(client.transport.list_custom_dimensions), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), - resources.CustomMetric(), - resources.CustomMetric(), + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), + resources.CustomDimension(), + resources.CustomDimension(), ], next_page_token="abc", ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[], + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[], next_page_token="def", ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), ], next_page_token="ghi", ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), - resources.CustomMetric(), + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), + resources.CustomDimension(), ], ), RuntimeError, ) - pages = list(client.list_custom_metrics(request={}).pages) + pages = list(client.list_custom_dimensions(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_custom_metrics_async_pager(): +async def test_list_custom_dimensions_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_custom_metrics), + type(client.transport.list_custom_dimensions), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), - resources.CustomMetric(), - resources.CustomMetric(), + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), + resources.CustomDimension(), + resources.CustomDimension(), ], next_page_token="abc", ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[], + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[], next_page_token="def", ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), ], next_page_token="ghi", ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), - resources.CustomMetric(), - ], + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), + resources.CustomDimension(), + ], ), RuntimeError, ) - async_pager = await client.list_custom_metrics( + async_pager = await client.list_custom_dimensions( request={}, ) assert async_pager.next_page_token == "abc" @@ -25928,45 +25821,45 @@ async def test_list_custom_metrics_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, resources.CustomMetric) for i in responses) + assert all(isinstance(i, resources.CustomDimension) for i in responses) @pytest.mark.asyncio -async def test_list_custom_metrics_async_pages(): +async def test_list_custom_dimensions_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_custom_metrics), + type(client.transport.list_custom_dimensions), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), - resources.CustomMetric(), - resources.CustomMetric(), + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), + resources.CustomDimension(), + resources.CustomDimension(), ], next_page_token="abc", ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[], + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[], next_page_token="def", ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), ], next_page_token="ghi", ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), - resources.CustomMetric(), + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), + resources.CustomDimension(), ], ), RuntimeError, @@ -25975,7 +25868,7 @@ async def test_list_custom_metrics_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_custom_metrics(request={}) + await client.list_custom_dimensions(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -25985,11 +25878,11 @@ async def test_list_custom_metrics_async_pages(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ArchiveCustomMetricRequest, + analytics_admin.ArchiveCustomDimensionRequest, dict, ], ) -def test_archive_custom_metric(request_type, transport: 
str = "grpc"): +def test_archive_custom_dimension(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26001,23 +25894,23 @@ def test_archive_custom_metric(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_metric), "__call__" + type(client.transport.archive_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.archive_custom_metric(request) + response = client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ArchiveCustomMetricRequest() + request = analytics_admin.ArchiveCustomDimensionRequest() assert args[0] == request # Establish that the response is the type that we expect. assert response is None -def test_archive_custom_metric_empty_call(): +def test_archive_custom_dimension_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -26027,18 +25920,18 @@ def test_archive_custom_metric_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_metric), "__call__" + type(client.transport.archive_custom_dimension), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.archive_custom_metric() + client.archive_custom_dimension() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ArchiveCustomMetricRequest() + assert args[0] == analytics_admin.ArchiveCustomDimensionRequest() -def test_archive_custom_metric_non_empty_request_with_auto_populated_field(): +def test_archive_custom_dimension_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -26049,26 +25942,26 @@ def test_archive_custom_metric_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ArchiveCustomMetricRequest( + request = analytics_admin.ArchiveCustomDimensionRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_metric), "__call__" + type(client.transport.archive_custom_dimension), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.archive_custom_metric(request=request) + client.archive_custom_dimension(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ArchiveCustomMetricRequest( + assert args[0] == analytics_admin.ArchiveCustomDimensionRequest( name="name_value", ) -def test_archive_custom_metric_use_cached_wrapped_rpc(): +def test_archive_custom_dimension_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26083,7 +25976,7 @@ def test_archive_custom_metric_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.archive_custom_metric + client._transport.archive_custom_dimension in client._transport._wrapped_methods ) @@ -26093,15 +25986,15 @@ def test_archive_custom_metric_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.archive_custom_metric + client._transport.archive_custom_dimension ] = mock_rpc request = {} - client.archive_custom_metric(request) + client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.archive_custom_metric(request) + client.archive_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -26109,7 +26002,7 @@ def test_archive_custom_metric_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_archive_custom_metric_empty_call_async(): +async def test_archive_custom_dimension_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -26119,18 +26012,18 @@ async def test_archive_custom_metric_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_metric), "__call__" + type(client.transport.archive_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.archive_custom_metric() + response = await client.archive_custom_dimension() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ArchiveCustomMetricRequest() + assert args[0] == analytics_admin.ArchiveCustomDimensionRequest() @pytest.mark.asyncio -async def test_archive_custom_metric_async_use_cached_wrapped_rpc( +async def test_archive_custom_dimension_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -26147,33 +26040,34 @@ async def test_archive_custom_metric_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.archive_custom_metric + client._client._transport.archive_custom_dimension in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.archive_custom_metric - ] = mock_object + client._client._transport.archive_custom_dimension + ] = mock_rpc request = {} - await client.archive_custom_metric(request) + await client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.archive_custom_metric(request) + await client.archive_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_archive_custom_metric_async( +async def test_archive_custom_dimension_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ArchiveCustomMetricRequest, + request_type=analytics_admin.ArchiveCustomDimensionRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26186,16 +26080,16 @@ async def test_archive_custom_metric_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_metric), "__call__" + type(client.transport.archive_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.archive_custom_metric(request) + response = await client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ArchiveCustomMetricRequest() + request = analytics_admin.ArchiveCustomDimensionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -26203,27 +26097,27 @@ async def test_archive_custom_metric_async( @pytest.mark.asyncio -async def test_archive_custom_metric_async_from_dict(): - await test_archive_custom_metric_async(request_type=dict) +async def test_archive_custom_dimension_async_from_dict(): + await test_archive_custom_dimension_async(request_type=dict) -def test_archive_custom_metric_field_headers(): +def test_archive_custom_dimension_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ArchiveCustomMetricRequest() + request = analytics_admin.ArchiveCustomDimensionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_metric), "__call__" + type(client.transport.archive_custom_dimension), "__call__" ) as call: call.return_value = None - client.archive_custom_metric(request) + client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -26239,23 +26133,23 @@ def test_archive_custom_metric_field_headers(): @pytest.mark.asyncio -async def test_archive_custom_metric_field_headers_async(): +async def test_archive_custom_dimension_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ArchiveCustomMetricRequest() + request = analytics_admin.ArchiveCustomDimensionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.archive_custom_metric), "__call__" + type(client.transport.archive_custom_dimension), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.archive_custom_metric(request) + await client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -26270,20 +26164,20 @@ async def test_archive_custom_metric_field_headers_async(): ) in kw["metadata"] -def test_archive_custom_metric_flattened(): +def test_archive_custom_dimension_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_metric), "__call__" + type(client.transport.archive_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.archive_custom_metric( + client.archive_custom_dimension( name="name_value", ) @@ -26296,7 +26190,7 @@ def test_archive_custom_metric_flattened(): assert arg == mock_val -def test_archive_custom_metric_flattened_error(): +def test_archive_custom_dimension_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -26304,21 +26198,21 @@ def test_archive_custom_metric_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.archive_custom_metric( - analytics_admin.ArchiveCustomMetricRequest(), + client.archive_custom_dimension( + analytics_admin.ArchiveCustomDimensionRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_archive_custom_metric_flattened_async(): +async def test_archive_custom_dimension_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.archive_custom_metric), "__call__" + type(client.transport.archive_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -26326,7 +26220,7 @@ async def test_archive_custom_metric_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.archive_custom_metric( + response = await client.archive_custom_dimension( name="name_value", ) @@ -26340,7 +26234,7 @@ async def test_archive_custom_metric_flattened_async(): @pytest.mark.asyncio -async def test_archive_custom_metric_flattened_error_async(): +async def test_archive_custom_dimension_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -26348,8 +26242,8 @@ async def test_archive_custom_metric_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.archive_custom_metric( - analytics_admin.ArchiveCustomMetricRequest(), + await client.archive_custom_dimension( + analytics_admin.ArchiveCustomDimensionRequest(), name="name_value", ) @@ -26357,11 +26251,11 @@ async def test_archive_custom_metric_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetCustomMetricRequest, + analytics_admin.GetCustomDimensionRequest, dict, ], ) -def test_get_custom_metric(request_type, transport: str = "grpc"): +def test_get_custom_dimension(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26373,42 +26267,36 @@ def test_get_custom_metric(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_custom_metric), "__call__" + type(client.transport.get_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CustomMetric( + call.return_value = resources.CustomDimension( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", - measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, - scope=resources.CustomMetric.MetricScope.EVENT, - restricted_metric_type=[ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ], + scope=resources.CustomDimension.DimensionScope.EVENT, + disallow_ads_personalization=True, ) - response = client.get_custom_metric(request) + response = client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetCustomMetricRequest() + request = analytics_admin.GetCustomDimensionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CustomMetric) + assert isinstance(response, resources.CustomDimension) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD - assert response.scope == resources.CustomMetric.MetricScope.EVENT - assert response.restricted_metric_type == [ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ] + assert response.scope == resources.CustomDimension.DimensionScope.EVENT + assert response.disallow_ads_personalization is True -def test_get_custom_metric_empty_call(): +def test_get_custom_dimension_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -26418,18 +26306,18 @@ def test_get_custom_metric_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_custom_metric), "__call__" + type(client.transport.get_custom_dimension), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_custom_metric() + client.get_custom_dimension() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetCustomMetricRequest() + assert args[0] == analytics_admin.GetCustomDimensionRequest() -def test_get_custom_metric_non_empty_request_with_auto_populated_field(): +def test_get_custom_dimension_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -26440,26 +26328,26 @@ def test_get_custom_metric_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetCustomMetricRequest( + request = analytics_admin.GetCustomDimensionRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_custom_metric), "__call__" + type(client.transport.get_custom_dimension), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_custom_metric(request=request) + client.get_custom_dimension(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetCustomMetricRequest( + assert args[0] == analytics_admin.GetCustomDimensionRequest( name="name_value", ) -def test_get_custom_metric_use_cached_wrapped_rpc(): +def test_get_custom_dimension_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26473,7 +26361,9 @@ def test_get_custom_metric_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_custom_metric in client._transport._wrapped_methods + assert ( + client._transport.get_custom_dimension in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -26481,15 +26371,15 @@ def test_get_custom_metric_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_custom_metric + client._transport.get_custom_dimension ] = mock_rpc request = {} - client.get_custom_metric(request) + client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_custom_metric(request) + client.get_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -26497,7 +26387,7 @@ def test_get_custom_metric_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_custom_metric_empty_call_async(): +async def test_get_custom_dimension_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -26507,30 +26397,27 @@ async def test_get_custom_metric_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_custom_metric), "__call__" + type(client.transport.get_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomMetric( + resources.CustomDimension( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", - measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, - scope=resources.CustomMetric.MetricScope.EVENT, - restricted_metric_type=[ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ], + scope=resources.CustomDimension.DimensionScope.EVENT, + disallow_ads_personalization=True, ) ) - response = await client.get_custom_metric() + response = await client.get_custom_dimension() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetCustomMetricRequest() + assert args[0] == analytics_admin.GetCustomDimensionRequest() @pytest.mark.asyncio -async def test_get_custom_metric_async_use_cached_wrapped_rpc( +async def test_get_custom_dimension_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -26547,32 +26434,34 @@ async def test_get_custom_metric_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_custom_metric + client._client._transport.get_custom_dimension in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_custom_metric - ] = mock_object + 
client._client._transport.get_custom_dimension + ] = mock_rpc request = {} - await client.get_custom_metric(request) + await client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_custom_metric(request) + await client.get_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_custom_metric_async( - transport: str = "grpc_asyncio", request_type=analytics_admin.GetCustomMetricRequest +async def test_get_custom_dimension_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.GetCustomDimensionRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26585,65 +26474,59 @@ async def test_get_custom_metric_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_custom_metric), "__call__" + type(client.transport.get_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomMetric( + resources.CustomDimension( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", - measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, - scope=resources.CustomMetric.MetricScope.EVENT, - restricted_metric_type=[ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ], + scope=resources.CustomDimension.DimensionScope.EVENT, + disallow_ads_personalization=True, ) ) - response = await client.get_custom_metric(request) + response = await client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetCustomMetricRequest() + request = analytics_admin.GetCustomDimensionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CustomMetric) + assert isinstance(response, resources.CustomDimension) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD - assert response.scope == resources.CustomMetric.MetricScope.EVENT - assert response.restricted_metric_type == [ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ] + assert response.scope == resources.CustomDimension.DimensionScope.EVENT + assert response.disallow_ads_personalization is True @pytest.mark.asyncio -async def test_get_custom_metric_async_from_dict(): - await test_get_custom_metric_async(request_type=dict) +async def test_get_custom_dimension_async_from_dict(): + await test_get_custom_dimension_async(request_type=dict) -def test_get_custom_metric_field_headers(): +def test_get_custom_dimension_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetCustomMetricRequest() + request = analytics_admin.GetCustomDimensionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_custom_metric), "__call__" + type(client.transport.get_custom_dimension), "__call__" ) as call: - call.return_value = resources.CustomMetric() - client.get_custom_metric(request) + call.return_value = resources.CustomDimension() + client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -26659,25 +26542,25 @@ def test_get_custom_metric_field_headers(): @pytest.mark.asyncio -async def test_get_custom_metric_field_headers_async(): +async def test_get_custom_dimension_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetCustomMetricRequest() + request = analytics_admin.GetCustomDimensionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_custom_metric), "__call__" + type(client.transport.get_custom_dimension), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomMetric() + resources.CustomDimension() ) - await client.get_custom_metric(request) + await client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -26692,20 +26575,20 @@ async def test_get_custom_metric_field_headers_async(): ) in kw["metadata"] -def test_get_custom_metric_flattened(): +def test_get_custom_dimension_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_custom_metric), "__call__" + type(client.transport.get_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CustomMetric() + call.return_value = resources.CustomDimension() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_custom_metric( + client.get_custom_dimension( name="name_value", ) @@ -26718,7 +26601,7 @@ def test_get_custom_metric_flattened(): assert arg == mock_val -def test_get_custom_metric_flattened_error(): +def test_get_custom_dimension_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -26726,31 +26609,31 @@ def test_get_custom_metric_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_custom_metric( - analytics_admin.GetCustomMetricRequest(), + client.get_custom_dimension( + analytics_admin.GetCustomDimensionRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_custom_metric_flattened_async(): +async def test_get_custom_dimension_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_custom_metric), "__call__" + type(client.transport.get_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CustomMetric() + call.return_value = resources.CustomDimension() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CustomMetric() + resources.CustomDimension() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_custom_metric( + response = await client.get_custom_dimension( name="name_value", ) @@ -26764,7 +26647,7 @@ async def test_get_custom_metric_flattened_async(): @pytest.mark.asyncio -async def test_get_custom_metric_flattened_error_async(): +async def test_get_custom_dimension_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -26772,8 +26655,8 @@ async def test_get_custom_metric_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_custom_metric( - analytics_admin.GetCustomMetricRequest(), + await client.get_custom_dimension( + analytics_admin.GetCustomDimensionRequest(), name="name_value", ) @@ -26781,11 +26664,11 @@ async def test_get_custom_metric_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetDataRetentionSettingsRequest, + analytics_admin.CreateCustomMetricRequest, dict, ], ) -def test_get_data_retention_settings(request_type, transport: str = "grpc"): +def test_create_custom_metric(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26797,33 +26680,42 @@ def test_get_data_retention_settings(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_retention_settings), "__call__" + type(client.transport.create_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.DataRetentionSettings( + call.return_value = resources.CustomMetric( name="name_value", - event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, - reset_user_data_on_new_activity=True, + parameter_name="parameter_name_value", + display_name="display_name_value", + description="description_value", + measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, + scope=resources.CustomMetric.MetricScope.EVENT, + restricted_metric_type=[ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ], ) - response = client.get_data_retention_settings(request) + response = client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetDataRetentionSettingsRequest() + request = analytics_admin.CreateCustomMetricRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DataRetentionSettings) + assert isinstance(response, resources.CustomMetric) assert response.name == "name_value" - assert ( - response.event_data_retention - == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS - ) - assert response.reset_user_data_on_new_activity is True + assert response.parameter_name == "parameter_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD + assert response.scope == resources.CustomMetric.MetricScope.EVENT + assert response.restricted_metric_type == [ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ] -def test_get_data_retention_settings_empty_call(): +def test_create_custom_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceClient( @@ -26833,18 +26725,18 @@ def test_get_data_retention_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_retention_settings), "__call__" + type(client.transport.create_custom_metric), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_data_retention_settings() + client.create_custom_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetDataRetentionSettingsRequest() + assert args[0] == analytics_admin.CreateCustomMetricRequest() -def test_get_data_retention_settings_non_empty_request_with_auto_populated_field(): +def test_create_custom_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -26855,26 +26747,26 @@ def test_get_data_retention_settings_non_empty_request_with_auto_populated_field # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetDataRetentionSettingsRequest( - name="name_value", + request = analytics_admin.CreateCustomMetricRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_retention_settings), "__call__" + type(client.transport.create_custom_metric), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_data_retention_settings(request=request) + client.create_custom_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetDataRetentionSettingsRequest( - name="name_value", + assert args[0] == analytics_admin.CreateCustomMetricRequest( + parent="parent_value", ) -def test_get_data_retention_settings_use_cached_wrapped_rpc(): +def test_create_custom_metric_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26889,8 +26781,7 @@ def test_get_data_retention_settings_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_data_retention_settings - in client._transport._wrapped_methods + client._transport.create_custom_metric in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -26899,15 +26790,15 @@ def test_get_data_retention_settings_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_data_retention_settings + client._transport.create_custom_metric ] = mock_rpc request = {} - client.get_data_retention_settings(request) + client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_data_retention_settings(request) + client.create_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -26915,7 +26806,7 @@ def test_get_data_retention_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_data_retention_settings_empty_call_async(): +async def test_create_custom_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -26925,24 +26816,30 @@ async def test_get_data_retention_settings_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_retention_settings), "__call__" + type(client.transport.create_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRetentionSettings( + resources.CustomMetric( name="name_value", - event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, - reset_user_data_on_new_activity=True, + parameter_name="parameter_name_value", + display_name="display_name_value", + description="description_value", + measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, + scope=resources.CustomMetric.MetricScope.EVENT, + restricted_metric_type=[ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ], ) ) - response = await client.get_data_retention_settings() + response = await client.create_custom_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetDataRetentionSettingsRequest() + assert args[0] == analytics_admin.CreateCustomMetricRequest() @pytest.mark.asyncio -async def test_get_data_retention_settings_async_use_cached_wrapped_rpc( +async def test_create_custom_metric_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -26959,33 +26856,34 @@ async def test_get_data_retention_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_data_retention_settings + client._client._transport.create_custom_metric in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = 
mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_data_retention_settings - ] = mock_object + client._client._transport.create_custom_metric + ] = mock_rpc request = {} - await client.get_data_retention_settings(request) + await client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_data_retention_settings(request) + await client.create_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_data_retention_settings_async( +async def test_create_custom_metric_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.GetDataRetentionSettingsRequest, + request_type=analytics_admin.CreateCustomMetricRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26998,56 +26896,65 @@ async def test_get_data_retention_settings_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_retention_settings), "__call__" + type(client.transport.create_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRetentionSettings( + resources.CustomMetric( name="name_value", - event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, - reset_user_data_on_new_activity=True, + parameter_name="parameter_name_value", + display_name="display_name_value", + description="description_value", + measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, + scope=resources.CustomMetric.MetricScope.EVENT, + restricted_metric_type=[ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ], ) ) - response = await client.get_data_retention_settings(request) + response = await client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetDataRetentionSettingsRequest() + request = analytics_admin.CreateCustomMetricRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataRetentionSettings) + assert isinstance(response, resources.CustomMetric) assert response.name == "name_value" - assert ( - response.event_data_retention - == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS - ) - assert response.reset_user_data_on_new_activity is True + assert response.parameter_name == "parameter_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD + assert response.scope == resources.CustomMetric.MetricScope.EVENT + assert response.restricted_metric_type == [ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ] @pytest.mark.asyncio -async def test_get_data_retention_settings_async_from_dict(): - await test_get_data_retention_settings_async(request_type=dict) +async def test_create_custom_metric_async_from_dict(): + await test_create_custom_metric_async(request_type=dict) -def test_get_data_retention_settings_field_headers(): +def test_create_custom_metric_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetDataRetentionSettingsRequest() + request = analytics_admin.CreateCustomMetricRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_retention_settings), "__call__" + type(client.transport.create_custom_metric), "__call__" ) as call: - call.return_value = resources.DataRetentionSettings() - client.get_data_retention_settings(request) + call.return_value = resources.CustomMetric() + client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -27058,30 +26965,30 @@ def test_get_data_retention_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_data_retention_settings_field_headers_async(): +async def test_create_custom_metric_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetDataRetentionSettingsRequest() + request = analytics_admin.CreateCustomMetricRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_retention_settings), "__call__" + type(client.transport.create_custom_metric), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRetentionSettings() + resources.CustomMetric() ) - await client.get_data_retention_settings(request) + await client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -27092,37 +26999,41 @@ async def test_get_data_retention_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_data_retention_settings_flattened(): +def test_create_custom_metric_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_data_retention_settings), "__call__" + type(client.transport.create_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataRetentionSettings() + call.return_value = resources.CustomMetric() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_data_retention_settings( - name="name_value", + client.create_custom_metric( + parent="parent_value", + custom_metric=resources.CustomMetric(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].custom_metric + mock_val = resources.CustomMetric(name="name_value") assert arg == mock_val -def test_get_data_retention_settings_flattened_error(): +def test_create_custom_metric_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -27130,45 +27041,50 @@ def test_get_data_retention_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_data_retention_settings( - analytics_admin.GetDataRetentionSettingsRequest(), - name="name_value", + client.create_custom_metric( + analytics_admin.CreateCustomMetricRequest(), + parent="parent_value", + custom_metric=resources.CustomMetric(name="name_value"), ) @pytest.mark.asyncio -async def test_get_data_retention_settings_flattened_async(): +async def test_create_custom_metric_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_data_retention_settings), "__call__" + type(client.transport.create_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataRetentionSettings() + call.return_value = resources.CustomMetric() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRetentionSettings() + resources.CustomMetric() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_data_retention_settings( - name="name_value", + response = await client.create_custom_metric( + parent="parent_value", + custom_metric=resources.CustomMetric(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].custom_metric + mock_val = resources.CustomMetric(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_get_data_retention_settings_flattened_error_async(): +async def test_create_custom_metric_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -27176,20 +27092,21 @@ async def test_get_data_retention_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_data_retention_settings( - analytics_admin.GetDataRetentionSettingsRequest(), - name="name_value", + await client.create_custom_metric( + analytics_admin.CreateCustomMetricRequest(), + parent="parent_value", + custom_metric=resources.CustomMetric(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateDataRetentionSettingsRequest, + analytics_admin.UpdateCustomMetricRequest, dict, ], ) -def test_update_data_retention_settings(request_type, transport: str = "grpc"): +def test_update_custom_metric(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -27201,33 +27118,42 @@ def test_update_data_retention_settings(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_retention_settings), "__call__" + type(client.transport.update_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataRetentionSettings( + call.return_value = resources.CustomMetric( name="name_value", - event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, - reset_user_data_on_new_activity=True, + parameter_name="parameter_name_value", + display_name="display_name_value", + description="description_value", + measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, + scope=resources.CustomMetric.MetricScope.EVENT, + restricted_metric_type=[ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ], ) - response = client.update_data_retention_settings(request) + response = client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateDataRetentionSettingsRequest() + request = analytics_admin.UpdateCustomMetricRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DataRetentionSettings) + assert isinstance(response, resources.CustomMetric) assert response.name == "name_value" - assert ( - response.event_data_retention - == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS - ) - assert response.reset_user_data_on_new_activity is True + assert response.parameter_name == "parameter_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD + assert response.scope == resources.CustomMetric.MetricScope.EVENT + assert response.restricted_metric_type == [ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ] -def test_update_data_retention_settings_empty_call(): +def test_update_custom_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -27237,18 +27163,18 @@ def test_update_data_retention_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_retention_settings), "__call__" + type(client.transport.update_custom_metric), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_data_retention_settings() + client.update_custom_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateDataRetentionSettingsRequest() + assert args[0] == analytics_admin.UpdateCustomMetricRequest() -def test_update_data_retention_settings_non_empty_request_with_auto_populated_field(): +def test_update_custom_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -27259,22 +27185,22 @@ def test_update_data_retention_settings_non_empty_request_with_auto_populated_fi # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.UpdateDataRetentionSettingsRequest() + request = analytics_admin.UpdateCustomMetricRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_retention_settings), "__call__" + type(client.transport.update_custom_metric), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_data_retention_settings(request=request) + client.update_custom_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateDataRetentionSettingsRequest() + assert args[0] == analytics_admin.UpdateCustomMetricRequest() -def test_update_data_retention_settings_use_cached_wrapped_rpc(): +def test_update_custom_metric_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27289,8 +27215,7 @@ def test_update_data_retention_settings_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_data_retention_settings - in client._transport._wrapped_methods + client._transport.update_custom_metric in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -27299,15 +27224,15 @@ def test_update_data_retention_settings_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_data_retention_settings + client._transport.update_custom_metric ] = mock_rpc request = {} - client.update_data_retention_settings(request) + client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_data_retention_settings(request) + client.update_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -27315,7 +27240,7 @@ def test_update_data_retention_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_data_retention_settings_empty_call_async(): +async def test_update_custom_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -27325,24 +27250,30 @@ async def test_update_data_retention_settings_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_retention_settings), "__call__" + type(client.transport.update_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRetentionSettings( + resources.CustomMetric( name="name_value", - event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, - reset_user_data_on_new_activity=True, + parameter_name="parameter_name_value", + display_name="display_name_value", + description="description_value", + measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, + scope=resources.CustomMetric.MetricScope.EVENT, + restricted_metric_type=[ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ], ) ) - response = await client.update_data_retention_settings() + response = await client.update_custom_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateDataRetentionSettingsRequest() + assert args[0] == analytics_admin.UpdateCustomMetricRequest() @pytest.mark.asyncio -async def test_update_data_retention_settings_async_use_cached_wrapped_rpc( +async def test_update_custom_metric_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -27359,33 +27290,34 @@ async def test_update_data_retention_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_data_retention_settings + client._client._transport.update_custom_metric in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = 
mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_data_retention_settings - ] = mock_object + client._client._transport.update_custom_metric + ] = mock_rpc request = {} - await client.update_data_retention_settings(request) + await client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_data_retention_settings(request) + await client.update_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_data_retention_settings_async( +async def test_update_custom_metric_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateDataRetentionSettingsRequest, + request_type=analytics_admin.UpdateCustomMetricRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27398,56 +27330,65 @@ async def test_update_data_retention_settings_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_retention_settings), "__call__" + type(client.transport.update_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRetentionSettings( + resources.CustomMetric( name="name_value", - event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, - reset_user_data_on_new_activity=True, + parameter_name="parameter_name_value", + display_name="display_name_value", + description="description_value", + measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, + scope=resources.CustomMetric.MetricScope.EVENT, + restricted_metric_type=[ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ], ) ) - response = await client.update_data_retention_settings(request) + response = await client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateDataRetentionSettingsRequest() + request = analytics_admin.UpdateCustomMetricRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataRetentionSettings) + assert isinstance(response, resources.CustomMetric) assert response.name == "name_value" - assert ( - response.event_data_retention - == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS - ) - assert response.reset_user_data_on_new_activity is True + assert response.parameter_name == "parameter_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD + assert response.scope == resources.CustomMetric.MetricScope.EVENT + assert response.restricted_metric_type == [ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ] @pytest.mark.asyncio -async def test_update_data_retention_settings_async_from_dict(): - await test_update_data_retention_settings_async(request_type=dict) +async def test_update_custom_metric_async_from_dict(): + await test_update_custom_metric_async(request_type=dict) -def test_update_data_retention_settings_field_headers(): +def test_update_custom_metric_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateDataRetentionSettingsRequest() + request = analytics_admin.UpdateCustomMetricRequest() - request.data_retention_settings.name = "name_value" + request.custom_metric.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_data_retention_settings), "__call__" + type(client.transport.update_custom_metric), "__call__" ) as call: - call.return_value = resources.DataRetentionSettings() - client.update_data_retention_settings(request) + call.return_value = resources.CustomMetric() + client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -27458,30 +27399,30 @@ def test_update_data_retention_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "data_retention_settings.name=name_value", + "custom_metric.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_data_retention_settings_field_headers_async(): +async def test_update_custom_metric_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateDataRetentionSettingsRequest() + request = analytics_admin.UpdateCustomMetricRequest() - request.data_retention_settings.name = "name_value" + request.custom_metric.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_retention_settings), "__call__" + type(client.transport.update_custom_metric), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRetentionSettings() + resources.CustomMetric() ) - await client.update_data_retention_settings(request) + await client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -27492,25 +27433,25 @@ async def test_update_data_retention_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "data_retention_settings.name=name_value", + "custom_metric.name=name_value", ) in kw["metadata"] -def test_update_data_retention_settings_flattened(): +def test_update_custom_metric_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_retention_settings), "__call__" + type(client.transport.update_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataRetentionSettings() + call.return_value = resources.CustomMetric() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_data_retention_settings( - data_retention_settings=resources.DataRetentionSettings(name="name_value"), + client.update_custom_metric( + custom_metric=resources.CustomMetric(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -27518,15 +27459,15 @@ def test_update_data_retention_settings_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].data_retention_settings - mock_val = resources.DataRetentionSettings(name="name_value") + arg = args[0].custom_metric + mock_val = resources.CustomMetric(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_update_data_retention_settings_flattened_error(): +def test_update_custom_metric_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -27534,33 +27475,33 @@ def test_update_data_retention_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_data_retention_settings( - analytics_admin.UpdateDataRetentionSettingsRequest(), - data_retention_settings=resources.DataRetentionSettings(name="name_value"), + client.update_custom_metric( + analytics_admin.UpdateCustomMetricRequest(), + custom_metric=resources.CustomMetric(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_update_data_retention_settings_flattened_async(): +async def test_update_custom_metric_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_retention_settings), "__call__" + type(client.transport.update_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.DataRetentionSettings() + call.return_value = resources.CustomMetric() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRetentionSettings() + resources.CustomMetric() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_data_retention_settings( - data_retention_settings=resources.DataRetentionSettings(name="name_value"), + response = await client.update_custom_metric( + custom_metric=resources.CustomMetric(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -27568,8 +27509,8 @@ async def test_update_data_retention_settings_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].data_retention_settings - mock_val = resources.DataRetentionSettings(name="name_value") + arg = args[0].custom_metric + mock_val = resources.CustomMetric(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -27577,7 +27518,7 @@ async def test_update_data_retention_settings_flattened_async(): @pytest.mark.asyncio -async def test_update_data_retention_settings_flattened_error_async(): +async def test_update_custom_metric_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -27585,9 +27526,9 @@ async def test_update_data_retention_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_data_retention_settings( - analytics_admin.UpdateDataRetentionSettingsRequest(), - data_retention_settings=resources.DataRetentionSettings(name="name_value"), + await client.update_custom_metric( + analytics_admin.UpdateCustomMetricRequest(), + custom_metric=resources.CustomMetric(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -27595,11 +27536,11 @@ async def test_update_data_retention_settings_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateDataStreamRequest, + analytics_admin.ListCustomMetricsRequest, dict, ], ) -def test_create_data_stream(request_type, transport: str = "grpc"): +def test_list_custom_metrics(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -27611,30 +27552,26 @@ def test_create_data_stream(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_data_stream), "__call__" + type(client.transport.list_custom_metrics), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataStream( - name="name_value", - type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, - display_name="display_name_value", + call.return_value = analytics_admin.ListCustomMetricsResponse( + next_page_token="next_page_token_value", ) - response = client.create_data_stream(request) + response = client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateDataStreamRequest() + request = analytics_admin.ListCustomMetricsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataStream) - assert response.name == "name_value" - assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM - assert response.display_name == "display_name_value" + assert isinstance(response, pagers.ListCustomMetricsPager) + assert response.next_page_token == "next_page_token_value" -def test_create_data_stream_empty_call(): +def test_list_custom_metrics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -27644,18 +27581,18 @@ def test_create_data_stream_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_data_stream), "__call__" + type(client.transport.list_custom_metrics), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_data_stream() + client.list_custom_metrics() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateDataStreamRequest() + assert args[0] == analytics_admin.ListCustomMetricsRequest() -def test_create_data_stream_non_empty_request_with_auto_populated_field(): +def test_list_custom_metrics_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -27666,26 +27603,28 @@ def test_create_data_stream_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_admin.CreateDataStreamRequest( + request = analytics_admin.ListCustomMetricsRequest( parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_data_stream), "__call__" + type(client.transport.list_custom_metrics), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_data_stream(request=request) + client.list_custom_metrics(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateDataStreamRequest( + assert args[0] == analytics_admin.ListCustomMetricsRequest( parent="parent_value", + page_token="page_token_value", ) -def test_create_data_stream_use_cached_wrapped_rpc(): +def test_list_custom_metrics_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27700,7 +27639,7 @@ def test_create_data_stream_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_data_stream in client._transport._wrapped_methods + client._transport.list_custom_metrics in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -27709,15 +27648,15 @@ def test_create_data_stream_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_data_stream + client._transport.list_custom_metrics ] = mock_rpc request = {} - client.create_data_stream(request) + client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_data_stream(request) + client.list_custom_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -27725,7 +27664,7 @@ def test_create_data_stream_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_data_stream_empty_call_async(): +async def test_list_custom_metrics_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -27735,24 +27674,22 @@ async def test_create_data_stream_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_data_stream), "__call__" + type(client.transport.list_custom_metrics), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataStream( - name="name_value", - type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, - display_name="display_name_value", + analytics_admin.ListCustomMetricsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.create_data_stream() + response = await client.list_custom_metrics() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateDataStreamRequest() + assert args[0] == analytics_admin.ListCustomMetricsRequest() @pytest.mark.asyncio -async def test_create_data_stream_async_use_cached_wrapped_rpc( +async def test_list_custom_metrics_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -27769,33 +27706,34 @@ async def test_create_data_stream_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_data_stream + client._client._transport.list_custom_metrics in 
client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_data_stream - ] = mock_object + client._client._transport.list_custom_metrics + ] = mock_rpc request = {} - await client.create_data_stream(request) + await client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_data_stream(request) + await client.list_custom_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_data_stream_async( +async def test_list_custom_metrics_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateDataStreamRequest, + request_type=analytics_admin.ListCustomMetricsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27808,53 +27746,49 @@ async def test_create_data_stream_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_data_stream), "__call__" + type(client.transport.list_custom_metrics), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataStream( - name="name_value", - type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, - display_name="display_name_value", + analytics_admin.ListCustomMetricsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.create_data_stream(request) + response = await client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateDataStreamRequest() + request = analytics_admin.ListCustomMetricsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DataStream) - assert response.name == "name_value" - assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM - assert response.display_name == "display_name_value" + assert isinstance(response, pagers.ListCustomMetricsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_create_data_stream_async_from_dict(): - await test_create_data_stream_async(request_type=dict) +async def test_list_custom_metrics_async_from_dict(): + await test_list_custom_metrics_async(request_type=dict) -def test_create_data_stream_field_headers(): +def test_list_custom_metrics_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateDataStreamRequest() + request = analytics_admin.ListCustomMetricsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_data_stream), "__call__" + type(client.transport.list_custom_metrics), "__call__" ) as call: - call.return_value = resources.DataStream() - client.create_data_stream(request) + call.return_value = analytics_admin.ListCustomMetricsResponse() + client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -27870,25 +27804,25 @@ def test_create_data_stream_field_headers(): @pytest.mark.asyncio -async def test_create_data_stream_field_headers_async(): +async def test_list_custom_metrics_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateDataStreamRequest() + request = analytics_admin.ListCustomMetricsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_data_stream), "__call__" + type(client.transport.list_custom_metrics), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataStream() + analytics_admin.ListCustomMetricsResponse() ) - await client.create_data_stream(request) + await client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -27903,26 +27837,21 @@ async def test_create_data_stream_field_headers_async(): ) in kw["metadata"] -def test_create_data_stream_flattened(): +def test_list_custom_metrics_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_data_stream), "__call__" + type(client.transport.list_custom_metrics), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataStream() + call.return_value = analytics_admin.ListCustomMetricsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.create_data_stream( + client.list_custom_metrics( parent="parent_value", - data_stream=resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ), ) # Establish that the underlying call was made with the expected @@ -27932,16 +27861,9 @@ def test_create_data_stream_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].data_stream - mock_val = resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ) - assert arg == mock_val -def test_create_data_stream_flattened_error(): +def test_list_custom_metrics_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -27949,42 +27871,32 @@ def test_create_data_stream_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_data_stream( - analytics_admin.CreateDataStreamRequest(), + client.list_custom_metrics( + analytics_admin.ListCustomMetricsRequest(), parent="parent_value", - data_stream=resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ), ) @pytest.mark.asyncio -async def test_create_data_stream_flattened_async(): +async def test_list_custom_metrics_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_data_stream), "__call__" + type(client.transport.list_custom_metrics), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.DataStream() + call.return_value = analytics_admin.ListCustomMetricsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataStream() + analytics_admin.ListCustomMetricsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_data_stream( + response = await client.list_custom_metrics( parent="parent_value", - data_stream=resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ), ) # Establish that the underlying call was made with the expected @@ -27994,17 +27906,10 @@ async def test_create_data_stream_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].data_stream - mock_val = resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ) - assert arg == mock_val @pytest.mark.asyncio -async def test_create_data_stream_flattened_error_async(): +async def test_list_custom_metrics_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -28012,53 +27917,250 @@ async def test_create_data_stream_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_data_stream( - analytics_admin.CreateDataStreamRequest(), + await client.list_custom_metrics( + analytics_admin.ListCustomMetricsRequest(), parent="parent_value", - data_stream=resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ), ) -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.DeleteDataStreamRequest, - dict, - ], -) -def test_delete_data_stream(request_type, transport: str = "grpc"): +def test_list_custom_metrics_pager(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_stream), "__call__" + type(client.transport.list_custom_metrics), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_data_stream(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteDataStreamRequest() - assert args[0] == request + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + resources.CustomMetric(), + resources.CustomMetric(), + ], + next_page_token="abc", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[], + next_page_token="def", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + ], + next_page_token="ghi", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + resources.CustomMetric(), + ], + ), + RuntimeError, + ) - # Establish that the response is the type that we expect. - assert response is None + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_custom_metrics(request={}, retry=retry, timeout=timeout) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout -def test_delete_data_stream_empty_call(): + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.CustomMetric) for i in results) + + +def test_list_custom_metrics_pages(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_metrics), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + resources.CustomMetric(), + resources.CustomMetric(), + ], + next_page_token="abc", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[], + next_page_token="def", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + ], + next_page_token="ghi", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + resources.CustomMetric(), + ], + ), + RuntimeError, + ) + pages = list(client.list_custom_metrics(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_custom_metrics_async_pager(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_metrics), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + resources.CustomMetric(), + resources.CustomMetric(), + ], + next_page_token="abc", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[], + next_page_token="def", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + ], + next_page_token="ghi", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + resources.CustomMetric(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_custom_metrics( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.CustomMetric) for i in responses) + + +@pytest.mark.asyncio +async def test_list_custom_metrics_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_metrics), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + resources.CustomMetric(), + resources.CustomMetric(), + ], + next_page_token="abc", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[], + next_page_token="def", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + ], + next_page_token="ghi", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + resources.CustomMetric(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_custom_metrics(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ArchiveCustomMetricRequest, + dict, + ], +) +def test_archive_custom_metric(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.archive_custom_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.archive_custom_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.ArchiveCustomMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_archive_custom_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -28068,18 +28170,18 @@ def test_delete_data_stream_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_stream), "__call__" + type(client.transport.archive_custom_metric), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_data_stream() + client.archive_custom_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteDataStreamRequest() + assert args[0] == analytics_admin.ArchiveCustomMetricRequest() -def test_delete_data_stream_non_empty_request_with_auto_populated_field(): +def test_archive_custom_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -28090,26 +28192,26 @@ def test_delete_data_stream_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.DeleteDataStreamRequest( + request = analytics_admin.ArchiveCustomMetricRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_data_stream), "__call__" + type(client.transport.archive_custom_metric), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_data_stream(request=request) + client.archive_custom_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteDataStreamRequest( + assert args[0] == analytics_admin.ArchiveCustomMetricRequest( name="name_value", ) -def test_delete_data_stream_use_cached_wrapped_rpc(): +def test_archive_custom_metric_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28124,7 +28226,8 @@ def test_delete_data_stream_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_data_stream in client._transport._wrapped_methods + client._transport.archive_custom_metric + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -28133,15 +28236,15 @@ def test_delete_data_stream_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_data_stream + client._transport.archive_custom_metric ] = mock_rpc request = {} - client.delete_data_stream(request) + client.archive_custom_metric(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_data_stream(request) + client.archive_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -28149,7 +28252,7 @@ def test_delete_data_stream_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_data_stream_empty_call_async(): +async def test_archive_custom_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -28159,18 +28262,18 @@ async def test_delete_data_stream_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_stream), "__call__" + type(client.transport.archive_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_data_stream() + response = await client.archive_custom_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteDataStreamRequest() + assert args[0] == analytics_admin.ArchiveCustomMetricRequest() @pytest.mark.asyncio -async def test_delete_data_stream_async_use_cached_wrapped_rpc( +async def test_archive_custom_metric_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -28187,33 +28290,34 @@ async def test_delete_data_stream_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_data_stream + client._client._transport.archive_custom_metric in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() 
client._client._transport._wrapped_methods[ - client._client._transport.delete_data_stream - ] = mock_object + client._client._transport.archive_custom_metric + ] = mock_rpc request = {} - await client.delete_data_stream(request) + await client.archive_custom_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_data_stream(request) + await client.archive_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_data_stream_async( +async def test_archive_custom_metric_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteDataStreamRequest, + request_type=analytics_admin.ArchiveCustomMetricRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28226,16 +28330,16 @@ async def test_delete_data_stream_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_stream), "__call__" + type(client.transport.archive_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_data_stream(request) + response = await client.archive_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteDataStreamRequest() + request = analytics_admin.ArchiveCustomMetricRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -28243,27 +28347,27 @@ async def test_delete_data_stream_async( @pytest.mark.asyncio -async def test_delete_data_stream_async_from_dict(): - await test_delete_data_stream_async(request_type=dict) +async def test_archive_custom_metric_async_from_dict(): + await test_archive_custom_metric_async(request_type=dict) -def test_delete_data_stream_field_headers(): +def test_archive_custom_metric_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteDataStreamRequest() + request = analytics_admin.ArchiveCustomMetricRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_stream), "__call__" + type(client.transport.archive_custom_metric), "__call__" ) as call: call.return_value = None - client.delete_data_stream(request) + client.archive_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -28279,23 +28383,23 @@ def test_delete_data_stream_field_headers(): @pytest.mark.asyncio -async def test_delete_data_stream_field_headers_async(): +async def test_archive_custom_metric_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteDataStreamRequest() + request = analytics_admin.ArchiveCustomMetricRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_data_stream), "__call__" + type(client.transport.archive_custom_metric), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_data_stream(request) + await client.archive_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -28310,20 +28414,20 @@ async def test_delete_data_stream_field_headers_async(): ) in kw["metadata"] -def test_delete_data_stream_flattened(): +def test_archive_custom_metric_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_stream), "__call__" + type(client.transport.archive_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_data_stream( + client.archive_custom_metric( name="name_value", ) @@ -28336,7 +28440,7 @@ def test_delete_data_stream_flattened(): assert arg == mock_val -def test_delete_data_stream_flattened_error(): +def test_archive_custom_metric_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -28344,21 +28448,21 @@ def test_delete_data_stream_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_data_stream( - analytics_admin.DeleteDataStreamRequest(), + client.archive_custom_metric( + analytics_admin.ArchiveCustomMetricRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_data_stream_flattened_async(): +async def test_archive_custom_metric_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_data_stream), "__call__" + type(client.transport.archive_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -28366,7 +28470,7 @@ async def test_delete_data_stream_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_data_stream( + response = await client.archive_custom_metric( name="name_value", ) @@ -28380,7 +28484,7 @@ async def test_delete_data_stream_flattened_async(): @pytest.mark.asyncio -async def test_delete_data_stream_flattened_error_async(): +async def test_archive_custom_metric_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -28388,8 +28492,8 @@ async def test_delete_data_stream_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_data_stream( - analytics_admin.DeleteDataStreamRequest(), + await client.archive_custom_metric( + analytics_admin.ArchiveCustomMetricRequest(), name="name_value", ) @@ -28397,11 +28501,11 @@ async def test_delete_data_stream_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateDataStreamRequest, + analytics_admin.GetCustomMetricRequest, dict, ], ) -def test_update_data_stream(request_type, transport: str = "grpc"): +def test_get_custom_metric(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28413,30 +28517,42 @@ def test_update_data_stream(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_stream), "__call__" + type(client.transport.get_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataStream( + call.return_value = resources.CustomMetric( name="name_value", - type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, + parameter_name="parameter_name_value", display_name="display_name_value", + description="description_value", + measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, + scope=resources.CustomMetric.MetricScope.EVENT, + restricted_metric_type=[ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ], ) - response = client.update_data_stream(request) + response = client.get_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateDataStreamRequest() + request = analytics_admin.GetCustomMetricRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataStream) + assert isinstance(response, resources.CustomMetric) assert response.name == "name_value" - assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM + assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD + assert response.scope == resources.CustomMetric.MetricScope.EVENT + assert response.restricted_metric_type == [ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ] -def test_update_data_stream_empty_call(): +def test_get_custom_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -28446,18 +28562,18 @@ def test_update_data_stream_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_stream), "__call__" + type(client.transport.get_custom_metric), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_data_stream() + client.get_custom_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateDataStreamRequest() + assert args[0] == analytics_admin.GetCustomMetricRequest() -def test_update_data_stream_non_empty_request_with_auto_populated_field(): +def test_get_custom_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AnalyticsAdminServiceClient( @@ -28468,22 +28584,26 @@ def test_update_data_stream_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.UpdateDataStreamRequest() + request = analytics_admin.GetCustomMetricRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_stream), "__call__" + type(client.transport.get_custom_metric), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_data_stream(request=request) + client.get_custom_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateDataStreamRequest() + assert args[0] == analytics_admin.GetCustomMetricRequest( + name="name_value", + ) -def test_update_data_stream_use_cached_wrapped_rpc(): +def test_get_custom_metric_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28497,9 +28617,7 @@ def test_update_data_stream_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_data_stream in client._transport._wrapped_methods - ) + assert client._transport.get_custom_metric in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -28507,15 +28625,15 @@ def test_update_data_stream_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.update_data_stream + client._transport.get_custom_metric ] = mock_rpc request = {} - client.update_data_stream(request) + client.get_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_data_stream(request) + client.get_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -28523,7 +28641,7 @@ def test_update_data_stream_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_data_stream_empty_call_async(): +async def test_get_custom_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -28533,24 +28651,30 @@ async def test_update_data_stream_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_stream), "__call__" + type(client.transport.get_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataStream( + resources.CustomMetric( name="name_value", - type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, + parameter_name="parameter_name_value", display_name="display_name_value", + description="description_value", + measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, + scope=resources.CustomMetric.MetricScope.EVENT, + restricted_metric_type=[ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ], ) ) - response = await client.update_data_stream() + response = await client.get_custom_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateDataStreamRequest() + assert args[0] == analytics_admin.GetCustomMetricRequest() @pytest.mark.asyncio -async def test_update_data_stream_async_use_cached_wrapped_rpc( +async def test_get_custom_metric_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -28567,33 +28691,33 @@ async def test_update_data_stream_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_data_stream + client._client._transport.get_custom_metric in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_data_stream - ] = mock_object + client._client._transport.get_custom_metric + ] = mock_rpc request = {} - await client.update_data_stream(request) + await client.get_custom_metric(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_data_stream(request) + await client.get_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_data_stream_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateDataStreamRequest, +async def test_get_custom_metric_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.GetCustomMetricRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28606,53 +28730,65 @@ async def test_update_data_stream_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_stream), "__call__" + type(client.transport.get_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataStream( + resources.CustomMetric( name="name_value", - type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, + parameter_name="parameter_name_value", display_name="display_name_value", + description="description_value", + measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, + scope=resources.CustomMetric.MetricScope.EVENT, + restricted_metric_type=[ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ], ) ) - response = await client.update_data_stream(request) + response = await client.get_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateDataStreamRequest() + request = analytics_admin.GetCustomMetricRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataStream) + assert isinstance(response, resources.CustomMetric) assert response.name == "name_value" - assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM + assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD + assert response.scope == resources.CustomMetric.MetricScope.EVENT + assert response.restricted_metric_type == [ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ] @pytest.mark.asyncio -async def test_update_data_stream_async_from_dict(): - await test_update_data_stream_async(request_type=dict) +async def test_get_custom_metric_async_from_dict(): + await test_get_custom_metric_async(request_type=dict) -def test_update_data_stream_field_headers(): +def test_get_custom_metric_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateDataStreamRequest() + request = analytics_admin.GetCustomMetricRequest() - request.data_stream.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_stream), "__call__" + type(client.transport.get_custom_metric), "__call__" ) as call: - call.return_value = resources.DataStream() - client.update_data_stream(request) + call.return_value = resources.CustomMetric() + client.get_custom_metric(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -28663,30 +28799,30 @@ def test_update_data_stream_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "data_stream.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_data_stream_field_headers_async(): +async def test_get_custom_metric_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateDataStreamRequest() + request = analytics_admin.GetCustomMetricRequest() - request.data_stream.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_stream), "__call__" + type(client.transport.get_custom_metric), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataStream() + resources.CustomMetric() ) - await client.update_data_stream(request) + await client.get_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -28697,49 +28833,37 @@ async def test_update_data_stream_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "data_stream.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_data_stream_flattened(): +def test_get_custom_metric_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_stream), "__call__" + type(client.transport.get_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.DataStream() + call.return_value = resources.CustomMetric() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_data_stream( - data_stream=resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_custom_metric( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].data_stream - mock_val = resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ) - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_data_stream_flattened_error(): +def test_get_custom_metric_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -28747,62 +28871,45 @@ def test_update_data_stream_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_data_stream( - analytics_admin.UpdateDataStreamRequest(), - data_stream=resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_custom_metric( + analytics_admin.GetCustomMetricRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_data_stream_flattened_async(): +async def test_get_custom_metric_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_stream), "__call__" + type(client.transport.get_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataStream() + call.return_value = resources.CustomMetric() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataStream() + resources.CustomMetric() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_data_stream( - data_stream=resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.get_custom_metric( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].data_stream - mock_val = resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ) - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_data_stream_flattened_error_async(): +async def test_get_custom_metric_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -28810,25 +28917,20 @@ async def test_update_data_stream_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_data_stream( - analytics_admin.UpdateDataStreamRequest(), - data_stream=resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.get_custom_metric( + analytics_admin.GetCustomMetricRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListDataStreamsRequest, + analytics_admin.GetDataRetentionSettingsRequest, dict, ], ) -def test_list_data_streams(request_type, transport: str = "grpc"): +def test_get_data_retention_settings(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28840,26 +28942,33 @@ def test_list_data_streams(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_data_streams), "__call__" + type(client.transport.get_data_retention_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListDataStreamsResponse( - next_page_token="next_page_token_value", + call.return_value = resources.DataRetentionSettings( + name="name_value", + event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + reset_user_data_on_new_activity=True, ) - response = client.list_data_streams(request) + response = client.get_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListDataStreamsRequest() + request = analytics_admin.GetDataRetentionSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataStreamsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.DataRetentionSettings) + assert response.name == "name_value" + assert ( + response.event_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) + assert response.reset_user_data_on_new_activity is True -def test_list_data_streams_empty_call(): +def test_get_data_retention_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -28869,18 +28978,18 @@ def test_list_data_streams_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_data_streams), "__call__" + type(client.transport.get_data_retention_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_data_streams() + client.get_data_retention_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListDataStreamsRequest() + assert args[0] == analytics_admin.GetDataRetentionSettingsRequest() -def test_list_data_streams_non_empty_request_with_auto_populated_field(): +def test_get_data_retention_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -28891,28 +29000,26 @@ def test_list_data_streams_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListDataStreamsRequest( - parent="parent_value", - page_token="page_token_value", + request = analytics_admin.GetDataRetentionSettingsRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_data_streams), "__call__" + type(client.transport.get_data_retention_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_data_streams(request=request) + client.get_data_retention_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListDataStreamsRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == analytics_admin.GetDataRetentionSettingsRequest( + name="name_value", ) -def test_list_data_streams_use_cached_wrapped_rpc(): +def test_get_data_retention_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28926,7 +29033,10 @@ def test_list_data_streams_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_data_streams in client._transport._wrapped_methods + assert ( + client._transport.get_data_retention_settings + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -28934,15 +29044,15 @@ def test_list_data_streams_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_data_streams + client._transport.get_data_retention_settings ] = mock_rpc request = {} - client.list_data_streams(request) + client.get_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_data_streams(request) + client.get_data_retention_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -28950,7 +29060,7 @@ def test_list_data_streams_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_data_streams_empty_call_async(): +async def test_get_data_retention_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -28960,22 +29070,24 @@ async def test_list_data_streams_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_data_streams), "__call__" + type(client.transport.get_data_retention_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListDataStreamsResponse( - next_page_token="next_page_token_value", + resources.DataRetentionSettings( + name="name_value", + event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + reset_user_data_on_new_activity=True, ) ) - response = await client.list_data_streams() + response = await client.get_data_retention_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListDataStreamsRequest() + assert args[0] == analytics_admin.GetDataRetentionSettingsRequest() @pytest.mark.asyncio -async def test_list_data_streams_async_use_cached_wrapped_rpc( +async def test_get_data_retention_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -28992,32 +29104,34 @@ async def test_list_data_streams_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - 
client._client._transport.list_data_streams + client._client._transport.get_data_retention_settings in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_data_streams - ] = mock_object + client._client._transport.get_data_retention_settings + ] = mock_rpc request = {} - await client.list_data_streams(request) + await client.get_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_data_streams(request) + await client.get_data_retention_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_data_streams_async( - transport: str = "grpc_asyncio", request_type=analytics_admin.ListDataStreamsRequest +async def test_get_data_retention_settings_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.GetDataRetentionSettingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -29030,49 +29144,56 @@ async def test_list_data_streams_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_data_streams), "__call__" + type(client.transport.get_data_retention_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListDataStreamsResponse( - next_page_token="next_page_token_value", + resources.DataRetentionSettings( + name="name_value", + event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + reset_user_data_on_new_activity=True, ) ) - response = await client.list_data_streams(request) + response = await client.get_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListDataStreamsRequest() + request = analytics_admin.GetDataRetentionSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataStreamsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.DataRetentionSettings) + assert response.name == "name_value" + assert ( + response.event_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) + assert response.reset_user_data_on_new_activity is True @pytest.mark.asyncio -async def test_list_data_streams_async_from_dict(): - await test_list_data_streams_async(request_type=dict) +async def test_get_data_retention_settings_async_from_dict(): + await test_get_data_retention_settings_async(request_type=dict) -def test_list_data_streams_field_headers(): +def test_get_data_retention_settings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListDataStreamsRequest() + request = analytics_admin.GetDataRetentionSettingsRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_data_streams), "__call__" + type(client.transport.get_data_retention_settings), "__call__" ) as call: - call.return_value = analytics_admin.ListDataStreamsResponse() - client.list_data_streams(request) + call.return_value = resources.DataRetentionSettings() + client.get_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -29083,30 +29204,30 @@ def test_list_data_streams_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_data_streams_field_headers_async(): +async def test_get_data_retention_settings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListDataStreamsRequest() + request = analytics_admin.GetDataRetentionSettingsRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_data_streams), "__call__" + type(client.transport.get_data_retention_settings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListDataStreamsResponse() + resources.DataRetentionSettings() ) - await client.list_data_streams(request) + await client.get_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -29117,37 +29238,37 @@ async def test_list_data_streams_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_data_streams_flattened(): +def test_get_data_retention_settings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_data_streams), "__call__" + type(client.transport.get_data_retention_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListDataStreamsResponse() + call.return_value = resources.DataRetentionSettings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_data_streams( - parent="parent_value", + client.get_data_retention_settings( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_data_streams_flattened_error(): +def test_get_data_retention_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -29155,45 +29276,45 @@ def test_list_data_streams_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_data_streams( - analytics_admin.ListDataStreamsRequest(), - parent="parent_value", + client.get_data_retention_settings( + analytics_admin.GetDataRetentionSettingsRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_data_streams_flattened_async(): +async def test_get_data_retention_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_data_streams), "__call__" + type(client.transport.get_data_retention_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListDataStreamsResponse() + call.return_value = resources.DataRetentionSettings() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListDataStreamsResponse() + resources.DataRetentionSettings() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_data_streams( - parent="parent_value", + response = await client.get_data_retention_settings( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_data_streams_flattened_error_async(): +async def test_get_data_retention_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -29201,222 +29322,20 @@ async def test_list_data_streams_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_data_streams( - analytics_admin.ListDataStreamsRequest(), - parent="parent_value", - ) - - -def test_list_data_streams_pager(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_streams), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - resources.DataStream(), - resources.DataStream(), - ], - next_page_token="abc", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[], - next_page_token="def", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - ], - next_page_token="ghi", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - resources.DataStream(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_data_streams(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.DataStream) for i in results) - - -def test_list_data_streams_pages(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_streams), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - resources.DataStream(), - resources.DataStream(), - ], - next_page_token="abc", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[], - next_page_token="def", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - ], - next_page_token="ghi", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - resources.DataStream(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_streams(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_data_streams_async_pager(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_streams), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - resources.DataStream(), - resources.DataStream(), - ], - next_page_token="abc", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[], - next_page_token="def", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - ], - next_page_token="ghi", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - resources.DataStream(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_streams( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.DataStream) for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_streams_async_pages(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_streams), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - resources.DataStream(), - resources.DataStream(), - ], - next_page_token="abc", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[], - next_page_token="def", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - ], - next_page_token="ghi", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - resources.DataStream(), - ], - ), - RuntimeError, + await client.get_data_retention_settings( + analytics_admin.GetDataRetentionSettingsRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_streams(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetDataStreamRequest, + analytics_admin.UpdateDataRetentionSettingsRequest, dict, ], ) -def test_get_data_stream(request_type, transport: str = "grpc"): +def test_update_data_retention_settings(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -29427,29 +29346,34 @@ def test_get_data_stream(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_retention_settings), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.DataStream( + call.return_value = resources.DataRetentionSettings( name="name_value", - type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, - display_name="display_name_value", + event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + reset_user_data_on_new_activity=True, ) - response = client.get_data_stream(request) + response = client.update_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetDataStreamRequest() + request = analytics_admin.UpdateDataRetentionSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DataStream) + assert isinstance(response, resources.DataRetentionSettings) assert response.name == "name_value" - assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM - assert response.display_name == "display_name_value" + assert ( + response.event_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) + assert response.reset_user_data_on_new_activity is True -def test_get_data_stream_empty_call(): +def test_update_data_retention_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -29458,17 +29382,19 @@ def test_get_data_stream_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_retention_settings), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_data_stream() + client.update_data_retention_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetDataStreamRequest() + assert args[0] == analytics_admin.UpdateDataRetentionSettingsRequest() -def test_get_data_stream_non_empty_request_with_auto_populated_field(): +def test_update_data_retention_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -29479,24 +29405,22 @@ def test_get_data_stream_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetDataStreamRequest( - name="name_value", - ) + request = analytics_admin.UpdateDataRetentionSettingsRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_retention_settings), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_data_stream(request=request) + client.update_data_retention_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetDataStreamRequest( - name="name_value", - ) + assert args[0] == analytics_admin.UpdateDataRetentionSettingsRequest() -def test_get_data_stream_use_cached_wrapped_rpc(): +def test_update_data_retention_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -29510,21 +29434,26 @@ def test_get_data_stream_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_data_stream in client._transport._wrapped_methods + assert ( + client._transport.update_data_retention_settings + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_data_stream] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_data_retention_settings + ] = mock_rpc request = {} - client.get_data_stream(request) + client.update_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_data_stream(request) + client.update_data_retention_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -29532,7 +29461,7 @@ def test_get_data_stream_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_data_stream_empty_call_async(): +async def test_update_data_retention_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -29541,23 +29470,25 @@ async def test_get_data_stream_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_retention_settings), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataStream( + resources.DataRetentionSettings( name="name_value", - type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, - display_name="display_name_value", + event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + reset_user_data_on_new_activity=True, ) ) - response = await client.get_data_stream() + response = await client.update_data_retention_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetDataStreamRequest() + assert args[0] == analytics_admin.UpdateDataRetentionSettingsRequest() @pytest.mark.asyncio -async def test_get_data_stream_async_use_cached_wrapped_rpc( +async def test_update_data_retention_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -29574,34 +29505,36 @@ async def test_get_data_stream_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_data_stream + client._client._transport.update_data_retention_settings in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_data_stream - ] = mock_object + 
client._client._transport.update_data_retention_settings + ] = mock_rpc request = {} - await client.get_data_stream(request) + await client.update_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_data_stream(request) + await client.update_data_retention_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_data_stream_async( - transport: str = "grpc_asyncio", request_type=analytics_admin.GetDataStreamRequest -): - client = AnalyticsAdminServiceAsyncClient( +async def test_update_data_retention_settings_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.UpdateDataRetentionSettingsRequest, +): + client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -29611,50 +29544,57 @@ async def test_get_data_stream_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_retention_settings), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataStream( + resources.DataRetentionSettings( name="name_value", - type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, - display_name="display_name_value", + event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + reset_user_data_on_new_activity=True, ) ) - response = await client.get_data_stream(request) + response = await client.update_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetDataStreamRequest() + request = analytics_admin.UpdateDataRetentionSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DataStream) + assert isinstance(response, resources.DataRetentionSettings) assert response.name == "name_value" - assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM - assert response.display_name == "display_name_value" + assert ( + response.event_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) + assert response.reset_user_data_on_new_activity is True @pytest.mark.asyncio -async def test_get_data_stream_async_from_dict(): - await test_get_data_stream_async(request_type=dict) +async def test_update_data_retention_settings_async_from_dict(): + await test_update_data_retention_settings_async(request_type=dict) -def test_get_data_stream_field_headers(): +def test_update_data_retention_settings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetDataStreamRequest() + request = analytics_admin.UpdateDataRetentionSettingsRequest() - request.name = "name_value" + request.data_retention_settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: - call.return_value = resources.DataStream() - client.get_data_stream(request) + with mock.patch.object( + type(client.transport.update_data_retention_settings), "__call__" + ) as call: + call.return_value = resources.DataRetentionSettings() + client.update_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -29665,28 +29605,30 @@ def test_get_data_stream_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "data_retention_settings.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_data_stream_field_headers_async(): +async def test_update_data_retention_settings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetDataStreamRequest() + request = analytics_admin.UpdateDataRetentionSettingsRequest() - request.name = "name_value" + request.data_retention_settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_retention_settings), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataStream() + resources.DataRetentionSettings() ) - await client.get_data_stream(request) + await client.update_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -29697,35 +29639,41 @@ async def test_get_data_stream_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "data_retention_settings.name=name_value", ) in kw["metadata"] -def test_get_data_stream_flattened(): +def test_update_data_retention_settings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_retention_settings), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataStream() + call.return_value = resources.DataRetentionSettings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_data_stream( - name="name_value", + client.update_data_retention_settings( + data_retention_settings=resources.DataRetentionSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].data_retention_settings + mock_val = resources.DataRetentionSettings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_data_stream_flattened_error(): +def test_update_data_retention_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -29733,43 +29681,50 @@ def test_get_data_stream_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_data_stream( - analytics_admin.GetDataStreamRequest(), - name="name_value", + client.update_data_retention_settings( + analytics_admin.UpdateDataRetentionSettingsRequest(), + data_retention_settings=resources.DataRetentionSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_data_stream_flattened_async(): +async def test_update_data_retention_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_retention_settings), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataStream() + call.return_value = resources.DataRetentionSettings() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataStream() + resources.DataRetentionSettings() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_data_stream( - name="name_value", + response = await client.update_data_retention_settings( + data_retention_settings=resources.DataRetentionSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].data_retention_settings + mock_val = resources.DataRetentionSettings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_data_stream_flattened_error_async(): +async def test_update_data_retention_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -29777,20 +29732,21 @@ async def test_get_data_stream_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_data_stream( - analytics_admin.GetDataStreamRequest(), - name="name_value", + await client.update_data_retention_settings( + analytics_admin.UpdateDataRetentionSettingsRequest(), + data_retention_settings=resources.DataRetentionSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetAudienceRequest, + analytics_admin.CreateDataStreamRequest, dict, ], ) -def test_get_audience(request_type, transport: str = "grpc"): +def test_create_data_stream(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -29801,38 +29757,31 @@ def test_get_audience(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.create_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = audience.Audience( + call.return_value = resources.DataStream( name="name_value", + type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, display_name="display_name_value", - description="description_value", - membership_duration_days=2561, - ads_personalization_enabled=True, - exclusion_duration_mode=audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) - response = client.get_audience(request) + response = client.create_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetAudienceRequest() + request = analytics_admin.CreateDataStreamRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, audience.Audience) + assert isinstance(response, resources.DataStream) assert response.name == "name_value" + assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.membership_duration_days == 2561 - assert response.ads_personalization_enabled is True - assert ( - response.exclusion_duration_mode - == audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY - ) -def test_get_audience_empty_call(): +def test_create_data_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -29841,17 +29790,19 @@ def test_get_audience_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.create_data_stream), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_audience() + client.create_data_stream() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetAudienceRequest() + assert args[0] == analytics_admin.CreateDataStreamRequest() -def test_get_audience_non_empty_request_with_auto_populated_field(): +def test_create_data_stream_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -29862,24 +29813,26 @@ def test_get_audience_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetAudienceRequest( - name="name_value", + request = analytics_admin.CreateDataStreamRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.create_data_stream), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_audience(request=request) + client.create_data_stream(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetAudienceRequest( - name="name_value", + assert args[0] == analytics_admin.CreateDataStreamRequest( + parent="parent_value", ) -def test_get_audience_use_cached_wrapped_rpc(): +def test_create_data_stream_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -29893,21 +29846,25 @@ def test_get_audience_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_audience in client._transport._wrapped_methods + assert ( + client._transport.create_data_stream in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_audience] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_data_stream + ] = mock_rpc request = {} - client.get_audience(request) + client.create_data_stream(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_audience(request) + client.create_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -29915,7 +29872,7 @@ def test_get_audience_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_audience_empty_call_async(): +async def test_create_data_stream_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -29924,26 +29881,25 @@ async def test_get_audience_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.create_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - audience.Audience( + resources.DataStream( name="name_value", + type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, display_name="display_name_value", - description="description_value", - membership_duration_days=2561, - ads_personalization_enabled=True, - exclusion_duration_mode=audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) ) - response = await client.get_audience() + response = await client.create_data_stream() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetAudienceRequest() + assert args[0] == analytics_admin.CreateDataStreamRequest() @pytest.mark.asyncio -async def test_get_audience_async_use_cached_wrapped_rpc( +async def test_create_data_stream_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -29960,32 +29916,34 @@ async def test_get_audience_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_audience + client._client._transport.create_data_stream in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_audience - ] = mock_object + client._client._transport.create_data_stream + ] = mock_rpc request = {} - await client.get_audience(request) + await 
client.create_data_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_audience(request) + await client.create_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_audience_async( - transport: str = "grpc_asyncio", request_type=analytics_admin.GetAudienceRequest +async def test_create_data_stream_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.CreateDataStreamRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -29997,59 +29955,54 @@ async def test_get_audience_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.create_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - audience.Audience( + resources.DataStream( name="name_value", + type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, display_name="display_name_value", - description="description_value", - membership_duration_days=2561, - ads_personalization_enabled=True, - exclusion_duration_mode=audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) ) - response = await client.get_audience(request) + response = await client.create_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetAudienceRequest() + request = analytics_admin.CreateDataStreamRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, audience.Audience) + assert isinstance(response, resources.DataStream) assert response.name == "name_value" + assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.membership_duration_days == 2561 - assert response.ads_personalization_enabled is True - assert ( - response.exclusion_duration_mode - == audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY - ) @pytest.mark.asyncio -async def test_get_audience_async_from_dict(): - await test_get_audience_async(request_type=dict) +async def test_create_data_stream_async_from_dict(): + await test_create_data_stream_async(request_type=dict) -def test_get_audience_field_headers(): +def test_create_data_stream_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetAudienceRequest() + request = analytics_admin.CreateDataStreamRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_audience), "__call__") as call: - call.return_value = audience.Audience() - client.get_audience(request) + with mock.patch.object( + type(client.transport.create_data_stream), "__call__" + ) as call: + call.return_value = resources.DataStream() + client.create_data_stream(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -30060,26 +30013,30 @@ def test_get_audience_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_audience_field_headers_async(): +async def test_create_data_stream_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetAudienceRequest() + request = analytics_admin.CreateDataStreamRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_audience), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(audience.Audience()) - await client.get_audience(request) + with mock.patch.object( + type(client.transport.create_data_stream), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.DataStream() + ) + await client.create_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -30090,35 +30047,49 @@ async def test_get_audience_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_audience_flattened(): +def test_create_data_stream_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.create_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = audience.Audience() + call.return_value = resources.DataStream() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_audience( - name="name_value", + client.create_data_stream( + parent="parent_value", + data_stream=resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].data_stream + mock_val = resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ) assert arg == mock_val -def test_get_audience_flattened_error(): +def test_create_data_stream_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -30126,41 +30097,62 @@ def test_get_audience_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_audience( - analytics_admin.GetAudienceRequest(), - name="name_value", + client.create_data_stream( + analytics_admin.CreateDataStreamRequest(), + parent="parent_value", + data_stream=resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ), ) @pytest.mark.asyncio -async def test_get_audience_flattened_async(): +async def test_create_data_stream_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.create_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = audience.Audience() + call.return_value = resources.DataStream() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(audience.Audience()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.DataStream() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_audience( - name="name_value", + response = await client.create_data_stream( + parent="parent_value", + data_stream=resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].data_stream + mock_val = resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ) assert arg == mock_val @pytest.mark.asyncio -async def test_get_audience_flattened_error_async(): +async def test_create_data_stream_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -30168,20 +30160,25 @@ async def test_get_audience_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_audience( - analytics_admin.GetAudienceRequest(), - name="name_value", + await client.create_data_stream( + analytics_admin.CreateDataStreamRequest(), + parent="parent_value", + data_stream=resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListAudiencesRequest, + analytics_admin.DeleteDataStreamRequest, dict, ], ) -def test_list_audiences(request_type, transport: str = "grpc"): +def test_delete_data_stream(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -30192,25 +30189,24 @@ def test_list_audiences(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListAudiencesResponse( - next_page_token="next_page_token_value", - ) - response = client.list_audiences(request) + call.return_value = None + response = client.delete_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListAudiencesRequest() + request = analytics_admin.DeleteDataStreamRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAudiencesPager) - assert response.next_page_token == "next_page_token_value" + assert response is None -def test_list_audiences_empty_call(): +def test_delete_data_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -30219,17 +30215,19 @@ def test_list_audiences_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_stream), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_audiences() + client.delete_data_stream() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListAudiencesRequest() + assert args[0] == analytics_admin.DeleteDataStreamRequest() -def test_list_audiences_non_empty_request_with_auto_populated_field(): +def test_delete_data_stream_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -30240,26 +30238,26 @@ def test_list_audiences_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListAudiencesRequest( - parent="parent_value", - page_token="page_token_value", + request = analytics_admin.DeleteDataStreamRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_stream), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_audiences(request=request) + client.delete_data_stream(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListAudiencesRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == analytics_admin.DeleteDataStreamRequest( + name="name_value", ) -def test_list_audiences_use_cached_wrapped_rpc(): +def test_delete_data_stream_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -30273,21 +30271,25 @@ def test_list_audiences_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_audiences in client._transport._wrapped_methods + assert ( + client._transport.delete_data_stream in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_audiences] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_data_stream + ] = mock_rpc request = {} - client.list_audiences(request) + client.delete_data_stream(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_audiences(request) + client.delete_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -30295,7 +30297,7 @@ def test_list_audiences_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_audiences_empty_call_async(): +async def test_delete_data_stream_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -30304,21 +30306,19 @@ async def test_list_audiences_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListAudiencesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_audiences() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_data_stream() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListAudiencesRequest() + assert args[0] == analytics_admin.DeleteDataStreamRequest() @pytest.mark.asyncio -async def test_list_audiences_async_use_cached_wrapped_rpc( +async def test_delete_data_stream_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -30335,32 +30335,34 @@ async def test_list_audiences_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_audiences + client._client._transport.delete_data_stream in client._client._transport._wrapped_methods ) # Replace cached wrapped function with 
mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_audiences - ] = mock_object + client._client._transport.delete_data_stream + ] = mock_rpc request = {} - await client.list_audiences(request) + await client.delete_data_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_audiences(request) + await client.delete_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_audiences_async( - transport: str = "grpc_asyncio", request_type=analytics_admin.ListAudiencesRequest +async def test_delete_data_stream_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.DeleteDataStreamRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -30372,46 +30374,45 @@ async def test_list_audiences_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListAudiencesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_audiences(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_data_stream(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListAudiencesRequest() + request = analytics_admin.DeleteDataStreamRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAudiencesAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert response is None @pytest.mark.asyncio -async def test_list_audiences_async_from_dict(): - await test_list_audiences_async(request_type=dict) +async def test_delete_data_stream_async_from_dict(): + await test_delete_data_stream_async(request_type=dict) -def test_list_audiences_field_headers(): +def test_delete_data_stream_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListAudiencesRequest() + request = analytics_admin.DeleteDataStreamRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: - call.return_value = analytics_admin.ListAudiencesResponse() - client.list_audiences(request) + with mock.patch.object( + type(client.transport.delete_data_stream), "__call__" + ) as call: + call.return_value = None + client.delete_data_stream(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -30422,28 +30423,28 @@ def test_list_audiences_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_audiences_field_headers_async(): +async def test_delete_data_stream_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListAudiencesRequest() + request = analytics_admin.DeleteDataStreamRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListAudiencesResponse() - ) - await client.list_audiences(request) + with mock.patch.object( + type(client.transport.delete_data_stream), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -30454,35 +30455,37 @@ async def test_list_audiences_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_audiences_flattened(): +def test_delete_data_stream_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListAudiencesResponse() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_audiences( - parent="parent_value", + client.delete_data_stream( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_audiences_flattened_error(): +def test_delete_data_stream_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -30490,43 +30493,43 @@ def test_list_audiences_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_audiences( - analytics_admin.ListAudiencesRequest(), - parent="parent_value", + client.delete_data_stream( + analytics_admin.DeleteDataStreamRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_audiences_flattened_async(): +async def test_delete_data_stream_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.ListAudiencesResponse() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListAudiencesResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_audiences( - parent="parent_value", + response = await client.delete_data_stream( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_audiences_flattened_error_async(): +async def test_delete_data_stream_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -30534,214 +30537,20 @@ async def test_list_audiences_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_audiences( - analytics_admin.ListAudiencesRequest(), - parent="parent_value", - ) - - -def test_list_audiences_pager(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - audience.Audience(), - audience.Audience(), - ], - next_page_token="abc", - ), - analytics_admin.ListAudiencesResponse( - audiences=[], - next_page_token="def", - ), - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - audience.Audience(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_audiences(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, audience.Audience) for i in results) - - -def test_list_audiences_pages(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - audience.Audience(), - audience.Audience(), - ], - next_page_token="abc", - ), - analytics_admin.ListAudiencesResponse( - audiences=[], - next_page_token="def", - ), - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - audience.Audience(), - ], - ), - RuntimeError, - ) - pages = list(client.list_audiences(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_audiences_async_pager(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_audiences), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - audience.Audience(), - audience.Audience(), - ], - next_page_token="abc", - ), - analytics_admin.ListAudiencesResponse( - audiences=[], - next_page_token="def", - ), - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - audience.Audience(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_audiences( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, audience.Audience) for i in responses) - - -@pytest.mark.asyncio -async def test_list_audiences_async_pages(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_audiences), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - audience.Audience(), - audience.Audience(), - ], - next_page_token="abc", - ), - analytics_admin.ListAudiencesResponse( - audiences=[], - next_page_token="def", - ), - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - audience.Audience(), - ], - ), - RuntimeError, + await client.delete_data_stream( + analytics_admin.DeleteDataStreamRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_audiences(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateAudienceRequest, + analytics_admin.UpdateDataStreamRequest, dict, ], ) -def test_create_audience(request_type, transport: str = "grpc"): +def test_update_data_stream(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -30752,38 +30561,31 @@ def test_create_audience(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = gaa_audience.Audience( + call.return_value = resources.DataStream( name="name_value", + type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, display_name="display_name_value", - description="description_value", - membership_duration_days=2561, - ads_personalization_enabled=True, - exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) - response = client.create_audience(request) + response = client.update_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateAudienceRequest() + request = analytics_admin.UpdateDataStreamRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gaa_audience.Audience) + assert isinstance(response, resources.DataStream) assert response.name == "name_value" + assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.membership_duration_days == 2561 - assert response.ads_personalization_enabled is True - assert ( - response.exclusion_duration_mode - == gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY - ) -def test_create_audience_empty_call(): +def test_update_data_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -30792,17 +30594,19 @@ def test_create_audience_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_stream), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_audience() + client.update_data_stream() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateAudienceRequest() + assert args[0] == analytics_admin.UpdateDataStreamRequest() -def test_create_audience_non_empty_request_with_auto_populated_field(): +def test_update_data_stream_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -30813,24 +30617,22 @@ def test_create_audience_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.CreateAudienceRequest( - parent="parent_value", - ) + request = analytics_admin.UpdateDataStreamRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_stream), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_audience(request=request) + client.update_data_stream(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateAudienceRequest( - parent="parent_value", - ) + assert args[0] == analytics_admin.UpdateDataStreamRequest() -def test_create_audience_use_cached_wrapped_rpc(): +def test_update_data_stream_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -30844,21 +30646,25 @@ def test_create_audience_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_audience in client._transport._wrapped_methods + assert ( + client._transport.update_data_stream in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_audience] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_data_stream + ] = mock_rpc request = {} - client.create_audience(request) + client.update_data_stream(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_audience(request) + client.update_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -30866,7 +30672,7 @@ def test_create_audience_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_audience_empty_call_async(): +async def test_update_data_stream_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -30875,26 +30681,25 @@ async def test_create_audience_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_audience.Audience( + resources.DataStream( name="name_value", + type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, display_name="display_name_value", - description="description_value", - membership_duration_days=2561, - ads_personalization_enabled=True, - exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) ) - response = await client.create_audience() + response = await client.update_data_stream() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateAudienceRequest() + assert args[0] == analytics_admin.UpdateDataStreamRequest() @pytest.mark.asyncio -async def test_create_audience_async_use_cached_wrapped_rpc( +async def test_update_data_stream_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -30911,32 +30716,34 @@ async def test_create_audience_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_audience + client._client._transport.update_data_stream in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_audience - ] = mock_object + client._client._transport.update_data_stream + ] = mock_rpc request = {} - await 
client.create_audience(request) + await client.update_data_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_audience(request) + await client.update_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_audience_async( - transport: str = "grpc_asyncio", request_type=analytics_admin.CreateAudienceRequest +async def test_update_data_stream_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.UpdateDataStreamRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -30948,59 +30755,54 @@ async def test_create_audience_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_audience.Audience( + resources.DataStream( name="name_value", + type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, display_name="display_name_value", - description="description_value", - membership_duration_days=2561, - ads_personalization_enabled=True, - exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) ) - response = await client.create_audience(request) + response = await client.update_data_stream(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateAudienceRequest() + request = analytics_admin.UpdateDataStreamRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gaa_audience.Audience) + assert isinstance(response, resources.DataStream) assert response.name == "name_value" + assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.membership_duration_days == 2561 - assert response.ads_personalization_enabled is True - assert ( - response.exclusion_duration_mode - == gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY - ) @pytest.mark.asyncio -async def test_create_audience_async_from_dict(): - await test_create_audience_async(request_type=dict) +async def test_update_data_stream_async_from_dict(): + await test_update_data_stream_async(request_type=dict) -def test_create_audience_field_headers(): +def test_update_data_stream_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateAudienceRequest() + request = analytics_admin.UpdateDataStreamRequest() - request.parent = "parent_value" + request.data_stream.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_audience), "__call__") as call: - call.return_value = gaa_audience.Audience() - client.create_audience(request) + with mock.patch.object( + type(client.transport.update_data_stream), "__call__" + ) as call: + call.return_value = resources.DataStream() + client.update_data_stream(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -31011,28 +30813,30 @@ def test_create_audience_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "data_stream.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_audience_field_headers_async(): +async def test_update_data_stream_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateAudienceRequest() + request = analytics_admin.UpdateDataStreamRequest() - request.parent = "parent_value" + request.data_stream.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_stream), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_audience.Audience() + resources.DataStream() ) - await client.create_audience(request) + await client.update_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -31043,39 +30847,49 @@ async def test_create_audience_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "data_stream.name=name_value", ) in kw["metadata"] -def test_create_audience_flattened(): +def test_update_data_stream_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = gaa_audience.Audience() + call.return_value = resources.DataStream() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_audience( - parent="parent_value", - audience=gaa_audience.Audience(name="name_value"), + client.update_data_stream( + data_stream=resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].data_stream + mock_val = resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ) assert arg == mock_val - arg = args[0].audience - mock_val = gaa_audience.Audience(name="name_value") + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_create_audience_flattened_error(): +def test_update_data_stream_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -31083,48 +30897,62 @@ def test_create_audience_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_audience( - analytics_admin.CreateAudienceRequest(), - parent="parent_value", - audience=gaa_audience.Audience(name="name_value"), + client.update_data_stream( + analytics_admin.UpdateDataStreamRequest(), + data_stream=resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_create_audience_flattened_async(): +async def test_update_data_stream_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.update_data_stream), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = gaa_audience.Audience() + call.return_value = resources.DataStream() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_audience.Audience() + resources.DataStream() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_audience( - parent="parent_value", - audience=gaa_audience.Audience(name="name_value"), + response = await client.update_data_stream( + data_stream=resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].data_stream + mock_val = resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ) assert arg == mock_val - arg = args[0].audience - mock_val = gaa_audience.Audience(name="name_value") + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_create_audience_flattened_error_async(): +async def test_update_data_stream_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -31132,21 +30960,25 @@ async def test_create_audience_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_audience( - analytics_admin.CreateAudienceRequest(), - parent="parent_value", - audience=gaa_audience.Audience(name="name_value"), - ) - - -@pytest.mark.parametrize( + await client.update_data_stream( + analytics_admin.UpdateDataStreamRequest(), + data_stream=resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateAudienceRequest, + analytics_admin.ListDataStreamsRequest, dict, ], ) -def test_update_audience(request_type, transport: str = "grpc"): +def test_list_data_streams(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -31157,38 +30989,27 @@ def test_update_audience(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.list_data_streams), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = gaa_audience.Audience( - name="name_value", - display_name="display_name_value", - description="description_value", - membership_duration_days=2561, - ads_personalization_enabled=True, - exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, + call.return_value = analytics_admin.ListDataStreamsResponse( + next_page_token="next_page_token_value", ) - response = client.update_audience(request) + response = client.list_data_streams(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateAudienceRequest() + request = analytics_admin.ListDataStreamsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gaa_audience.Audience) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.membership_duration_days == 2561 - assert response.ads_personalization_enabled is True - assert ( - response.exclusion_duration_mode - == gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY - ) + assert isinstance(response, pagers.ListDataStreamsPager) + assert response.next_page_token == "next_page_token_value" -def test_update_audience_empty_call(): +def test_list_data_streams_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -31197,17 +31018,19 @@ def test_update_audience_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.list_data_streams), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_audience() + client.list_data_streams() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateAudienceRequest() + assert args[0] == analytics_admin.ListDataStreamsRequest() -def test_update_audience_non_empty_request_with_auto_populated_field(): +def test_list_data_streams_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -31218,20 +31041,28 @@ def test_update_audience_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.UpdateAudienceRequest() + request = analytics_admin.ListDataStreamsRequest( + parent="parent_value", + page_token="page_token_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.list_data_streams), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_audience(request=request) + client.list_data_streams(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateAudienceRequest() + assert args[0] == analytics_admin.ListDataStreamsRequest( + parent="parent_value", + page_token="page_token_value", + ) -def test_update_audience_use_cached_wrapped_rpc(): +def test_list_data_streams_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -31245,21 +31076,23 @@ def test_update_audience_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_audience in client._transport._wrapped_methods + assert client._transport.list_data_streams in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_audience] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_data_streams + ] = mock_rpc request = {} - client.update_audience(request) + client.list_data_streams(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_audience(request) + client.list_data_streams(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -31267,7 +31100,7 @@ def test_update_audience_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_audience_empty_call_async(): +async def test_list_data_streams_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -31276,26 +31109,23 @@ async def test_update_audience_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.list_data_streams), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_audience.Audience( - name="name_value", - display_name="display_name_value", - description="description_value", - membership_duration_days=2561, - ads_personalization_enabled=True, - exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, + analytics_admin.ListDataStreamsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_audience() + response = await client.list_data_streams() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateAudienceRequest() + assert args[0] == analytics_admin.ListDataStreamsRequest() @pytest.mark.asyncio -async def test_update_audience_async_use_cached_wrapped_rpc( +async def test_list_data_streams_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -31312,32 +31142,33 @@ async def test_update_audience_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_audience + client._client._transport.list_data_streams in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_audience - ] = mock_object + client._client._transport.list_data_streams + ] = mock_rpc request = {} - await 
client.update_audience(request) + await client.list_data_streams(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_audience(request) + await client.list_data_streams(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_audience_async( - transport: str = "grpc_asyncio", request_type=analytics_admin.UpdateAudienceRequest +async def test_list_data_streams_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.ListDataStreamsRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -31349,59 +31180,50 @@ async def test_update_audience_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.list_data_streams), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_audience.Audience( - name="name_value", - display_name="display_name_value", - description="description_value", - membership_duration_days=2561, - ads_personalization_enabled=True, - exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, + analytics_admin.ListDataStreamsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_audience(request) + response = await client.list_data_streams(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateAudienceRequest() + request = analytics_admin.ListDataStreamsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gaa_audience.Audience) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.membership_duration_days == 2561 - assert response.ads_personalization_enabled is True - assert ( - response.exclusion_duration_mode - == gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY - ) + assert isinstance(response, pagers.ListDataStreamsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_update_audience_async_from_dict(): - await test_update_audience_async(request_type=dict) +async def test_list_data_streams_async_from_dict(): + await test_list_data_streams_async(request_type=dict) -def test_update_audience_field_headers(): +def test_list_data_streams_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateAudienceRequest() + request = analytics_admin.ListDataStreamsRequest() - request.audience.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_audience), "__call__") as call: - call.return_value = gaa_audience.Audience() - client.update_audience(request) + with mock.patch.object( + type(client.transport.list_data_streams), "__call__" + ) as call: + call.return_value = analytics_admin.ListDataStreamsResponse() + client.list_data_streams(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -31412,28 +31234,30 @@ def test_update_audience_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "audience.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_audience_field_headers_async(): +async def test_list_data_streams_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateAudienceRequest() + request = analytics_admin.ListDataStreamsRequest() - request.audience.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.list_data_streams), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_audience.Audience() + analytics_admin.ListDataStreamsResponse() ) - await client.update_audience(request) + await client.list_data_streams(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -31444,39 +31268,37 @@ async def test_update_audience_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "audience.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_audience_flattened(): +def test_list_data_streams_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.list_data_streams), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = gaa_audience.Audience() + call.return_value = analytics_admin.ListDataStreamsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_audience( - audience=gaa_audience.Audience(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_data_streams( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].audience - mock_val = gaa_audience.Audience(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_audience_flattened_error(): +def test_list_data_streams_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -31484,48 +31306,45 @@ def test_update_audience_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_audience( - analytics_admin.UpdateAudienceRequest(), - audience=gaa_audience.Audience(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_data_streams( + analytics_admin.ListDataStreamsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_audience_flattened_async(): +async def test_list_data_streams_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_audience), "__call__") as call: + with mock.patch.object( + type(client.transport.list_data_streams), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = gaa_audience.Audience() + call.return_value = analytics_admin.ListDataStreamsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_audience.Audience() + analytics_admin.ListDataStreamsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_audience( - audience=gaa_audience.Audience(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_data_streams( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].audience - mock_val = gaa_audience.Audience(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_audience_flattened_error_async(): +async def test_list_data_streams_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -31533,21 +31352,222 @@ async def test_update_audience_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_audience( - analytics_admin.UpdateAudienceRequest(), - audience=gaa_audience.Audience(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_data_streams( + analytics_admin.ListDataStreamsRequest(), + parent="parent_value", + ) + + +def test_list_data_streams_pager(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_streams), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + resources.DataStream(), + resources.DataStream(), + ], + next_page_token="abc", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[], + next_page_token="def", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + ], + next_page_token="ghi", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + resources.DataStream(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_streams(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.DataStream) for i in results) + + +def test_list_data_streams_pages(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call 
within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_streams), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + resources.DataStream(), + resources.DataStream(), + ], + next_page_token="abc", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[], + next_page_token="def", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + ], + next_page_token="ghi", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + resources.DataStream(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_streams(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_streams_async_pager(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_streams), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + resources.DataStream(), + resources.DataStream(), + ], + next_page_token="abc", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[], + next_page_token="def", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + ], + next_page_token="ghi", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + resources.DataStream(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_streams( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.DataStream) for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_streams_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_streams), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + resources.DataStream(), + resources.DataStream(), + ], + next_page_token="abc", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[], + next_page_token="def", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + ], + next_page_token="ghi", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + resources.DataStream(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_streams(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_admin.ArchiveAudienceRequest, + analytics_admin.GetDataStreamRequest, dict, ], ) -def test_archive_audience(request_type, transport: str = "grpc"): +def test_get_data_stream(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -31558,22 +31578,29 @@ def test_archive_audience(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: + with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.archive_audience(request) + call.return_value = resources.DataStream( + name="name_value", + type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, + display_name="display_name_value", + ) + response = client.get_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ArchiveAudienceRequest() + request = analytics_admin.GetDataStreamRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.DataStream) + assert response.name == "name_value" + assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM + assert response.display_name == "display_name_value" -def test_archive_audience_empty_call(): +def test_get_data_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -31582,17 +31609,17 @@ def test_archive_audience_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: + with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.archive_audience() + client.get_data_stream() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ArchiveAudienceRequest() + assert args[0] == analytics_admin.GetDataStreamRequest() -def test_archive_audience_non_empty_request_with_auto_populated_field(): +def test_get_data_stream_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -31603,24 +31630,24 @@ def test_archive_audience_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ArchiveAudienceRequest( + request = analytics_admin.GetDataStreamRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: + with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.archive_audience(request=request) + client.get_data_stream(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ArchiveAudienceRequest( + assert args[0] == analytics_admin.GetDataStreamRequest( name="name_value", ) -def test_archive_audience_use_cached_wrapped_rpc(): +def test_get_data_stream_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -31634,23 +31661,21 @@ def test_archive_audience_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.archive_audience in client._transport._wrapped_methods + assert client._transport.get_data_stream in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.archive_audience - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_data_stream] = mock_rpc request = {} - client.archive_audience(request) + client.get_data_stream(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.archive_audience(request) + client.get_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -31658,7 +31683,7 @@ def test_archive_audience_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_archive_audience_empty_call_async(): +async def test_get_data_stream_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -31667,17 +31692,23 @@ async def test_archive_audience_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: + with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.archive_audience() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ArchiveAudienceRequest() - - -@pytest.mark.asyncio -async def test_archive_audience_async_use_cached_wrapped_rpc( + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.DataStream( + name="name_value", + type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, + display_name="display_name_value", + ) + ) + response = await client.get_data_stream() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetDataStreamRequest() + + +@pytest.mark.asyncio +async def test_get_data_stream_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -31694,32 +31725,33 @@ async def test_archive_audience_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.archive_audience + client._client._transport.get_data_stream in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.archive_audience - ] = mock_object + client._client._transport.get_data_stream + ] = mock_rpc request = {} - await client.archive_audience(request) + await client.get_data_stream(request) # Establish that the 
underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.archive_audience(request) + await client.get_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_archive_audience_async( - transport: str = "grpc_asyncio", request_type=analytics_admin.ArchiveAudienceRequest +async def test_get_data_stream_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.GetDataStreamRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -31731,41 +31763,50 @@ async def test_archive_audience_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: + with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.archive_audience(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.DataStream( + name="name_value", + type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, + display_name="display_name_value", + ) + ) + response = await client.get_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ArchiveAudienceRequest() + request = analytics_admin.GetDataStreamRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.DataStream) + assert response.name == "name_value" + assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM + assert response.display_name == "display_name_value" @pytest.mark.asyncio -async def test_archive_audience_async_from_dict(): - await test_archive_audience_async(request_type=dict) +async def test_get_data_stream_async_from_dict(): + await test_get_data_stream_async(request_type=dict) -def test_archive_audience_field_headers(): +def test_get_data_stream_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ArchiveAudienceRequest() + request = analytics_admin.GetDataStreamRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: - call.return_value = None - client.archive_audience(request) + with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: + call.return_value = resources.DataStream() + client.get_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -31781,21 +31822,23 @@ def test_archive_audience_field_headers(): @pytest.mark.asyncio -async def test_archive_audience_field_headers_async(): +async def test_get_data_stream_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ArchiveAudienceRequest() + request = analytics_admin.GetDataStreamRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.archive_audience(request) + with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.DataStream() + ) + await client.get_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -31810,14 +31853,96 @@ async def test_archive_audience_field_headers_async(): ) in kw["metadata"] +def test_get_data_stream_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.DataStream() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_data_stream( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_data_stream_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_data_stream( + analytics_admin.GetDataStreamRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_stream_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_stream), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.DataStream() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.DataStream() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_stream( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_data_stream_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_data_stream( + analytics_admin.GetDataStreamRequest(), + name="name_value", + ) + + @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetSearchAds360LinkRequest, + analytics_admin.GetAudienceRequest, dict, ], ) -def test_get_search_ads360_link(request_type, transport: str = "grpc"): +def test_get_audience(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -31828,31 +31953,38 @@ def test_get_search_ads360_link(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_audience), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.SearchAds360Link( + call.return_value = audience.Audience( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + display_name="display_name_value", + description="description_value", + membership_duration_days=2561, + ads_personalization_enabled=True, + exclusion_duration_mode=audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) - response = client.get_search_ads360_link(request) + response = client.get_audience(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetSearchAds360LinkRequest() + request = analytics_admin.GetAudienceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.SearchAds360Link) + assert isinstance(response, audience.Audience) assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.membership_duration_days == 2561 + assert response.ads_personalization_enabled is True + assert ( + response.exclusion_duration_mode + == audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY + ) -def test_get_search_ads360_link_empty_call(): +def test_get_audience_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -31861,19 +31993,17 @@ def test_get_search_ads360_link_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_audience), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_search_ads360_link() + client.get_audience() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetSearchAds360LinkRequest() + assert args[0] == analytics_admin.GetAudienceRequest() -def test_get_search_ads360_link_non_empty_request_with_auto_populated_field(): +def test_get_audience_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AnalyticsAdminServiceClient( @@ -31884,26 +32014,24 @@ def test_get_search_ads360_link_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetSearchAds360LinkRequest( + request = analytics_admin.GetAudienceRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_audience), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_search_ads360_link(request=request) + client.get_audience(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetSearchAds360LinkRequest( + assert args[0] == analytics_admin.GetAudienceRequest( name="name_value", ) -def test_get_search_ads360_link_use_cached_wrapped_rpc(): +def test_get_audience_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -31917,26 +32045,21 @@ def test_get_search_ads360_link_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_search_ads360_link - in client._transport._wrapped_methods - ) + assert client._transport.get_audience in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_search_ads360_link - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_audience] = mock_rpc request = {} - client.get_search_ads360_link(request) + client.get_audience(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_search_ads360_link(request) + client.get_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -31944,7 +32067,7 @@ def test_get_search_ads360_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_search_ads360_link_empty_call_async(): +async def test_get_audience_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -31953,25 +32076,26 @@ async def test_get_search_ads360_link_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_audience), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.SearchAds360Link( + audience.Audience( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + display_name="display_name_value", + description="description_value", + membership_duration_days=2561, + ads_personalization_enabled=True, + exclusion_duration_mode=audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) ) - response = await client.get_search_ads360_link() + response = await client.get_audience() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetSearchAds360LinkRequest() + assert args[0] == analytics_admin.GetAudienceRequest() @pytest.mark.asyncio -async def test_get_search_ads360_link_async_use_cached_wrapped_rpc( +async def test_get_audience_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -31988,33 +32112,33 @@ async def test_get_search_ads360_link_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_search_ads360_link + client._client._transport.get_audience in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_search_ads360_link - ] = mock_object + client._client._transport.get_audience + ] = mock_rpc request = {} - await client.get_search_ads360_link(request) + await client.get_audience(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_search_ads360_link(request) + await client.get_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_search_ads360_link_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.GetSearchAds360LinkRequest, +async def test_get_audience_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.GetAudienceRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -32026,54 +32150,59 @@ async def test_get_search_ads360_link_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_audience), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.SearchAds360Link( + audience.Audience( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + display_name="display_name_value", + description="description_value", + membership_duration_days=2561, + ads_personalization_enabled=True, + exclusion_duration_mode=audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) ) - response = await client.get_search_ads360_link(request) + response = await client.get_audience(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetSearchAds360LinkRequest() + request = analytics_admin.GetAudienceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.SearchAds360Link) + assert isinstance(response, audience.Audience) assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.membership_duration_days == 2561 + assert response.ads_personalization_enabled is True + assert ( + response.exclusion_duration_mode + == audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY + ) @pytest.mark.asyncio -async def test_get_search_ads360_link_async_from_dict(): - await test_get_search_ads360_link_async(request_type=dict) +async def test_get_audience_async_from_dict(): + await test_get_audience_async(request_type=dict) -def test_get_search_ads360_link_field_headers(): +def test_get_audience_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetSearchAds360LinkRequest() + request = analytics_admin.GetAudienceRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_search_ads360_link), "__call__" - ) as call: - call.return_value = resources.SearchAds360Link() - client.get_search_ads360_link(request) + with mock.patch.object(type(client.transport.get_audience), "__call__") as call: + call.return_value = audience.Audience() + client.get_audience(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -32089,25 +32218,21 @@ def test_get_search_ads360_link_field_headers(): @pytest.mark.asyncio -async def test_get_search_ads360_link_field_headers_async(): +async def test_get_audience_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetSearchAds360LinkRequest() + request = analytics_admin.GetAudienceRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_search_ads360_link), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.SearchAds360Link() - ) - await client.get_search_ads360_link(request) + with mock.patch.object(type(client.transport.get_audience), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(audience.Audience()) + await client.get_audience(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -32122,20 +32247,18 @@ async def test_get_search_ads360_link_field_headers_async(): ) in kw["metadata"] -def test_get_search_ads360_link_flattened(): +def test_get_audience_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_audience), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.SearchAds360Link() + call.return_value = audience.Audience() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_search_ads360_link( + client.get_audience( name="name_value", ) @@ -32148,7 +32271,7 @@ def test_get_search_ads360_link_flattened(): assert arg == mock_val -def test_get_search_ads360_link_flattened_error(): +def test_get_audience_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -32156,31 +32279,27 @@ def test_get_search_ads360_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_search_ads360_link( - analytics_admin.GetSearchAds360LinkRequest(), + client.get_audience( + analytics_admin.GetAudienceRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_search_ads360_link_flattened_async(): +async def test_get_audience_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_audience), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.SearchAds360Link() + call.return_value = audience.Audience() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.SearchAds360Link() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(audience.Audience()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_search_ads360_link( + response = await client.get_audience( name="name_value", ) @@ -32194,7 +32313,7 @@ async def test_get_search_ads360_link_flattened_async(): @pytest.mark.asyncio -async def test_get_search_ads360_link_flattened_error_async(): +async def test_get_audience_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -32202,8 +32321,8 @@ async def test_get_search_ads360_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_search_ads360_link( - analytics_admin.GetSearchAds360LinkRequest(), + await client.get_audience( + analytics_admin.GetAudienceRequest(), name="name_value", ) @@ -32211,11 +32330,11 @@ async def test_get_search_ads360_link_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListSearchAds360LinksRequest, + analytics_admin.ListAudiencesRequest, dict, ], ) -def test_list_search_ads360_links(request_type, transport: str = "grpc"): +def test_list_audiences(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -32226,27 +32345,25 @@ def test_list_search_ads360_links(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_search_ads360_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.ListSearchAds360LinksResponse( + call.return_value = analytics_admin.ListAudiencesResponse( next_page_token="next_page_token_value", ) - response = client.list_search_ads360_links(request) + response = client.list_audiences(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListSearchAds360LinksRequest() + request = analytics_admin.ListAudiencesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSearchAds360LinksPager) + assert isinstance(response, pagers.ListAudiencesPager) assert response.next_page_token == "next_page_token_value" -def test_list_search_ads360_links_empty_call(): +def test_list_audiences_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -32255,19 +32372,17 @@ def test_list_search_ads360_links_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_search_ads360_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_search_ads360_links() + client.list_audiences() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListSearchAds360LinksRequest() + assert args[0] == analytics_admin.ListAudiencesRequest() -def test_list_search_ads360_links_non_empty_request_with_auto_populated_field(): +def test_list_audiences_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -32278,28 +32393,26 @@ def test_list_search_ads360_links_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListSearchAds360LinksRequest( + request = analytics_admin.ListAudiencesRequest( parent="parent_value", page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_search_ads360_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_search_ads360_links(request=request) + client.list_audiences(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListSearchAds360LinksRequest( + assert args[0] == analytics_admin.ListAudiencesRequest( parent="parent_value", page_token="page_token_value", ) -def test_list_search_ads360_links_use_cached_wrapped_rpc(): +def test_list_audiences_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -32313,26 +32426,21 @@ def test_list_search_ads360_links_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_search_ads360_links - in client._transport._wrapped_methods - ) + assert client._transport.list_audiences in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_search_ads360_links - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_audiences] = mock_rpc request = {} - client.list_search_ads360_links(request) + client.list_audiences(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_search_ads360_links(request) + client.list_audiences(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -32340,7 +32448,7 @@ def test_list_search_ads360_links_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_search_ads360_links_empty_call_async(): +async def test_list_audiences_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -32349,23 +32457,21 @@ async def test_list_search_ads360_links_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_search_ads360_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListSearchAds360LinksResponse( + analytics_admin.ListAudiencesResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_search_ads360_links() + response = await client.list_audiences() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListSearchAds360LinksRequest() + assert args[0] == analytics_admin.ListAudiencesRequest() @pytest.mark.asyncio -async def test_list_search_ads360_links_async_use_cached_wrapped_rpc( +async def test_list_audiences_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -32382,33 +32488,33 @@ async def test_list_search_ads360_links_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_search_ads360_links + client._client._transport.list_audiences in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_search_ads360_links - ] = mock_object + client._client._transport.list_audiences + ] = mock_rpc request = {} - await client.list_search_ads360_links(request) + await client.list_audiences(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_search_ads360_links(request) + await client.list_audiences(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_search_ads360_links_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.ListSearchAds360LinksRequest, +async def test_list_audiences_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.ListAudiencesRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -32420,50 +32526,46 @@ async def test_list_search_ads360_links_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_search_ads360_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListSearchAds360LinksResponse( + analytics_admin.ListAudiencesResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_search_ads360_links(request) + response = await client.list_audiences(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListSearchAds360LinksRequest() + request = analytics_admin.ListAudiencesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSearchAds360LinksAsyncPager) + assert isinstance(response, pagers.ListAudiencesAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_list_search_ads360_links_async_from_dict(): - await test_list_search_ads360_links_async(request_type=dict) +async def test_list_audiences_async_from_dict(): + await test_list_audiences_async(request_type=dict) -def test_list_search_ads360_links_field_headers(): +def test_list_audiences_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListSearchAds360LinksRequest() + request = analytics_admin.ListAudiencesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_search_ads360_links), "__call__" - ) as call: - call.return_value = analytics_admin.ListSearchAds360LinksResponse() - client.list_search_ads360_links(request) + with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: + call.return_value = analytics_admin.ListAudiencesResponse() + client.list_audiences(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -32479,25 +32581,23 @@ def test_list_search_ads360_links_field_headers(): @pytest.mark.asyncio -async def test_list_search_ads360_links_field_headers_async(): +async def test_list_audiences_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_admin.ListSearchAds360LinksRequest() + request = analytics_admin.ListAudiencesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_search_ads360_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListSearchAds360LinksResponse() + analytics_admin.ListAudiencesResponse() ) - await client.list_search_ads360_links(request) + await client.list_audiences(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -32512,20 +32612,18 @@ async def test_list_search_ads360_links_field_headers_async(): ) in kw["metadata"] -def test_list_search_ads360_links_flattened(): +def test_list_audiences_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_search_ads360_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListSearchAds360LinksResponse() + call.return_value = analytics_admin.ListAudiencesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_search_ads360_links( + client.list_audiences( parent="parent_value", ) @@ -32538,7 +32636,7 @@ def test_list_search_ads360_links_flattened(): assert arg == mock_val -def test_list_search_ads360_links_flattened_error(): +def test_list_audiences_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -32546,31 +32644,29 @@ def test_list_search_ads360_links_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_search_ads360_links( - analytics_admin.ListSearchAds360LinksRequest(), + client.list_audiences( + analytics_admin.ListAudiencesRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_search_ads360_links_flattened_async(): +async def test_list_audiences_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_search_ads360_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListSearchAds360LinksResponse() + call.return_value = analytics_admin.ListAudiencesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListSearchAds360LinksResponse() + analytics_admin.ListAudiencesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_search_ads360_links( + response = await client.list_audiences( parent="parent_value", ) @@ -32584,7 +32680,7 @@ async def test_list_search_ads360_links_flattened_async(): @pytest.mark.asyncio -async def test_list_search_ads360_links_flattened_error_async(): +async def test_list_audiences_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -32592,46 +32688,44 @@ async def test_list_search_ads360_links_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_search_ads360_links( - analytics_admin.ListSearchAds360LinksRequest(), + await client.list_audiences( + analytics_admin.ListAudiencesRequest(), parent="parent_value", ) -def test_list_search_ads360_links_pager(transport_name: str = "grpc"): +def test_list_audiences_pager(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_search_ads360_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), - resources.SearchAds360Link(), - resources.SearchAds360Link(), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), + audience.Audience(), + audience.Audience(), ], next_page_token="abc", ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[], + analytics_admin.ListAudiencesResponse( + audiences=[], next_page_token="def", ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), ], next_page_token="ghi", ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), - resources.SearchAds360Link(), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), + audience.Audience(), ], ), RuntimeError, @@ -32643,9 +32737,7 @@ def test_list_search_ads360_links_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_search_ads360_links( - request={}, retry=retry, timeout=timeout - ) + pager = client.list_audiences(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -32653,93 +32745,89 @@ def test_list_search_ads360_links_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, resources.SearchAds360Link) for i in results) + assert all(isinstance(i, audience.Audience) for i in results) -def test_list_search_ads360_links_pages(transport_name: str = "grpc"): +def test_list_audiences_pages(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and 
fake the request. - with mock.patch.object( - type(client.transport.list_search_ads360_links), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_audiences), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), - resources.SearchAds360Link(), - resources.SearchAds360Link(), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), + audience.Audience(), + audience.Audience(), ], next_page_token="abc", ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[], + analytics_admin.ListAudiencesResponse( + audiences=[], next_page_token="def", ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), ], next_page_token="ghi", ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), - resources.SearchAds360Link(), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), + audience.Audience(), ], ), RuntimeError, ) - pages = list(client.list_search_ads360_links(request={}).pages) + pages = list(client.list_audiences(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_search_ads360_links_async_pager(): +async def test_list_audiences_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_search_ads360_links), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_audiences), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), - resources.SearchAds360Link(), - resources.SearchAds360Link(), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), + audience.Audience(), + audience.Audience(), ], next_page_token="abc", ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[], + analytics_admin.ListAudiencesResponse( + audiences=[], next_page_token="def", ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), ], next_page_token="ghi", ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), - resources.SearchAds360Link(), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), + audience.Audience(), ], ), RuntimeError, ) - async_pager = await client.list_search_ads360_links( + async_pager = await client.list_audiences( request={}, ) assert async_pager.next_page_token == "abc" @@ -32748,45 +32836,43 @@ async def test_list_search_ads360_links_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, resources.SearchAds360Link) for i in responses) + assert all(isinstance(i, audience.Audience) for i in responses) @pytest.mark.asyncio -async def test_list_search_ads360_links_async_pages(): +async def test_list_audiences_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_search_ads360_links), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_audiences), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), - resources.SearchAds360Link(), - resources.SearchAds360Link(), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), + audience.Audience(), + audience.Audience(), ], next_page_token="abc", ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[], + analytics_admin.ListAudiencesResponse( + audiences=[], next_page_token="def", ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), ], next_page_token="ghi", ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), - resources.SearchAds360Link(), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), + audience.Audience(), ], ), RuntimeError, @@ -32795,7 +32881,7 @@ async def test_list_search_ads360_links_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_search_ads360_links(request={}) + await client.list_audiences(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -32805,11 +32891,11 @@ async def test_list_search_ads360_links_async_pages(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateSearchAds360LinkRequest, + analytics_admin.CreateAudienceRequest, dict, ], ) -def test_create_search_ads360_link(request_type, transport: str = "grpc"): +def test_create_audience(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -32820,31 +32906,38 @@ def 
test_create_search_ads360_link(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_audience), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.SearchAds360Link( + call.return_value = gaa_audience.Audience( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + display_name="display_name_value", + description="description_value", + membership_duration_days=2561, + ads_personalization_enabled=True, + exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) - response = client.create_search_ads360_link(request) + response = client.create_audience(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateSearchAds360LinkRequest() + request = analytics_admin.CreateAudienceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.SearchAds360Link) + assert isinstance(response, gaa_audience.Audience) assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.membership_duration_days == 2561 + assert response.ads_personalization_enabled is True + assert ( + response.exclusion_duration_mode + == gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY + ) -def test_create_search_ads360_link_empty_call(): +def test_create_audience_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -32853,19 +32946,17 @@ def test_create_search_ads360_link_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_audience), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_search_ads360_link() + client.create_audience() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateSearchAds360LinkRequest() + assert args[0] == analytics_admin.CreateAudienceRequest() -def test_create_search_ads360_link_non_empty_request_with_auto_populated_field(): +def test_create_audience_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AnalyticsAdminServiceClient( @@ -32876,26 +32967,24 @@ def test_create_search_ads360_link_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.CreateSearchAds360LinkRequest( + request = analytics_admin.CreateAudienceRequest( parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_audience), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_search_ads360_link(request=request) + client.create_audience(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateSearchAds360LinkRequest( + assert args[0] == analytics_admin.CreateAudienceRequest( parent="parent_value", ) -def test_create_search_ads360_link_use_cached_wrapped_rpc(): +def test_create_audience_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -32909,26 +32998,21 @@ def test_create_search_ads360_link_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_search_ads360_link - in client._transport._wrapped_methods - ) + assert client._transport.create_audience in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_search_ads360_link - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_audience] = mock_rpc request = {} - client.create_search_ads360_link(request) + client.create_audience(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_search_ads360_link(request) + client.create_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -32936,7 +33020,7 @@ def test_create_search_ads360_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_search_ads360_link_empty_call_async(): +async def test_create_audience_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -32945,25 +33029,26 @@ async def test_create_search_ads360_link_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_audience), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.SearchAds360Link( + gaa_audience.Audience( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + display_name="display_name_value", + description="description_value", + membership_duration_days=2561, + ads_personalization_enabled=True, + exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) ) - response = await client.create_search_ads360_link() + response = await client.create_audience() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateSearchAds360LinkRequest() + assert args[0] == analytics_admin.CreateAudienceRequest() @pytest.mark.asyncio -async def test_create_search_ads360_link_async_use_cached_wrapped_rpc( +async def test_create_audience_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -32980,33 +33065,33 @@ async def test_create_search_ads360_link_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_search_ads360_link + client._client._transport.create_audience in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_search_ads360_link - ] = mock_object + client._client._transport.create_audience + ] = mock_rpc request = {} - await client.create_search_ads360_link(request) + await client.create_audience(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_search_ads360_link(request) + await client.create_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_search_ads360_link_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateSearchAds360LinkRequest, +async def test_create_audience_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.CreateAudienceRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -33018,54 +33103,59 @@ async def test_create_search_ads360_link_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_audience), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.SearchAds360Link( + gaa_audience.Audience( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + display_name="display_name_value", + description="description_value", + membership_duration_days=2561, + ads_personalization_enabled=True, + exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) ) - response = await client.create_search_ads360_link(request) + response = await client.create_audience(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateSearchAds360LinkRequest() + request = analytics_admin.CreateAudienceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.SearchAds360Link) + assert isinstance(response, gaa_audience.Audience) assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.membership_duration_days == 2561 + assert response.ads_personalization_enabled is True + assert ( + response.exclusion_duration_mode + == gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY + ) @pytest.mark.asyncio -async def test_create_search_ads360_link_async_from_dict(): - await test_create_search_ads360_link_async(request_type=dict) +async def test_create_audience_async_from_dict(): + await test_create_audience_async(request_type=dict) -def test_create_search_ads360_link_field_headers(): +def test_create_audience_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateSearchAds360LinkRequest() + request = analytics_admin.CreateAudienceRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_search_ads360_link), "__call__" - ) as call: - call.return_value = resources.SearchAds360Link() - client.create_search_ads360_link(request) + with mock.patch.object(type(client.transport.create_audience), "__call__") as call: + call.return_value = gaa_audience.Audience() + client.create_audience(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -33081,25 +33171,23 @@ def test_create_search_ads360_link_field_headers(): @pytest.mark.asyncio -async def test_create_search_ads360_link_field_headers_async(): +async def test_create_audience_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateSearchAds360LinkRequest() + request = analytics_admin.CreateAudienceRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_audience), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.SearchAds360Link() + gaa_audience.Audience() ) - await client.create_search_ads360_link(request) + await client.create_audience(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -33114,22 +33202,20 @@ async def test_create_search_ads360_link_field_headers_async(): ) in kw["metadata"] -def test_create_search_ads360_link_flattened(): +def test_create_audience_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_audience), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.SearchAds360Link() + call.return_value = gaa_audience.Audience() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_search_ads360_link( + client.create_audience( parent="parent_value", - search_ads_360_link=resources.SearchAds360Link(name="name_value"), + audience=gaa_audience.Audience(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -33139,12 +33225,12 @@ def test_create_search_ads360_link_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].search_ads_360_link - mock_val = resources.SearchAds360Link(name="name_value") + arg = args[0].audience + mock_val = gaa_audience.Audience(name="name_value") assert arg == mock_val -def test_create_search_ads360_link_flattened_error(): +def test_create_audience_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -33152,34 +33238,32 @@ def test_create_search_ads360_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_search_ads360_link( - analytics_admin.CreateSearchAds360LinkRequest(), + client.create_audience( + analytics_admin.CreateAudienceRequest(), parent="parent_value", - search_ads_360_link=resources.SearchAds360Link(name="name_value"), + audience=gaa_audience.Audience(name="name_value"), ) @pytest.mark.asyncio -async def test_create_search_ads360_link_flattened_async(): +async def test_create_audience_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_audience), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.SearchAds360Link() + call.return_value = gaa_audience.Audience() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.SearchAds360Link() + gaa_audience.Audience() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_search_ads360_link( + response = await client.create_audience( parent="parent_value", - search_ads_360_link=resources.SearchAds360Link(name="name_value"), + audience=gaa_audience.Audience(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -33189,13 +33273,13 @@ async def test_create_search_ads360_link_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].search_ads_360_link - mock_val = resources.SearchAds360Link(name="name_value") + arg = args[0].audience + mock_val = gaa_audience.Audience(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_create_search_ads360_link_flattened_error_async(): +async def test_create_audience_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -33203,21 +33287,21 @@ async def test_create_search_ads360_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_search_ads360_link( - analytics_admin.CreateSearchAds360LinkRequest(), + await client.create_audience( + analytics_admin.CreateAudienceRequest(), parent="parent_value", - search_ads_360_link=resources.SearchAds360Link(name="name_value"), + audience=gaa_audience.Audience(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteSearchAds360LinkRequest, + analytics_admin.UpdateAudienceRequest, dict, ], ) -def test_delete_search_ads360_link(request_type, transport: str = "grpc"): +def test_update_audience(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -33228,24 +33312,38 @@ def test_delete_search_ads360_link(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_audience), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_search_ads360_link(request) + call.return_value = gaa_audience.Audience( + name="name_value", + display_name="display_name_value", + description="description_value", + membership_duration_days=2561, + ads_personalization_enabled=True, + exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, + ) + response = client.update_audience(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteSearchAds360LinkRequest() + request = analytics_admin.UpdateAudienceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, gaa_audience.Audience) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.membership_duration_days == 2561 + assert response.ads_personalization_enabled is True + assert ( + response.exclusion_duration_mode + == gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY + ) -def test_delete_search_ads360_link_empty_call(): +def test_update_audience_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -33254,19 +33352,17 @@ def test_delete_search_ads360_link_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_audience), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_search_ads360_link() + client.update_audience() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteSearchAds360LinkRequest() + assert args[0] == analytics_admin.UpdateAudienceRequest() -def test_delete_search_ads360_link_non_empty_request_with_auto_populated_field(): +def test_update_audience_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AnalyticsAdminServiceClient( @@ -33277,26 +33373,20 @@ def test_delete_search_ads360_link_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.DeleteSearchAds360LinkRequest( - name="name_value", - ) + request = analytics_admin.UpdateAudienceRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_audience), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_search_ads360_link(request=request) + client.update_audience(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteSearchAds360LinkRequest( - name="name_value", - ) + assert args[0] == analytics_admin.UpdateAudienceRequest() -def test_delete_search_ads360_link_use_cached_wrapped_rpc(): +def test_update_audience_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -33310,26 +33400,21 @@ def test_delete_search_ads360_link_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_search_ads360_link - in client._transport._wrapped_methods - ) + assert client._transport.update_audience in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_search_ads360_link - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_audience] = mock_rpc request = {} - client.delete_search_ads360_link(request) + client.update_audience(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_search_ads360_link(request) + client.update_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -33337,7 +33422,7 @@ def test_delete_search_ads360_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_search_ads360_link_empty_call_async(): +async def test_update_audience_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -33346,19 +33431,26 @@ async def test_delete_search_ads360_link_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_audience), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_search_ads360_link() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_audience.Audience( + name="name_value", + display_name="display_name_value", + description="description_value", + membership_duration_days=2561, + ads_personalization_enabled=True, + exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, + ) + ) + response = await client.update_audience() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteSearchAds360LinkRequest() + assert args[0] == analytics_admin.UpdateAudienceRequest() @pytest.mark.asyncio -async def test_delete_search_ads360_link_async_use_cached_wrapped_rpc( +async def test_update_audience_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -33375,33 +33467,33 @@ async def test_delete_search_ads360_link_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_search_ads360_link + client._client._transport.update_audience in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_search_ads360_link - ] = mock_object + client._client._transport.update_audience + ] = mock_rpc request = {} - await client.delete_search_ads360_link(request) + await client.update_audience(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_search_ads360_link(request) + await client.update_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_search_ads360_link_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteSearchAds360LinkRequest, +async def test_update_audience_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.UpdateAudienceRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -33413,45 +33505,59 @@ async def test_delete_search_ads360_link_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_audience), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_search_ads360_link(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_audience.Audience( + name="name_value", + display_name="display_name_value", + description="description_value", + membership_duration_days=2561, + ads_personalization_enabled=True, + exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, + ) + ) + response = await client.update_audience(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteSearchAds360LinkRequest() + request = analytics_admin.UpdateAudienceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, gaa_audience.Audience) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.membership_duration_days == 2561 + assert response.ads_personalization_enabled is True + assert ( + response.exclusion_duration_mode + == gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY + ) @pytest.mark.asyncio -async def test_delete_search_ads360_link_async_from_dict(): - await test_delete_search_ads360_link_async(request_type=dict) +async def test_update_audience_async_from_dict(): + await test_update_audience_async(request_type=dict) -def test_delete_search_ads360_link_field_headers(): +def test_update_audience_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteSearchAds360LinkRequest() + request = analytics_admin.UpdateAudienceRequest() - request.name = "name_value" + request.audience.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_search_ads360_link), "__call__" - ) as call: - call.return_value = None - client.delete_search_ads360_link(request) + with mock.patch.object(type(client.transport.update_audience), "__call__") as call: + call.return_value = gaa_audience.Audience() + client.update_audience(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -33462,28 +33568,28 @@ def test_delete_search_ads360_link_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "audience.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_search_ads360_link_field_headers_async(): +async def test_update_audience_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteSearchAds360LinkRequest() + request = analytics_admin.UpdateAudienceRequest() - request.name = "name_value" + request.audience.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_search_ads360_link), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_search_ads360_link(request) + with mock.patch.object(type(client.transport.update_audience), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_audience.Audience() + ) + await client.update_audience(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -33494,37 +33600,39 @@ async def test_delete_search_ads360_link_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "audience.name=name_value", ) in kw["metadata"] -def test_delete_search_ads360_link_flattened(): +def test_update_audience_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_audience), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = gaa_audience.Audience() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_search_ads360_link( - name="name_value", + client.update_audience( + audience=gaa_audience.Audience(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].audience + mock_val = gaa_audience.Audience(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_delete_search_ads360_link_flattened_error(): +def test_update_audience_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -33532,43 +33640,48 @@ def test_delete_search_ads360_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_search_ads360_link( - analytics_admin.DeleteSearchAds360LinkRequest(), - name="name_value", + client.update_audience( + analytics_admin.UpdateAudienceRequest(), + audience=gaa_audience.Audience(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_delete_search_ads360_link_flattened_async(): +async def test_update_audience_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_audience), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = gaa_audience.Audience() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_audience.Audience() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_search_ads360_link( - name="name_value", + response = await client.update_audience( + audience=gaa_audience.Audience(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].audience + mock_val = gaa_audience.Audience(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_delete_search_ads360_link_flattened_error_async(): +async def test_update_audience_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -33576,20 +33689,21 @@ async def test_delete_search_ads360_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_search_ads360_link( - analytics_admin.DeleteSearchAds360LinkRequest(), - name="name_value", + await client.update_audience( + analytics_admin.UpdateAudienceRequest(), + audience=gaa_audience.Audience(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateSearchAds360LinkRequest, + analytics_admin.ArchiveAudienceRequest, dict, ], ) -def test_update_search_ads360_link(request_type, transport: str = "grpc"): +def test_archive_audience(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -33600,31 +33714,22 @@ def test_update_search_ads360_link(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.SearchAds360Link( - name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", - ) - response = client.update_search_ads360_link(request) + call.return_value = None + response = client.archive_audience(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateSearchAds360LinkRequest() + request = analytics_admin.ArchiveAudienceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.SearchAds360Link) - assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert response is None -def test_update_search_ads360_link_empty_call(): +def test_archive_audience_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -33633,19 +33738,17 @@ def test_update_search_ads360_link_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_search_ads360_link() + client.archive_audience() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateSearchAds360LinkRequest() + assert args[0] == analytics_admin.ArchiveAudienceRequest() -def test_update_search_ads360_link_non_empty_request_with_auto_populated_field(): +def test_archive_audience_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -33656,22 +33759,24 @@ def test_update_search_ads360_link_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.UpdateSearchAds360LinkRequest() + request = analytics_admin.ArchiveAudienceRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_search_ads360_link(request=request) + client.archive_audience(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateSearchAds360LinkRequest() + assert args[0] == analytics_admin.ArchiveAudienceRequest( + name="name_value", + ) -def test_update_search_ads360_link_use_cached_wrapped_rpc(): +def test_archive_audience_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -33685,10 +33790,7 @@ def test_update_search_ads360_link_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_search_ads360_link - in client._transport._wrapped_methods - ) + assert client._transport.archive_audience in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -33696,15 +33798,15 @@ def test_update_search_ads360_link_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_search_ads360_link + client._transport.archive_audience ] = mock_rpc request = {} - client.update_search_ads360_link(request) + client.archive_audience(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_search_ads360_link(request) + client.archive_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -33712,7 +33814,7 @@ def test_update_search_ads360_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_search_ads360_link_empty_call_async(): +async def test_archive_audience_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -33721,25 +33823,17 @@ async def test_update_search_ads360_link_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.SearchAds360Link( - name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", - ) - ) - response = await client.update_search_ads360_link() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.archive_audience() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateSearchAds360LinkRequest() + assert args[0] == analytics_admin.ArchiveAudienceRequest() @pytest.mark.asyncio -async def test_update_search_ads360_link_async_use_cached_wrapped_rpc( +async def test_archive_audience_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -33756,33 +33850,33 @@ async def test_update_search_ads360_link_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_search_ads360_link + client._client._transport.archive_audience in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_search_ads360_link - ] = mock_object + client._client._transport.archive_audience + ] = mock_rpc request = {} - await client.update_search_ads360_link(request) + 
await client.archive_audience(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_search_ads360_link(request) + await client.archive_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_search_ads360_link_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateSearchAds360LinkRequest, +async def test_archive_audience_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.ArchiveAudienceRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -33794,54 +33888,41 @@ async def test_update_search_ads360_link_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_search_ads360_link), "__call__" - ) as call: + with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.SearchAds360Link( - name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", - ) - ) - response = await client.update_search_ads360_link(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.archive_audience(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateSearchAds360LinkRequest() + request = analytics_admin.ArchiveAudienceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.SearchAds360Link) - assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert response is None @pytest.mark.asyncio -async def test_update_search_ads360_link_async_from_dict(): - await test_update_search_ads360_link_async(request_type=dict) +async def test_archive_audience_async_from_dict(): + await test_archive_audience_async(request_type=dict) -def test_update_search_ads360_link_field_headers(): +def test_archive_audience_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateSearchAds360LinkRequest() + request = analytics_admin.ArchiveAudienceRequest() - request.search_ads_360_link.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_search_ads360_link), "__call__" - ) as call: - call.return_value = resources.SearchAds360Link() - client.update_search_ads360_link(request) + with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: + call.return_value = None + client.archive_audience(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -33852,30 +33933,26 @@ def test_update_search_ads360_link_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "search_ads_360_link.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_search_ads360_link_field_headers_async(): +async def test_archive_audience_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateSearchAds360LinkRequest() + request = analytics_admin.ArchiveAudienceRequest() - request.search_ads_360_link.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_search_ads360_link), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.SearchAds360Link() - ) - await client.update_search_ads360_link(request) + with mock.patch.object(type(client.transport.archive_audience), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.archive_audience(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -33886,114 +33963,18 @@ async def test_update_search_ads360_link_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "search_ads_360_link.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_search_ads360_link_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_search_ads360_link), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = resources.SearchAds360Link() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_search_ads360_link( - search_ads_360_link=resources.SearchAds360Link(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].search_ads_360_link - mock_val = resources.SearchAds360Link(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -def test_update_search_ads360_link_flattened_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_search_ads360_link( - analytics_admin.UpdateSearchAds360LinkRequest(), - search_ads_360_link=resources.SearchAds360Link(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -@pytest.mark.asyncio -async def test_update_search_ads360_link_flattened_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_search_ads360_link), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.SearchAds360Link() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.SearchAds360Link() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_search_ads360_link( - search_ads_360_link=resources.SearchAds360Link(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].search_ads_360_link - mock_val = resources.SearchAds360Link(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_search_ads360_link_flattened_error_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_search_ads360_link( - analytics_admin.UpdateSearchAds360LinkRequest(), - search_ads_360_link=resources.SearchAds360Link(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetAttributionSettingsRequest, + analytics_admin.GetSearchAds360LinkRequest, dict, ], ) -def test_get_attribution_settings(request_type, transport: str = "grpc"): +def test_get_search_ads360_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -34005,46 +33986,30 @@ def test_get_attribution_settings(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_attribution_settings), "__call__" + type(client.transport.get_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AttributionSettings( + call.return_value = resources.SearchAds360Link( name="name_value", - acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, - other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, - reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, - ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) - response = client.get_attribution_settings(request) + response = client.get_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetAttributionSettingsRequest() + request = analytics_admin.GetSearchAds360LinkRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AttributionSettings) + assert isinstance(response, resources.SearchAds360Link) assert response.name == "name_value" - assert ( - response.acquisition_conversion_event_lookback_window - == resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS - ) - assert ( - response.other_conversion_event_lookback_window - == resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS - ) - assert ( - response.reporting_attribution_model - == resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN - ) - assert ( - response.ads_web_conversion_data_export_scope - == resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET - ) + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" -def test_get_attribution_settings_empty_call(): +def test_get_search_ads360_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -34054,18 +34019,18 @@ def test_get_attribution_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_attribution_settings), "__call__" + type(client.transport.get_search_ads360_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_attribution_settings() + client.get_search_ads360_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetAttributionSettingsRequest() + assert args[0] == analytics_admin.GetSearchAds360LinkRequest() -def test_get_attribution_settings_non_empty_request_with_auto_populated_field(): +def test_get_search_ads360_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -34076,26 +34041,26 @@ def test_get_attribution_settings_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetAttributionSettingsRequest( + request = analytics_admin.GetSearchAds360LinkRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_attribution_settings), "__call__" + type(client.transport.get_search_ads360_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_attribution_settings(request=request) + client.get_search_ads360_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetAttributionSettingsRequest( + assert args[0] == analytics_admin.GetSearchAds360LinkRequest( name="name_value", ) -def test_get_attribution_settings_use_cached_wrapped_rpc(): +def test_get_search_ads360_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -34110,7 +34075,7 @@ def test_get_attribution_settings_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_attribution_settings + client._transport.get_search_ads360_link in client._transport._wrapped_methods ) @@ -34120,15 +34085,15 @@ def test_get_attribution_settings_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_attribution_settings + client._transport.get_search_ads360_link ] = mock_rpc request = {} - client.get_attribution_settings(request) + client.get_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_attribution_settings(request) + client.get_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -34136,7 +34101,7 @@ def test_get_attribution_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_attribution_settings_empty_call_async(): +async def test_get_search_ads360_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -34146,26 +34111,24 @@ async def test_get_attribution_settings_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_attribution_settings), "__call__" + type(client.transport.get_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AttributionSettings( + resources.SearchAds360Link( name="name_value", - acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, - other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, - reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, - ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) ) - response = await client.get_attribution_settings() + response = await client.get_search_ads360_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetAttributionSettingsRequest() + assert args[0] == analytics_admin.GetSearchAds360LinkRequest() @pytest.mark.asyncio -async def test_get_attribution_settings_async_use_cached_wrapped_rpc( +async def test_get_search_ads360_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -34182,33 +34145,34 @@ async def test_get_attribution_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_attribution_settings + 
client._client._transport.get_search_ads360_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_attribution_settings - ] = mock_object + client._client._transport.get_search_ads360_link + ] = mock_rpc request = {} - await client.get_attribution_settings(request) + await client.get_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_attribution_settings(request) + await client.get_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_attribution_settings_async( +async def test_get_search_ads360_link_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.GetAttributionSettingsRequest, + request_type=analytics_admin.GetSearchAds360LinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -34221,69 +34185,53 @@ async def test_get_attribution_settings_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_attribution_settings), "__call__" + type(client.transport.get_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AttributionSettings( + resources.SearchAds360Link( name="name_value", - acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, - other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, - reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, - ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) ) - response = await client.get_attribution_settings(request) + response = await client.get_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetAttributionSettingsRequest() + request = analytics_admin.GetSearchAds360LinkRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AttributionSettings) + assert isinstance(response, resources.SearchAds360Link) assert response.name == "name_value" - assert ( - response.acquisition_conversion_event_lookback_window - == resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS - ) - assert ( - response.other_conversion_event_lookback_window - == resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS - ) - assert ( - response.reporting_attribution_model - == resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN - ) - assert ( - response.ads_web_conversion_data_export_scope - == resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET - ) + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" @pytest.mark.asyncio -async def test_get_attribution_settings_async_from_dict(): - await test_get_attribution_settings_async(request_type=dict) +async def test_get_search_ads360_link_async_from_dict(): + await test_get_search_ads360_link_async(request_type=dict) -def test_get_attribution_settings_field_headers(): +def test_get_search_ads360_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetAttributionSettingsRequest() + request = analytics_admin.GetSearchAds360LinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_attribution_settings), "__call__" + type(client.transport.get_search_ads360_link), "__call__" ) as call: - call.return_value = resources.AttributionSettings() - client.get_attribution_settings(request) + call.return_value = resources.SearchAds360Link() + client.get_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -34299,25 +34247,25 @@ def test_get_attribution_settings_field_headers(): @pytest.mark.asyncio -async def test_get_attribution_settings_field_headers_async(): +async def test_get_search_ads360_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetAttributionSettingsRequest() + request = analytics_admin.GetSearchAds360LinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_attribution_settings), "__call__" + type(client.transport.get_search_ads360_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AttributionSettings() + resources.SearchAds360Link() ) - await client.get_attribution_settings(request) + await client.get_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -34332,20 +34280,20 @@ async def test_get_attribution_settings_field_headers_async(): ) in kw["metadata"] -def test_get_attribution_settings_flattened(): +def test_get_search_ads360_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_attribution_settings), "__call__" + type(client.transport.get_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AttributionSettings() + call.return_value = resources.SearchAds360Link() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_attribution_settings( + client.get_search_ads360_link( name="name_value", ) @@ -34358,7 +34306,7 @@ def test_get_attribution_settings_flattened(): assert arg == mock_val -def test_get_attribution_settings_flattened_error(): +def test_get_search_ads360_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -34366,31 +34314,31 @@ def test_get_attribution_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_attribution_settings( - analytics_admin.GetAttributionSettingsRequest(), + client.get_search_ads360_link( + analytics_admin.GetSearchAds360LinkRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_attribution_settings_flattened_async(): +async def test_get_search_ads360_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_attribution_settings), "__call__" + type(client.transport.get_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.AttributionSettings() + call.return_value = resources.SearchAds360Link() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AttributionSettings() + resources.SearchAds360Link() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_attribution_settings( + response = await client.get_search_ads360_link( name="name_value", ) @@ -34404,7 +34352,7 @@ async def test_get_attribution_settings_flattened_async(): @pytest.mark.asyncio -async def test_get_attribution_settings_flattened_error_async(): +async def test_get_search_ads360_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -34412,8 +34360,8 @@ async def test_get_attribution_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_attribution_settings( - analytics_admin.GetAttributionSettingsRequest(), + await client.get_search_ads360_link( + analytics_admin.GetSearchAds360LinkRequest(), name="name_value", ) @@ -34421,11 +34369,11 @@ async def test_get_attribution_settings_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateAttributionSettingsRequest, + analytics_admin.ListSearchAds360LinksRequest, dict, ], ) -def test_update_attribution_settings(request_type, transport: str = "grpc"): +def test_list_search_ads360_links(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -34437,46 +34385,26 @@ def test_update_attribution_settings(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_attribution_settings), "__call__" + type(client.transport.list_search_ads360_links), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AttributionSettings( - name="name_value", - acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, - other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, - reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, - ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, + call.return_value = analytics_admin.ListSearchAds360LinksResponse( + next_page_token="next_page_token_value", ) - response = client.update_attribution_settings(request) + response = client.list_search_ads360_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateAttributionSettingsRequest() + request = analytics_admin.ListSearchAds360LinksRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AttributionSettings) - assert response.name == "name_value" - assert ( - response.acquisition_conversion_event_lookback_window - == resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS - ) - assert ( - response.other_conversion_event_lookback_window - == resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS - ) - assert ( - response.reporting_attribution_model - == resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN - ) - assert ( - response.ads_web_conversion_data_export_scope - == resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET - ) + assert isinstance(response, pagers.ListSearchAds360LinksPager) + assert response.next_page_token == "next_page_token_value" -def test_update_attribution_settings_empty_call(): +def test_list_search_ads360_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -34486,18 +34414,18 @@ def test_update_attribution_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_attribution_settings), "__call__" + type(client.transport.list_search_ads360_links), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_attribution_settings() + client.list_search_ads360_links() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateAttributionSettingsRequest() + assert args[0] == analytics_admin.ListSearchAds360LinksRequest() -def test_update_attribution_settings_non_empty_request_with_auto_populated_field(): +def test_list_search_ads360_links_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -34508,22 +34436,28 @@ def test_update_attribution_settings_non_empty_request_with_auto_populated_field # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.UpdateAttributionSettingsRequest() + request = analytics_admin.ListSearchAds360LinksRequest( + parent="parent_value", + page_token="page_token_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_attribution_settings), "__call__" + type(client.transport.list_search_ads360_links), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_attribution_settings(request=request) + client.list_search_ads360_links(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateAttributionSettingsRequest() + assert args[0] == analytics_admin.ListSearchAds360LinksRequest( + parent="parent_value", + page_token="page_token_value", + ) -def test_update_attribution_settings_use_cached_wrapped_rpc(): +def test_list_search_ads360_links_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -34538,7 +34472,7 @@ def test_update_attribution_settings_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_attribution_settings + client._transport.list_search_ads360_links in client._transport._wrapped_methods ) @@ -34548,15 +34482,15 @@ def test_update_attribution_settings_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_attribution_settings + client._transport.list_search_ads360_links ] = mock_rpc request = {} - client.update_attribution_settings(request) + client.list_search_ads360_links(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_attribution_settings(request) + client.list_search_ads360_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -34564,7 +34498,7 @@ def test_update_attribution_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_attribution_settings_empty_call_async(): +async def test_list_search_ads360_links_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -34574,26 +34508,22 @@ async def test_update_attribution_settings_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_attribution_settings), "__call__" + type(client.transport.list_search_ads360_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AttributionSettings( - name="name_value", - acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, - other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, - reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, - ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, + analytics_admin.ListSearchAds360LinksResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_attribution_settings() + response = await client.list_search_ads360_links() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateAttributionSettingsRequest() + assert args[0] == analytics_admin.ListSearchAds360LinksRequest() @pytest.mark.asyncio -async def test_update_attribution_settings_async_use_cached_wrapped_rpc( +async def test_list_search_ads360_links_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -34610,33 +34540,34 @@ async def test_update_attribution_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_attribution_settings + 
client._client._transport.list_search_ads360_links in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_attribution_settings - ] = mock_object + client._client._transport.list_search_ads360_links + ] = mock_rpc request = {} - await client.update_attribution_settings(request) + await client.list_search_ads360_links(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_attribution_settings(request) + await client.list_search_ads360_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_attribution_settings_async( +async def test_list_search_ads360_links_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateAttributionSettingsRequest, + request_type=analytics_admin.ListSearchAds360LinksRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -34649,69 +34580,49 @@ async def test_update_attribution_settings_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_attribution_settings), "__call__" + type(client.transport.list_search_ads360_links), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AttributionSettings( - name="name_value", - acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, - other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, - reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, - ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, + analytics_admin.ListSearchAds360LinksResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_attribution_settings(request) + response = await client.list_search_ads360_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateAttributionSettingsRequest() + request = analytics_admin.ListSearchAds360LinksRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AttributionSettings) - assert response.name == "name_value" - assert ( - response.acquisition_conversion_event_lookback_window - == resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS - ) - assert ( - response.other_conversion_event_lookback_window - == resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS - ) - assert ( - response.reporting_attribution_model - == resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN - ) - assert ( - response.ads_web_conversion_data_export_scope - == resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET - ) + assert isinstance(response, pagers.ListSearchAds360LinksAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_update_attribution_settings_async_from_dict(): - await test_update_attribution_settings_async(request_type=dict) +async def test_list_search_ads360_links_async_from_dict(): + await test_list_search_ads360_links_async(request_type=dict) -def test_update_attribution_settings_field_headers(): +def test_list_search_ads360_links_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateAttributionSettingsRequest() + request = analytics_admin.ListSearchAds360LinksRequest() - request.attribution_settings.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_attribution_settings), "__call__" + type(client.transport.list_search_ads360_links), "__call__" ) as call: - call.return_value = resources.AttributionSettings() - client.update_attribution_settings(request) + call.return_value = analytics_admin.ListSearchAds360LinksResponse() + client.list_search_ads360_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -34722,30 +34633,30 @@ def test_update_attribution_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "attribution_settings.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_attribution_settings_field_headers_async(): +async def test_list_search_ads360_links_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateAttributionSettingsRequest() + request = analytics_admin.ListSearchAds360LinksRequest() - request.attribution_settings.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_attribution_settings), "__call__" + type(client.transport.list_search_ads360_links), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AttributionSettings() + analytics_admin.ListSearchAds360LinksResponse() ) - await client.update_attribution_settings(request) + await client.list_search_ads360_links(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -34756,41 +34667,37 @@ async def test_update_attribution_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "attribution_settings.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_attribution_settings_flattened(): +def test_list_search_ads360_links_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_attribution_settings), "__call__" + type(client.transport.list_search_ads360_links), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AttributionSettings() + call.return_value = analytics_admin.ListSearchAds360LinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_attribution_settings( - attribution_settings=resources.AttributionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_search_ads360_links( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].attribution_settings - mock_val = resources.AttributionSettings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_attribution_settings_flattened_error(): +def test_list_search_ads360_links_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -34798,50 +34705,45 @@ def test_update_attribution_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_attribution_settings( - analytics_admin.UpdateAttributionSettingsRequest(), - attribution_settings=resources.AttributionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_search_ads360_links( + analytics_admin.ListSearchAds360LinksRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_attribution_settings_flattened_async(): +async def test_list_search_ads360_links_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_attribution_settings), "__call__" + type(client.transport.list_search_ads360_links), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.AttributionSettings() + call.return_value = analytics_admin.ListSearchAds360LinksResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AttributionSettings() + analytics_admin.ListSearchAds360LinksResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_attribution_settings( - attribution_settings=resources.AttributionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_search_ads360_links( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].attribution_settings - mock_val = resources.AttributionSettings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_attribution_settings_flattened_error_async(): +async def test_list_search_ads360_links_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -34849,21 +34751,224 @@ async def test_update_attribution_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_attribution_settings( - analytics_admin.UpdateAttributionSettingsRequest(), - attribution_settings=resources.AttributionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_search_ads360_links( + analytics_admin.ListSearchAds360LinksRequest(), + parent="parent_value", + ) + + +def test_list_search_ads360_links_pager(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_search_ads360_links), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + resources.SearchAds360Link(), + resources.SearchAds360Link(), + ], + next_page_token="abc", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[], + next_page_token="def", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + resources.SearchAds360Link(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_search_ads360_links( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.SearchAds360Link) for i in results) + + +def 
test_list_search_ads360_links_pages(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_search_ads360_links), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + resources.SearchAds360Link(), + resources.SearchAds360Link(), + ], + next_page_token="abc", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[], + next_page_token="def", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + resources.SearchAds360Link(), + ], + ), + RuntimeError, + ) + pages = list(client.list_search_ads360_links(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_search_ads360_links_async_pager(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_search_ads360_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + resources.SearchAds360Link(), + resources.SearchAds360Link(), + ], + next_page_token="abc", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[], + next_page_token="def", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + resources.SearchAds360Link(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_search_ads360_links( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.SearchAds360Link) for i in responses) + + +@pytest.mark.asyncio +async def test_list_search_ads360_links_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_search_ads360_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + resources.SearchAds360Link(), + resources.SearchAds360Link(), + ], + next_page_token="abc", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[], + next_page_token="def", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + resources.SearchAds360Link(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_search_ads360_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_admin.RunAccessReportRequest, + analytics_admin.CreateSearchAds360LinkRequest, dict, ], ) -def test_run_access_report(request_type, transport: str = "grpc"): +def test_create_search_ads360_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -34875,26 +34980,30 @@ def test_run_access_report(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.run_access_report), "__call__" + type(client.transport.create_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.RunAccessReportResponse( - row_count=992, + call.return_value = resources.SearchAds360Link( + name="name_value", + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) - response = client.run_access_report(request) + response = client.create_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.RunAccessReportRequest() + request = analytics_admin.CreateSearchAds360LinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.RunAccessReportResponse) - assert response.row_count == 992 + assert isinstance(response, resources.SearchAds360Link) + assert response.name == "name_value" + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" -def test_run_access_report_empty_call(): +def test_create_search_ads360_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -34904,18 +35013,18 @@ def test_run_access_report_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.run_access_report), "__call__" + type(client.transport.create_search_ads360_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.run_access_report() + client.create_search_ads360_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.RunAccessReportRequest() + assert args[0] == analytics_admin.CreateSearchAds360LinkRequest() -def test_run_access_report_non_empty_request_with_auto_populated_field(): +def test_create_search_ads360_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -34926,28 +35035,26 @@ def test_run_access_report_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.RunAccessReportRequest( - entity="entity_value", - time_zone="time_zone_value", + request = analytics_admin.CreateSearchAds360LinkRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.run_access_report), "__call__" + type(client.transport.create_search_ads360_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.run_access_report(request=request) + client.create_search_ads360_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.RunAccessReportRequest( - entity="entity_value", - time_zone="time_zone_value", + assert args[0] == analytics_admin.CreateSearchAds360LinkRequest( + parent="parent_value", ) -def test_run_access_report_use_cached_wrapped_rpc(): +def test_create_search_ads360_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -34961,7 +35068,10 @@ def test_run_access_report_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.run_access_report in client._transport._wrapped_methods + assert ( + client._transport.create_search_ads360_link + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -34969,15 +35079,15 @@ def test_run_access_report_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.run_access_report + client._transport.create_search_ads360_link ] = mock_rpc request = {} - client.run_access_report(request) + client.create_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.run_access_report(request) + client.create_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -34985,7 +35095,7 @@ def test_run_access_report_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_run_access_report_empty_call_async(): +async def test_create_search_ads360_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -34995,22 +35105,24 @@ async def test_run_access_report_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.run_access_report), "__call__" + type(client.transport.create_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.RunAccessReportResponse( - row_count=992, + resources.SearchAds360Link( + name="name_value", + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) ) - response = await client.run_access_report() + response = await client.create_search_ads360_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.RunAccessReportRequest() + assert args[0] == analytics_admin.CreateSearchAds360LinkRequest() @pytest.mark.asyncio -async def test_run_access_report_async_use_cached_wrapped_rpc( +async def test_create_search_ads360_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -35027,32 +35139,34 @@ async def test_run_access_report_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.run_access_report + 
client._client._transport.create_search_ads360_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.run_access_report - ] = mock_object + client._client._transport.create_search_ads360_link + ] = mock_rpc request = {} - await client.run_access_report(request) + await client.create_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.run_access_report(request) + await client.create_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_run_access_report_async( - transport: str = "grpc_asyncio", request_type=analytics_admin.RunAccessReportRequest +async def test_create_search_ads360_link_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.CreateSearchAds360LinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -35065,49 +35179,53 @@ async def test_run_access_report_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.run_access_report), "__call__" + type(client.transport.create_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.RunAccessReportResponse( - row_count=992, + resources.SearchAds360Link( + name="name_value", + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) ) - response = await client.run_access_report(request) + response = await client.create_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.RunAccessReportRequest() + request = analytics_admin.CreateSearchAds360LinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.RunAccessReportResponse) - assert response.row_count == 992 + assert isinstance(response, resources.SearchAds360Link) + assert response.name == "name_value" + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" @pytest.mark.asyncio -async def test_run_access_report_async_from_dict(): - await test_run_access_report_async(request_type=dict) +async def test_create_search_ads360_link_async_from_dict(): + await test_create_search_ads360_link_async(request_type=dict) -def test_run_access_report_field_headers(): +def test_create_search_ads360_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.RunAccessReportRequest() + request = analytics_admin.CreateSearchAds360LinkRequest() - request.entity = "entity_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.run_access_report), "__call__" + type(client.transport.create_search_ads360_link), "__call__" ) as call: - call.return_value = analytics_admin.RunAccessReportResponse() - client.run_access_report(request) + call.return_value = resources.SearchAds360Link() + client.create_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -35118,30 +35236,30 @@ def test_run_access_report_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "entity=entity_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_run_access_report_field_headers_async(): +async def test_create_search_ads360_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.RunAccessReportRequest() + request = analytics_admin.CreateSearchAds360LinkRequest() - request.entity = "entity_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.run_access_report), "__call__" + type(client.transport.create_search_ads360_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.RunAccessReportResponse() + resources.SearchAds360Link() ) - await client.run_access_report(request) + await client.create_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -35152,18 +35270,114 @@ async def test_run_access_report_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "entity=entity_value", + "parent=parent_value", ) in kw["metadata"] +def test_create_search_ads360_link_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_search_ads360_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.SearchAds360Link() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_search_ads360_link( + parent="parent_value", + search_ads_360_link=resources.SearchAds360Link(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].search_ads_360_link + mock_val = resources.SearchAds360Link(name="name_value") + assert arg == mock_val + + +def test_create_search_ads360_link_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_search_ads360_link( + analytics_admin.CreateSearchAds360LinkRequest(), + parent="parent_value", + search_ads_360_link=resources.SearchAds360Link(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_search_ads360_link_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_search_ads360_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.SearchAds360Link() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.SearchAds360Link() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_search_ads360_link( + parent="parent_value", + search_ads_360_link=resources.SearchAds360Link(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].search_ads_360_link + mock_val = resources.SearchAds360Link(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_search_ads360_link_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_search_ads360_link( + analytics_admin.CreateSearchAds360LinkRequest(), + parent="parent_value", + search_ads_360_link=resources.SearchAds360Link(name="name_value"), + ) + + @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateAccessBindingRequest, + analytics_admin.DeleteSearchAds360LinkRequest, dict, ], ) -def test_create_access_binding(request_type, transport: str = "grpc"): +def test_delete_search_ads360_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -35175,29 +35389,23 @@ def test_create_access_binding(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_access_binding), "__call__" + type(client.transport.delete_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AccessBinding( - name="name_value", - roles=["roles_value"], - user="user_value", - ) - response = client.create_access_binding(request) + call.return_value = None + response = client.delete_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateAccessBindingRequest() + request = analytics_admin.DeleteSearchAds360LinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.AccessBinding) - assert response.name == "name_value" - assert response.roles == ["roles_value"] + assert response is None -def test_create_access_binding_empty_call(): +def test_delete_search_ads360_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceClient( @@ -35207,18 +35415,18 @@ def test_create_access_binding_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_access_binding), "__call__" + type(client.transport.delete_search_ads360_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_access_binding() + client.delete_search_ads360_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateAccessBindingRequest() + assert args[0] == analytics_admin.DeleteSearchAds360LinkRequest() -def test_create_access_binding_non_empty_request_with_auto_populated_field(): +def test_delete_search_ads360_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -35229,26 +35437,26 @@ def test_create_access_binding_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.CreateAccessBindingRequest( - parent="parent_value", + request = analytics_admin.DeleteSearchAds360LinkRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_access_binding), "__call__" + type(client.transport.delete_search_ads360_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_access_binding(request=request) + client.delete_search_ads360_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateAccessBindingRequest( - parent="parent_value", + assert args[0] == analytics_admin.DeleteSearchAds360LinkRequest( + name="name_value", ) -def test_create_access_binding_use_cached_wrapped_rpc(): +def test_delete_search_ads360_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -35263,7 +35471,7 @@ def test_create_access_binding_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_access_binding + client._transport.delete_search_ads360_link in client._transport._wrapped_methods ) @@ -35273,15 +35481,15 @@ def test_create_access_binding_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_access_binding + client._transport.delete_search_ads360_link ] = mock_rpc request = {} - client.create_access_binding(request) + client.delete_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_access_binding(request) + client.delete_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -35289,7 +35497,7 @@ def test_create_access_binding_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_access_binding_empty_call_async(): +async def test_delete_search_ads360_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -35299,23 +35507,18 @@ async def test_create_access_binding_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_access_binding), "__call__" + type(client.transport.delete_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AccessBinding( - name="name_value", - roles=["roles_value"], - ) - ) - response = await client.create_access_binding() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_search_ads360_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateAccessBindingRequest() + assert args[0] == analytics_admin.DeleteSearchAds360LinkRequest() @pytest.mark.asyncio -async def test_create_access_binding_async_use_cached_wrapped_rpc( +async def test_delete_search_ads360_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -35332,33 +35535,34 @@ async def test_create_access_binding_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_access_binding + client._client._transport.delete_search_ads360_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_access_binding - ] = mock_object + client._client._transport.delete_search_ads360_link + ] = mock_rpc request = {} - await client.create_access_binding(request) + await client.delete_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_access_binding(request) + await client.delete_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_access_binding_async( +async def test_delete_search_ads360_link_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateAccessBindingRequest, + request_type=analytics_admin.DeleteSearchAds360LinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -35371,51 +35575,44 @@ async def test_create_access_binding_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_access_binding), "__call__" + type(client.transport.delete_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AccessBinding( - name="name_value", - roles=["roles_value"], - ) - ) - response = await client.create_access_binding(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateAccessBindingRequest() + request = analytics_admin.DeleteSearchAds360LinkRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AccessBinding) - assert response.name == "name_value" - assert response.roles == ["roles_value"] + assert response is None @pytest.mark.asyncio -async def test_create_access_binding_async_from_dict(): - await test_create_access_binding_async(request_type=dict) +async def test_delete_search_ads360_link_async_from_dict(): + await test_delete_search_ads360_link_async(request_type=dict) -def test_create_access_binding_field_headers(): +def test_delete_search_ads360_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateAccessBindingRequest() + request = analytics_admin.DeleteSearchAds360LinkRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_access_binding), "__call__" + type(client.transport.delete_search_ads360_link), "__call__" ) as call: - call.return_value = resources.AccessBinding() - client.create_access_binding(request) + call.return_value = None + client.delete_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -35426,30 +35623,28 @@ def test_create_access_binding_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_access_binding_field_headers_async(): +async def test_delete_search_ads360_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_admin.CreateAccessBindingRequest() + request = analytics_admin.DeleteSearchAds360LinkRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_access_binding), "__call__" + type(client.transport.delete_search_ads360_link), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AccessBinding() - ) - await client.create_access_binding(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -35460,41 +35655,37 @@ async def test_create_access_binding_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_access_binding_flattened(): +def test_delete_search_ads360_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_access_binding), "__call__" + type(client.transport.delete_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AccessBinding() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_access_binding( - parent="parent_value", - access_binding=resources.AccessBinding(user="user_value"), + client.delete_search_ads360_link( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].access_binding - mock_val = resources.AccessBinding(user="user_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_access_binding_flattened_error(): +def test_delete_search_ads360_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -35502,50 +35693,43 @@ def test_create_access_binding_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_access_binding( - analytics_admin.CreateAccessBindingRequest(), - parent="parent_value", - access_binding=resources.AccessBinding(user="user_value"), + client.delete_search_ads360_link( + analytics_admin.DeleteSearchAds360LinkRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_access_binding_flattened_async(): +async def test_delete_search_ads360_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_access_binding), "__call__" + type(client.transport.delete_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AccessBinding() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AccessBinding() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_access_binding( - parent="parent_value", - access_binding=resources.AccessBinding(user="user_value"), + response = await client.delete_search_ads360_link( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].access_binding - mock_val = resources.AccessBinding(user="user_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_access_binding_flattened_error_async(): +async def test_delete_search_ads360_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -35553,21 +35737,20 @@ async def test_create_access_binding_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_access_binding( - analytics_admin.CreateAccessBindingRequest(), - parent="parent_value", - access_binding=resources.AccessBinding(user="user_value"), + await client.delete_search_ads360_link( + analytics_admin.DeleteSearchAds360LinkRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetAccessBindingRequest, + analytics_admin.UpdateSearchAds360LinkRequest, dict, ], ) -def test_get_access_binding(request_type, transport: str = "grpc"): +def test_update_search_ads360_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -35579,29 +35762,30 @@ def test_get_access_binding(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_access_binding), "__call__" + type(client.transport.update_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AccessBinding( + call.return_value = resources.SearchAds360Link( name="name_value", - roles=["roles_value"], - user="user_value", + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) - response = client.get_access_binding(request) + response = client.update_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetAccessBindingRequest() + request = analytics_admin.UpdateSearchAds360LinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.AccessBinding) + assert isinstance(response, resources.SearchAds360Link) assert response.name == "name_value" - assert response.roles == ["roles_value"] + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" -def test_get_access_binding_empty_call(): +def test_update_search_ads360_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -35611,18 +35795,18 @@ def test_get_access_binding_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_access_binding), "__call__" + type(client.transport.update_search_ads360_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_access_binding() + client.update_search_ads360_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetAccessBindingRequest() + assert args[0] == analytics_admin.UpdateSearchAds360LinkRequest() -def test_get_access_binding_non_empty_request_with_auto_populated_field(): +def test_update_search_ads360_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -35633,26 +35817,22 @@ def test_get_access_binding_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetAccessBindingRequest( - name="name_value", - ) + request = analytics_admin.UpdateSearchAds360LinkRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_access_binding), "__call__" + type(client.transport.update_search_ads360_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_access_binding(request=request) + client.update_search_ads360_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetAccessBindingRequest( - name="name_value", - ) + assert args[0] == analytics_admin.UpdateSearchAds360LinkRequest() -def test_get_access_binding_use_cached_wrapped_rpc(): +def test_update_search_ads360_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -35667,7 +35847,8 @@ def test_get_access_binding_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_access_binding in client._transport._wrapped_methods + client._transport.update_search_ads360_link + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -35676,15 +35857,15 @@ def test_get_access_binding_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_access_binding + client._transport.update_search_ads360_link ] = mock_rpc request = {} - client.get_access_binding(request) + client.update_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_access_binding(request) + client.update_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -35692,7 +35873,7 @@ def test_get_access_binding_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_access_binding_empty_call_async(): +async def test_update_search_ads360_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -35702,23 +35883,24 @@ async def test_get_access_binding_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_access_binding), "__call__" + type(client.transport.update_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AccessBinding( + resources.SearchAds360Link( name="name_value", - roles=["roles_value"], + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) ) - response = await client.get_access_binding() + response = await client.update_search_ads360_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetAccessBindingRequest() + assert args[0] == analytics_admin.UpdateSearchAds360LinkRequest() @pytest.mark.asyncio -async def test_get_access_binding_async_use_cached_wrapped_rpc( +async def test_update_search_ads360_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -35735,33 +35917,34 @@ async def test_get_access_binding_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_access_binding + client._client._transport.update_search_ads360_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_access_binding - ] = mock_object + client._client._transport.update_search_ads360_link + ] = mock_rpc request = {} - await client.get_access_binding(request) + await client.update_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_access_binding(request) + await client.update_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_access_binding_async( +async def test_update_search_ads360_link_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.GetAccessBindingRequest, + request_type=analytics_admin.UpdateSearchAds360LinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -35774,51 +35957,53 @@ async def test_get_access_binding_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_access_binding), "__call__" + type(client.transport.update_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AccessBinding( + resources.SearchAds360Link( name="name_value", - roles=["roles_value"], + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) ) - response = await client.get_access_binding(request) + response = await client.update_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetAccessBindingRequest() + request = analytics_admin.UpdateSearchAds360LinkRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AccessBinding) + assert isinstance(response, resources.SearchAds360Link) assert response.name == "name_value" - assert response.roles == ["roles_value"] + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" @pytest.mark.asyncio -async def test_get_access_binding_async_from_dict(): - await test_get_access_binding_async(request_type=dict) +async def test_update_search_ads360_link_async_from_dict(): + await test_update_search_ads360_link_async(request_type=dict) -def test_get_access_binding_field_headers(): +def test_update_search_ads360_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetAccessBindingRequest() + request = analytics_admin.UpdateSearchAds360LinkRequest() - request.name = "name_value" + request.search_ads_360_link.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_access_binding), "__call__" + type(client.transport.update_search_ads360_link), "__call__" ) as call: - call.return_value = resources.AccessBinding() - client.get_access_binding(request) + call.return_value = resources.SearchAds360Link() + client.update_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -35829,30 +36014,30 @@ def test_get_access_binding_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "search_ads_360_link.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_access_binding_field_headers_async(): +async def test_update_search_ads360_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetAccessBindingRequest() + request = analytics_admin.UpdateSearchAds360LinkRequest() - request.name = "name_value" + request.search_ads_360_link.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_access_binding), "__call__" + type(client.transport.update_search_ads360_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AccessBinding() + resources.SearchAds360Link() ) - await client.get_access_binding(request) + await client.update_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -35863,37 +36048,41 @@ async def test_get_access_binding_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "search_ads_360_link.name=name_value", ) in kw["metadata"] -def test_get_access_binding_flattened(): +def test_update_search_ads360_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_access_binding), "__call__" + type(client.transport.update_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AccessBinding() + call.return_value = resources.SearchAds360Link() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_access_binding( - name="name_value", + client.update_search_ads360_link( + search_ads_360_link=resources.SearchAds360Link(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].search_ads_360_link + mock_val = resources.SearchAds360Link(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_access_binding_flattened_error(): +def test_update_search_ads360_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -35901,45 +36090,50 @@ def test_get_access_binding_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_access_binding( - analytics_admin.GetAccessBindingRequest(), - name="name_value", + client.update_search_ads360_link( + analytics_admin.UpdateSearchAds360LinkRequest(), + search_ads_360_link=resources.SearchAds360Link(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_access_binding_flattened_async(): +async def test_update_search_ads360_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_access_binding), "__call__" + type(client.transport.update_search_ads360_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AccessBinding() + call.return_value = resources.SearchAds360Link() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AccessBinding() + resources.SearchAds360Link() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_access_binding( - name="name_value", + response = await client.update_search_ads360_link( + search_ads_360_link=resources.SearchAds360Link(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].search_ads_360_link + mock_val = resources.SearchAds360Link(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_access_binding_flattened_error_async(): +async def test_update_search_ads360_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -35947,20 +36141,21 @@ async def test_get_access_binding_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_access_binding( - analytics_admin.GetAccessBindingRequest(), - name="name_value", + await client.update_search_ads360_link( + analytics_admin.UpdateSearchAds360LinkRequest(), + search_ads_360_link=resources.SearchAds360Link(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateAccessBindingRequest, + analytics_admin.GetAttributionSettingsRequest, dict, ], ) -def test_update_access_binding(request_type, transport: str = "grpc"): +def test_get_attribution_settings(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -35972,29 +36167,46 @@ def test_update_access_binding(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_access_binding), "__call__" + type(client.transport.get_attribution_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.AccessBinding( + call.return_value = resources.AttributionSettings( name="name_value", - roles=["roles_value"], - user="user_value", + acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, + other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, + reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, + ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, ) - response = client.update_access_binding(request) + response = client.get_attribution_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateAccessBindingRequest() + request = analytics_admin.GetAttributionSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AccessBinding) + assert isinstance(response, resources.AttributionSettings) assert response.name == "name_value" - assert response.roles == ["roles_value"] + assert ( + response.acquisition_conversion_event_lookback_window + == resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS + ) + assert ( + response.other_conversion_event_lookback_window + == resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS + ) + assert ( + response.reporting_attribution_model + == resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN + ) + assert ( + response.ads_web_conversion_data_export_scope + == resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET + ) -def test_update_access_binding_empty_call(): +def test_get_attribution_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -36004,18 +36216,18 @@ def test_update_access_binding_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_access_binding), "__call__" + type(client.transport.get_attribution_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_access_binding() + client.get_attribution_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateAccessBindingRequest() + assert args[0] == analytics_admin.GetAttributionSettingsRequest() -def test_update_access_binding_non_empty_request_with_auto_populated_field(): +def test_get_attribution_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -36026,22 +36238,26 @@ def test_update_access_binding_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.UpdateAccessBindingRequest() + request = analytics_admin.GetAttributionSettingsRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_access_binding), "__call__" + type(client.transport.get_attribution_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_access_binding(request=request) + client.get_attribution_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateAccessBindingRequest() + assert args[0] == analytics_admin.GetAttributionSettingsRequest( + name="name_value", + ) -def test_update_access_binding_use_cached_wrapped_rpc(): +def test_get_attribution_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -36056,7 +36272,7 @@ def test_update_access_binding_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_access_binding + client._transport.get_attribution_settings in client._transport._wrapped_methods ) @@ -36066,15 +36282,15 @@ def test_update_access_binding_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_access_binding + client._transport.get_attribution_settings ] = mock_rpc request = {} - client.update_access_binding(request) + client.get_attribution_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_access_binding(request) + client.get_attribution_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -36082,7 +36298,7 @@ def test_update_access_binding_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_access_binding_empty_call_async(): +async def test_get_attribution_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -36092,23 +36308,26 @@ async def test_update_access_binding_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_access_binding), "__call__" + type(client.transport.get_attribution_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AccessBinding( + resources.AttributionSettings( name="name_value", - roles=["roles_value"], + acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, + other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, + reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, + ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, ) ) - response = await client.update_access_binding() + response = await client.get_attribution_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateAccessBindingRequest() + assert args[0] == analytics_admin.GetAttributionSettingsRequest() @pytest.mark.asyncio -async def test_update_access_binding_async_use_cached_wrapped_rpc( +async def test_get_attribution_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -36125,33 +36344,34 @@ async def test_update_access_binding_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_access_binding + client._client._transport.get_attribution_settings in client._client._transport._wrapped_methods ) # Replace 
cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_access_binding - ] = mock_object + client._client._transport.get_attribution_settings + ] = mock_rpc request = {} - await client.update_access_binding(request) + await client.get_attribution_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_access_binding(request) + await client.get_attribution_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_access_binding_async( +async def test_get_attribution_settings_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateAccessBindingRequest, + request_type=analytics_admin.GetAttributionSettingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -36164,51 +36384,69 @@ async def test_update_access_binding_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_access_binding), "__call__" + type(client.transport.get_attribution_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AccessBinding( + resources.AttributionSettings( name="name_value", - roles=["roles_value"], + acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, + other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, + reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, + ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, ) ) - response = await client.update_access_binding(request) + response = await client.get_attribution_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateAccessBindingRequest() + request = analytics_admin.GetAttributionSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AccessBinding) + assert isinstance(response, resources.AttributionSettings) assert response.name == "name_value" - assert response.roles == ["roles_value"] + assert ( + response.acquisition_conversion_event_lookback_window + == resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS + ) + assert ( + response.other_conversion_event_lookback_window + == resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS + ) + assert ( + response.reporting_attribution_model + == resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN + ) + assert ( + response.ads_web_conversion_data_export_scope + == resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET + ) @pytest.mark.asyncio -async def test_update_access_binding_async_from_dict(): - await test_update_access_binding_async(request_type=dict) +async def test_get_attribution_settings_async_from_dict(): + await test_get_attribution_settings_async(request_type=dict) -def test_update_access_binding_field_headers(): +def test_get_attribution_settings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateAccessBindingRequest() + request = analytics_admin.GetAttributionSettingsRequest() - request.access_binding.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_access_binding), "__call__" + type(client.transport.get_attribution_settings), "__call__" ) as call: - call.return_value = resources.AccessBinding() - client.update_access_binding(request) + call.return_value = resources.AttributionSettings() + client.get_attribution_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -36219,30 +36457,30 @@ def test_update_access_binding_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "access_binding.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_access_binding_field_headers_async(): +async def test_get_attribution_settings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateAccessBindingRequest() + request = analytics_admin.GetAttributionSettingsRequest() - request.access_binding.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_access_binding), "__call__" + type(client.transport.get_attribution_settings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AccessBinding() + resources.AttributionSettings() ) - await client.update_access_binding(request) + await client.get_attribution_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -36253,37 +36491,37 @@ async def test_update_access_binding_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "access_binding.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_access_binding_flattened(): +def test_get_attribution_settings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_access_binding), "__call__" + type(client.transport.get_attribution_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AccessBinding() + call.return_value = resources.AttributionSettings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_access_binding( - access_binding=resources.AccessBinding(user="user_value"), + client.get_attribution_settings( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].access_binding - mock_val = resources.AccessBinding(user="user_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_access_binding_flattened_error(): +def test_get_attribution_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -36291,45 +36529,45 @@ def test_update_access_binding_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_access_binding( - analytics_admin.UpdateAccessBindingRequest(), - access_binding=resources.AccessBinding(user="user_value"), + client.get_attribution_settings( + analytics_admin.GetAttributionSettingsRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_access_binding_flattened_async(): +async def test_get_attribution_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_access_binding), "__call__" + type(client.transport.get_attribution_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AccessBinding() + call.return_value = resources.AttributionSettings() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AccessBinding() + resources.AttributionSettings() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_access_binding( - access_binding=resources.AccessBinding(user="user_value"), + response = await client.get_attribution_settings( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].access_binding - mock_val = resources.AccessBinding(user="user_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_access_binding_flattened_error_async(): +async def test_get_attribution_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -36337,20 +36575,20 @@ async def test_update_access_binding_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_access_binding( - analytics_admin.UpdateAccessBindingRequest(), - access_binding=resources.AccessBinding(user="user_value"), + await client.get_attribution_settings( + analytics_admin.GetAttributionSettingsRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteAccessBindingRequest, + analytics_admin.UpdateAttributionSettingsRequest, dict, ], ) -def test_delete_access_binding(request_type, transport: str = "grpc"): +def test_update_attribution_settings(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -36362,23 +36600,46 @@ def test_delete_access_binding(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_access_binding), "__call__" + type(client.transport.update_attribution_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_access_binding(request) + call.return_value = resources.AttributionSettings( + name="name_value", + acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, + other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, + reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, + ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, + ) + response = client.update_attribution_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteAccessBindingRequest() + request = analytics_admin.UpdateAttributionSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.AttributionSettings) + assert response.name == "name_value" + assert ( + response.acquisition_conversion_event_lookback_window + == resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS + ) + assert ( + response.other_conversion_event_lookback_window + == resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS + ) + assert ( + response.reporting_attribution_model + == resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN + ) + assert ( + response.ads_web_conversion_data_export_scope + == resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET + ) -def test_delete_access_binding_empty_call(): +def test_update_attribution_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -36388,18 +36649,18 @@ def test_delete_access_binding_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_access_binding), "__call__" + type(client.transport.update_attribution_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_access_binding() + client.update_attribution_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteAccessBindingRequest() + assert args[0] == analytics_admin.UpdateAttributionSettingsRequest() -def test_delete_access_binding_non_empty_request_with_auto_populated_field(): +def test_update_attribution_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -36410,26 +36671,22 @@ def test_delete_access_binding_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.DeleteAccessBindingRequest( - name="name_value", - ) + request = analytics_admin.UpdateAttributionSettingsRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_access_binding), "__call__" + type(client.transport.update_attribution_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_access_binding(request=request) + client.update_attribution_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteAccessBindingRequest( - name="name_value", - ) + assert args[0] == analytics_admin.UpdateAttributionSettingsRequest() -def test_delete_access_binding_use_cached_wrapped_rpc(): +def test_update_attribution_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -36444,7 +36701,7 @@ def test_delete_access_binding_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_access_binding + client._transport.update_attribution_settings in client._transport._wrapped_methods ) @@ -36454,15 +36711,15 @@ def test_delete_access_binding_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_access_binding + client._transport.update_attribution_settings ] = mock_rpc request = {} - client.delete_access_binding(request) + client.update_attribution_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_access_binding(request) + client.update_attribution_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -36470,7 +36727,7 @@ def test_delete_access_binding_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_access_binding_empty_call_async(): +async def test_update_attribution_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -36480,18 +36737,26 @@ async def test_delete_access_binding_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_access_binding), "__call__" + type(client.transport.update_attribution_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_access_binding() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AttributionSettings( + name="name_value", + acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, + other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, + reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, + ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, + ) + ) + response = await client.update_attribution_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteAccessBindingRequest() + assert args[0] == analytics_admin.UpdateAttributionSettingsRequest() @pytest.mark.asyncio -async def test_delete_access_binding_async_use_cached_wrapped_rpc( +async def test_update_attribution_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -36508,33 +36773,34 @@ async def test_delete_access_binding_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_access_binding + client._client._transport.update_attribution_settings in 
client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_access_binding - ] = mock_object + client._client._transport.update_attribution_settings + ] = mock_rpc request = {} - await client.delete_access_binding(request) + await client.update_attribution_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_access_binding(request) + await client.update_attribution_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_access_binding_async( +async def test_update_attribution_settings_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteAccessBindingRequest, + request_type=analytics_admin.UpdateAttributionSettingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -36547,44 +36813,69 @@ async def test_delete_access_binding_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_access_binding), "__call__" + type(client.transport.update_attribution_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_access_binding(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AttributionSettings( + name="name_value", + acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, + other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, + reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, + ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, + ) + ) + response = await client.update_attribution_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteAccessBindingRequest() + request = analytics_admin.UpdateAttributionSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.AttributionSettings) + assert response.name == "name_value" + assert ( + response.acquisition_conversion_event_lookback_window + == resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS + ) + assert ( + response.other_conversion_event_lookback_window + == resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS + ) + assert ( + response.reporting_attribution_model + == resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN + ) + assert ( + response.ads_web_conversion_data_export_scope + == resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET + ) @pytest.mark.asyncio -async def test_delete_access_binding_async_from_dict(): - await test_delete_access_binding_async(request_type=dict) +async def test_update_attribution_settings_async_from_dict(): + await test_update_attribution_settings_async(request_type=dict) -def test_delete_access_binding_field_headers(): +def test_update_attribution_settings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteAccessBindingRequest() + request = analytics_admin.UpdateAttributionSettingsRequest() - request.name = "name_value" + request.attribution_settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_access_binding), "__call__" + type(client.transport.update_attribution_settings), "__call__" ) as call: - call.return_value = None - client.delete_access_binding(request) + call.return_value = resources.AttributionSettings() + client.update_attribution_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -36595,28 +36886,30 @@ def test_delete_access_binding_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "attribution_settings.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_access_binding_field_headers_async(): +async def test_update_attribution_settings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteAccessBindingRequest() + request = analytics_admin.UpdateAttributionSettingsRequest() - request.name = "name_value" + request.attribution_settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_access_binding), "__call__" + type(client.transport.update_attribution_settings), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_access_binding(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AttributionSettings() + ) + await client.update_attribution_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -36627,37 +36920,41 @@ async def test_delete_access_binding_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "attribution_settings.name=name_value", ) in kw["metadata"] -def test_delete_access_binding_flattened(): +def test_update_attribution_settings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_access_binding), "__call__" + type(client.transport.update_attribution_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.AttributionSettings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_access_binding( - name="name_value", + client.update_attribution_settings( + attribution_settings=resources.AttributionSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].attribution_settings + mock_val = resources.AttributionSettings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_delete_access_binding_flattened_error(): +def test_update_attribution_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -36665,43 +36962,50 @@ def test_delete_access_binding_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_access_binding( - analytics_admin.DeleteAccessBindingRequest(), - name="name_value", + client.update_attribution_settings( + analytics_admin.UpdateAttributionSettingsRequest(), + attribution_settings=resources.AttributionSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_delete_access_binding_flattened_async(): +async def test_update_attribution_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_access_binding), "__call__" + type(client.transport.update_attribution_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.AttributionSettings() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AttributionSettings() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_access_binding( - name="name_value", + response = await client.update_attribution_settings( + attribution_settings=resources.AttributionSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].attribution_settings + mock_val = resources.AttributionSettings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_delete_access_binding_flattened_error_async(): +async def test_update_attribution_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -36709,20 +37013,21 @@ async def test_delete_access_binding_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_access_binding( - analytics_admin.DeleteAccessBindingRequest(), - name="name_value", + await client.update_attribution_settings( + analytics_admin.UpdateAttributionSettingsRequest(), + attribution_settings=resources.AttributionSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListAccessBindingsRequest, + analytics_admin.RunAccessReportRequest, dict, ], ) -def test_list_access_bindings(request_type, transport: str = "grpc"): +def test_run_access_report(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -36734,26 +37039,26 @@ def test_list_access_bindings(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_access_bindings), "__call__" + type(client.transport.run_access_report), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.ListAccessBindingsResponse( - next_page_token="next_page_token_value", + call.return_value = analytics_admin.RunAccessReportResponse( + row_count=992, ) - response = client.list_access_bindings(request) + response = client.run_access_report(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListAccessBindingsRequest() + request = analytics_admin.RunAccessReportRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAccessBindingsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_admin.RunAccessReportResponse) + assert response.row_count == 992 -def test_list_access_bindings_empty_call(): +def test_run_access_report_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -36763,18 +37068,18 @@ def test_list_access_bindings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_access_bindings), "__call__" + type(client.transport.run_access_report), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_access_bindings() + client.run_access_report() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListAccessBindingsRequest() + assert args[0] == analytics_admin.RunAccessReportRequest() -def test_list_access_bindings_non_empty_request_with_auto_populated_field(): +def test_run_access_report_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -36785,28 +37090,28 @@ def test_list_access_bindings_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListAccessBindingsRequest( - parent="parent_value", - page_token="page_token_value", + request = analytics_admin.RunAccessReportRequest( + entity="entity_value", + time_zone="time_zone_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_access_bindings), "__call__" + type(client.transport.run_access_report), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_access_bindings(request=request) + client.run_access_report(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListAccessBindingsRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == analytics_admin.RunAccessReportRequest( + entity="entity_value", + time_zone="time_zone_value", ) -def test_list_access_bindings_use_cached_wrapped_rpc(): +def test_run_access_report_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -36820,9 +37125,7 @@ def test_list_access_bindings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_access_bindings in client._transport._wrapped_methods - ) + assert client._transport.run_access_report in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -36830,15 +37133,15 @@ def test_list_access_bindings_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_access_bindings + client._transport.run_access_report ] = mock_rpc request = {} - client.list_access_bindings(request) + client.run_access_report(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_access_bindings(request) + client.run_access_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -36846,7 +37149,7 @@ def test_list_access_bindings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_access_bindings_empty_call_async(): +async def test_run_access_report_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -36856,22 +37159,22 @@ async def test_list_access_bindings_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_access_bindings), "__call__" + type(client.transport.run_access_report), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListAccessBindingsResponse( - next_page_token="next_page_token_value", + analytics_admin.RunAccessReportResponse( + row_count=992, ) ) - response = await client.list_access_bindings() + response = await client.run_access_report() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListAccessBindingsRequest() + assert args[0] == analytics_admin.RunAccessReportRequest() @pytest.mark.asyncio -async def test_list_access_bindings_async_use_cached_wrapped_rpc( +async def test_run_access_report_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -36888,33 +37191,33 @@ async def test_list_access_bindings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_access_bindings + client._client._transport.run_access_report in client._client._transport._wrapped_methods ) # Replace cached wrapped 
function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_access_bindings - ] = mock_object + client._client._transport.run_access_report + ] = mock_rpc request = {} - await client.list_access_bindings(request) + await client.run_access_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_access_bindings(request) + await client.run_access_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_access_bindings_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.ListAccessBindingsRequest, +async def test_run_access_report_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.RunAccessReportRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -36927,49 +37230,49 @@ async def test_list_access_bindings_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_access_bindings), "__call__" + type(client.transport.run_access_report), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListAccessBindingsResponse( - next_page_token="next_page_token_value", + analytics_admin.RunAccessReportResponse( + row_count=992, ) ) - response = await client.list_access_bindings(request) + response = await client.run_access_report(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListAccessBindingsRequest() + request = analytics_admin.RunAccessReportRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAccessBindingsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_admin.RunAccessReportResponse) + assert response.row_count == 992 @pytest.mark.asyncio -async def test_list_access_bindings_async_from_dict(): - await test_list_access_bindings_async(request_type=dict) +async def test_run_access_report_async_from_dict(): + await test_run_access_report_async(request_type=dict) -def test_list_access_bindings_field_headers(): +def test_run_access_report_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListAccessBindingsRequest() + request = analytics_admin.RunAccessReportRequest() - request.parent = "parent_value" + request.entity = "entity_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_access_bindings), "__call__" + type(client.transport.run_access_report), "__call__" ) as call: - call.return_value = analytics_admin.ListAccessBindingsResponse() - client.list_access_bindings(request) + call.return_value = analytics_admin.RunAccessReportResponse() + client.run_access_report(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -36980,30 +37283,30 @@ def test_list_access_bindings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "entity=entity_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_access_bindings_field_headers_async(): +async def test_run_access_report_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListAccessBindingsRequest() + request = analytics_admin.RunAccessReportRequest() - request.parent = "parent_value" + request.entity = "entity_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_access_bindings), "__call__" + type(client.transport.run_access_report), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListAccessBindingsResponse() + analytics_admin.RunAccessReportResponse() ) - await client.list_access_bindings(request) + await client.run_access_report(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -37014,306 +37317,18 @@ async def test_list_access_bindings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "entity=entity_value", ) in kw["metadata"] -def test_list_access_bindings_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_access_bindings), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.ListAccessBindingsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_access_bindings( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_access_bindings_flattened_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_access_bindings( - analytics_admin.ListAccessBindingsRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_access_bindings_flattened_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_access_bindings), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListAccessBindingsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListAccessBindingsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_access_bindings( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_access_bindings_flattened_error_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_access_bindings( - analytics_admin.ListAccessBindingsRequest(), - parent="parent_value", - ) - - -def test_list_access_bindings_pager(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_access_bindings), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - resources.AccessBinding(), - resources.AccessBinding(), - ], - next_page_token="abc", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[], - next_page_token="def", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - resources.AccessBinding(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_access_bindings(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert 
len(results) == 6 - assert all(isinstance(i, resources.AccessBinding) for i in results) - - -def test_list_access_bindings_pages(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_access_bindings), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - resources.AccessBinding(), - resources.AccessBinding(), - ], - next_page_token="abc", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[], - next_page_token="def", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - resources.AccessBinding(), - ], - ), - RuntimeError, - ) - pages = list(client.list_access_bindings(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_access_bindings_async_pager(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_access_bindings), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - resources.AccessBinding(), - resources.AccessBinding(), - ], - next_page_token="abc", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[], - next_page_token="def", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - resources.AccessBinding(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_access_bindings( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.AccessBinding) for i in responses) - - -@pytest.mark.asyncio -async def test_list_access_bindings_async_pages(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_access_bindings), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - resources.AccessBinding(), - resources.AccessBinding(), - ], - next_page_token="abc", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[], - next_page_token="def", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - resources.AccessBinding(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_access_bindings(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - @pytest.mark.parametrize( "request_type", [ - analytics_admin.BatchCreateAccessBindingsRequest, + analytics_admin.CreateAccessBindingRequest, dict, ], ) -def test_batch_create_access_bindings(request_type, transport: str = "grpc"): +def test_create_access_binding(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -37325,23 +37340,29 @@ def test_batch_create_access_bindings(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_access_bindings), "__call__" + type(client.transport.create_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.BatchCreateAccessBindingsResponse() - response = client.batch_create_access_bindings(request) + call.return_value = resources.AccessBinding( + name="name_value", + roles=["roles_value"], + user="user_value", + ) + response = client.create_access_binding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.BatchCreateAccessBindingsRequest() + request = analytics_admin.CreateAccessBindingRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.BatchCreateAccessBindingsResponse) + assert isinstance(response, resources.AccessBinding) + assert response.name == "name_value" + assert response.roles == ["roles_value"] -def test_batch_create_access_bindings_empty_call(): +def test_create_access_binding_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -37351,18 +37372,18 @@ def test_batch_create_access_bindings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_access_bindings), "__call__" + type(client.transport.create_access_binding), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.batch_create_access_bindings() + client.create_access_binding() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.BatchCreateAccessBindingsRequest() + assert args[0] == analytics_admin.CreateAccessBindingRequest() -def test_batch_create_access_bindings_non_empty_request_with_auto_populated_field(): +def test_create_access_binding_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -37373,26 +37394,26 @@ def test_batch_create_access_bindings_non_empty_request_with_auto_populated_fiel # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.BatchCreateAccessBindingsRequest( + request = analytics_admin.CreateAccessBindingRequest( parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_access_bindings), "__call__" + type(client.transport.create_access_binding), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.batch_create_access_bindings(request=request) + client.create_access_binding(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.BatchCreateAccessBindingsRequest( + assert args[0] == analytics_admin.CreateAccessBindingRequest( parent="parent_value", ) -def test_batch_create_access_bindings_use_cached_wrapped_rpc(): +def test_create_access_binding_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -37407,7 +37428,7 @@ def test_batch_create_access_bindings_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.batch_create_access_bindings + client._transport.create_access_binding in client._transport._wrapped_methods ) @@ -37417,15 +37438,15 @@ def test_batch_create_access_bindings_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.batch_create_access_bindings + client._transport.create_access_binding ] = mock_rpc request = {} - client.batch_create_access_bindings(request) + client.create_access_binding(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.batch_create_access_bindings(request) + client.create_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -37433,7 +37454,7 @@ def test_batch_create_access_bindings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_batch_create_access_bindings_empty_call_async(): +async def test_create_access_binding_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -37443,20 +37464,23 @@ async def test_batch_create_access_bindings_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_access_bindings), "__call__" + type(client.transport.create_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.BatchCreateAccessBindingsResponse() + resources.AccessBinding( + name="name_value", + roles=["roles_value"], + ) ) - response = await client.batch_create_access_bindings() + response = await client.create_access_binding() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.BatchCreateAccessBindingsRequest() + assert args[0] == analytics_admin.CreateAccessBindingRequest() @pytest.mark.asyncio -async def test_batch_create_access_bindings_async_use_cached_wrapped_rpc( +async def test_create_access_binding_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -37473,33 +37497,34 @@ async def test_batch_create_access_bindings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.batch_create_access_bindings + client._client._transport.create_access_binding in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.batch_create_access_bindings - ] = mock_object + client._client._transport.create_access_binding + ] = mock_rpc request = {} - await client.batch_create_access_bindings(request) + await client.create_access_binding(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.batch_create_access_bindings(request) + await client.create_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_batch_create_access_bindings_async( +async def test_create_access_binding_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.BatchCreateAccessBindingsRequest, + request_type=analytics_admin.CreateAccessBindingRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -37512,46 +37537,51 @@ async def test_batch_create_access_bindings_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_access_bindings), "__call__" + type(client.transport.create_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.BatchCreateAccessBindingsResponse() + resources.AccessBinding( + name="name_value", + roles=["roles_value"], + ) ) - response = await client.batch_create_access_bindings(request) + response = await client.create_access_binding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.BatchCreateAccessBindingsRequest() + request = analytics_admin.CreateAccessBindingRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.BatchCreateAccessBindingsResponse) + assert isinstance(response, resources.AccessBinding) + assert response.name == "name_value" + assert response.roles == ["roles_value"] @pytest.mark.asyncio -async def test_batch_create_access_bindings_async_from_dict(): - await test_batch_create_access_bindings_async(request_type=dict) +async def test_create_access_binding_async_from_dict(): + await test_create_access_binding_async(request_type=dict) -def test_batch_create_access_bindings_field_headers(): +def test_create_access_binding_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.BatchCreateAccessBindingsRequest() + request = analytics_admin.CreateAccessBindingRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_access_bindings), "__call__" + type(client.transport.create_access_binding), "__call__" ) as call: - call.return_value = analytics_admin.BatchCreateAccessBindingsResponse() - client.batch_create_access_bindings(request) + call.return_value = resources.AccessBinding() + client.create_access_binding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -37567,25 +37597,25 @@ def test_batch_create_access_bindings_field_headers(): @pytest.mark.asyncio -async def test_batch_create_access_bindings_field_headers_async(): +async def test_create_access_binding_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_admin.BatchCreateAccessBindingsRequest() + request = analytics_admin.CreateAccessBindingRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_access_bindings), "__call__" + type(client.transport.create_access_binding), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.BatchCreateAccessBindingsResponse() + resources.AccessBinding() ) - await client.batch_create_access_bindings(request) + await client.create_access_binding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -37600,14 +37630,110 @@ async def test_batch_create_access_bindings_field_headers_async(): ) in kw["metadata"] +def test_create_access_binding_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_access_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AccessBinding() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_access_binding( + parent="parent_value", + access_binding=resources.AccessBinding(user="user_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].access_binding + mock_val = resources.AccessBinding(user="user_value") + assert arg == mock_val + + +def test_create_access_binding_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_access_binding( + analytics_admin.CreateAccessBindingRequest(), + parent="parent_value", + access_binding=resources.AccessBinding(user="user_value"), + ) + + +@pytest.mark.asyncio +async def test_create_access_binding_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_access_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AccessBinding() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AccessBinding() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_access_binding( + parent="parent_value", + access_binding=resources.AccessBinding(user="user_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].access_binding + mock_val = resources.AccessBinding(user="user_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_access_binding_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_access_binding( + analytics_admin.CreateAccessBindingRequest(), + parent="parent_value", + access_binding=resources.AccessBinding(user="user_value"), + ) + + @pytest.mark.parametrize( "request_type", [ - analytics_admin.BatchGetAccessBindingsRequest, + analytics_admin.GetAccessBindingRequest, dict, ], ) -def test_batch_get_access_bindings(request_type, transport: str = "grpc"): +def test_get_access_binding(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -37619,23 +37745,29 @@ def test_batch_get_access_bindings(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_access_bindings), "__call__" + type(client.transport.get_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.BatchGetAccessBindingsResponse() - response = client.batch_get_access_bindings(request) + call.return_value = resources.AccessBinding( + name="name_value", + roles=["roles_value"], + user="user_value", + ) + response = client.get_access_binding(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.BatchGetAccessBindingsRequest() + request = analytics_admin.GetAccessBindingRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.BatchGetAccessBindingsResponse) + assert isinstance(response, resources.AccessBinding) + assert response.name == "name_value" + assert response.roles == ["roles_value"] -def test_batch_get_access_bindings_empty_call(): +def test_get_access_binding_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -37645,18 +37777,18 @@ def test_batch_get_access_bindings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_access_bindings), "__call__" + type(client.transport.get_access_binding), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.batch_get_access_bindings() + client.get_access_binding() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.BatchGetAccessBindingsRequest() + assert args[0] == analytics_admin.GetAccessBindingRequest() -def test_batch_get_access_bindings_non_empty_request_with_auto_populated_field(): +def test_get_access_binding_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AnalyticsAdminServiceClient( @@ -37667,26 +37799,26 @@ def test_batch_get_access_bindings_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.BatchGetAccessBindingsRequest( - parent="parent_value", + request = analytics_admin.GetAccessBindingRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_access_bindings), "__call__" + type(client.transport.get_access_binding), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.batch_get_access_bindings(request=request) + client.get_access_binding(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.BatchGetAccessBindingsRequest( - parent="parent_value", + assert args[0] == analytics_admin.GetAccessBindingRequest( + name="name_value", ) -def test_batch_get_access_bindings_use_cached_wrapped_rpc(): +def test_get_access_binding_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -37701,8 +37833,7 @@ def test_batch_get_access_bindings_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.batch_get_access_bindings - in client._transport._wrapped_methods + client._transport.get_access_binding in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -37711,15 +37842,15 @@ def test_batch_get_access_bindings_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.batch_get_access_bindings + client._transport.get_access_binding ] = mock_rpc request = {} - client.batch_get_access_bindings(request) + client.get_access_binding(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.batch_get_access_bindings(request) + client.get_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -37727,7 +37858,7 @@ def test_batch_get_access_bindings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_batch_get_access_bindings_empty_call_async(): +async def test_get_access_binding_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -37737,20 +37868,23 @@ async def test_batch_get_access_bindings_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_access_bindings), "__call__" + type(client.transport.get_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.BatchGetAccessBindingsResponse() + resources.AccessBinding( + name="name_value", + roles=["roles_value"], + ) ) - response = await client.batch_get_access_bindings() + response = await client.get_access_binding() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.BatchGetAccessBindingsRequest() + assert args[0] == analytics_admin.GetAccessBindingRequest() @pytest.mark.asyncio -async def test_batch_get_access_bindings_async_use_cached_wrapped_rpc( +async def test_get_access_binding_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -37767,33 +37901,34 @@ async def test_batch_get_access_bindings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.batch_get_access_bindings + client._client._transport.get_access_binding in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.batch_get_access_bindings - ] = mock_object + client._client._transport.get_access_binding + ] = mock_rpc request = {} - await client.batch_get_access_bindings(request) + await client.get_access_binding(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.batch_get_access_bindings(request) + await client.get_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_batch_get_access_bindings_async( +async def test_get_access_binding_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.BatchGetAccessBindingsRequest, + request_type=analytics_admin.GetAccessBindingRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -37806,46 +37941,51 @@ async def test_batch_get_access_bindings_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_access_bindings), "__call__" + type(client.transport.get_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.BatchGetAccessBindingsResponse() + resources.AccessBinding( + name="name_value", + roles=["roles_value"], + ) ) - response = await client.batch_get_access_bindings(request) + response = await client.get_access_binding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.BatchGetAccessBindingsRequest() + request = analytics_admin.GetAccessBindingRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.BatchGetAccessBindingsResponse) + assert isinstance(response, resources.AccessBinding) + assert response.name == "name_value" + assert response.roles == ["roles_value"] @pytest.mark.asyncio -async def test_batch_get_access_bindings_async_from_dict(): - await test_batch_get_access_bindings_async(request_type=dict) +async def test_get_access_binding_async_from_dict(): + await test_get_access_binding_async(request_type=dict) -def test_batch_get_access_bindings_field_headers(): +def test_get_access_binding_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.BatchGetAccessBindingsRequest() + request = analytics_admin.GetAccessBindingRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_access_bindings), "__call__" + type(client.transport.get_access_binding), "__call__" ) as call: - call.return_value = analytics_admin.BatchGetAccessBindingsResponse() - client.batch_get_access_bindings(request) + call.return_value = resources.AccessBinding() + client.get_access_binding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -37856,30 +37996,30 @@ def test_batch_get_access_bindings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_batch_get_access_bindings_field_headers_async(): +async def test_get_access_binding_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = analytics_admin.BatchGetAccessBindingsRequest() + request = analytics_admin.GetAccessBindingRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_access_bindings), "__call__" + type(client.transport.get_access_binding), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.BatchGetAccessBindingsResponse() + resources.AccessBinding() ) - await client.batch_get_access_bindings(request) + await client.get_access_binding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -37890,18 +38030,104 @@ async def test_batch_get_access_bindings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] +def test_get_access_binding_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_access_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AccessBinding() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_access_binding( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_access_binding_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_access_binding( + analytics_admin.GetAccessBindingRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_access_binding_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_access_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AccessBinding() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AccessBinding() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_access_binding( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_access_binding_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_access_binding( + analytics_admin.GetAccessBindingRequest(), + name="name_value", + ) + + @pytest.mark.parametrize( "request_type", [ - analytics_admin.BatchUpdateAccessBindingsRequest, + analytics_admin.UpdateAccessBindingRequest, dict, ], ) -def test_batch_update_access_bindings(request_type, transport: str = "grpc"): +def test_update_access_binding(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -37913,23 +38139,29 @@ def test_batch_update_access_bindings(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_update_access_bindings), "__call__" + type(client.transport.update_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.BatchUpdateAccessBindingsResponse() - response = client.batch_update_access_bindings(request) + call.return_value = resources.AccessBinding( + name="name_value", + roles=["roles_value"], + user="user_value", + ) + response = client.update_access_binding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.BatchUpdateAccessBindingsRequest() + request = analytics_admin.UpdateAccessBindingRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.BatchUpdateAccessBindingsResponse) + assert isinstance(response, resources.AccessBinding) + assert response.name == "name_value" + assert response.roles == ["roles_value"] -def test_batch_update_access_bindings_empty_call(): +def test_update_access_binding_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -37939,18 +38171,18 @@ def test_batch_update_access_bindings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_update_access_bindings), "__call__" + type(client.transport.update_access_binding), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.batch_update_access_bindings() + client.update_access_binding() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.BatchUpdateAccessBindingsRequest() + assert args[0] == analytics_admin.UpdateAccessBindingRequest() -def test_batch_update_access_bindings_non_empty_request_with_auto_populated_field(): +def test_update_access_binding_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -37961,26 +38193,22 @@ def test_batch_update_access_bindings_non_empty_request_with_auto_populated_fiel # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.BatchUpdateAccessBindingsRequest( - parent="parent_value", - ) + request = analytics_admin.UpdateAccessBindingRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_update_access_bindings), "__call__" + type(client.transport.update_access_binding), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.batch_update_access_bindings(request=request) + client.update_access_binding(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.BatchUpdateAccessBindingsRequest( - parent="parent_value", - ) + assert args[0] == analytics_admin.UpdateAccessBindingRequest() -def test_batch_update_access_bindings_use_cached_wrapped_rpc(): +def test_update_access_binding_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -37995,7 +38223,7 @@ def test_batch_update_access_bindings_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.batch_update_access_bindings + client._transport.update_access_binding in client._transport._wrapped_methods ) @@ -38005,15 +38233,15 @@ def test_batch_update_access_bindings_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.batch_update_access_bindings + client._transport.update_access_binding ] = mock_rpc request = {} - client.batch_update_access_bindings(request) + client.update_access_binding(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.batch_update_access_bindings(request) + client.update_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -38021,7 +38249,7 @@ def test_batch_update_access_bindings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_batch_update_access_bindings_empty_call_async(): +async def test_update_access_binding_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -38031,20 +38259,23 @@ async def test_batch_update_access_bindings_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_update_access_bindings), "__call__" + type(client.transport.update_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.BatchUpdateAccessBindingsResponse() + resources.AccessBinding( + name="name_value", + roles=["roles_value"], + ) ) - response = await client.batch_update_access_bindings() + response = await client.update_access_binding() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.BatchUpdateAccessBindingsRequest() + assert args[0] == analytics_admin.UpdateAccessBindingRequest() @pytest.mark.asyncio -async def test_batch_update_access_bindings_async_use_cached_wrapped_rpc( +async def test_update_access_binding_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -38061,33 +38292,34 @@ async def test_batch_update_access_bindings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.batch_update_access_bindings + client._client._transport.update_access_binding in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.batch_update_access_bindings - ] = mock_object + client._client._transport.update_access_binding + ] = mock_rpc request = {} - await client.batch_update_access_bindings(request) + await client.update_access_binding(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.batch_update_access_bindings(request) + await client.update_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_batch_update_access_bindings_async( +async def test_update_access_binding_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.BatchUpdateAccessBindingsRequest, + request_type=analytics_admin.UpdateAccessBindingRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -38100,46 +38332,51 @@ async def test_batch_update_access_bindings_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_update_access_bindings), "__call__" + type(client.transport.update_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.BatchUpdateAccessBindingsResponse() + resources.AccessBinding( + name="name_value", + roles=["roles_value"], + ) ) - response = await client.batch_update_access_bindings(request) + response = await client.update_access_binding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.BatchUpdateAccessBindingsRequest() + request = analytics_admin.UpdateAccessBindingRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.BatchUpdateAccessBindingsResponse) + assert isinstance(response, resources.AccessBinding) + assert response.name == "name_value" + assert response.roles == ["roles_value"] @pytest.mark.asyncio -async def test_batch_update_access_bindings_async_from_dict(): - await test_batch_update_access_bindings_async(request_type=dict) +async def test_update_access_binding_async_from_dict(): + await test_update_access_binding_async(request_type=dict) -def test_batch_update_access_bindings_field_headers(): +def test_update_access_binding_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.BatchUpdateAccessBindingsRequest() + request = analytics_admin.UpdateAccessBindingRequest() - request.parent = "parent_value" + request.access_binding.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_update_access_bindings), "__call__" + type(client.transport.update_access_binding), "__call__" ) as call: - call.return_value = analytics_admin.BatchUpdateAccessBindingsResponse() - client.batch_update_access_bindings(request) + call.return_value = resources.AccessBinding() + client.update_access_binding(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -38150,30 +38387,30 @@ def test_batch_update_access_bindings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "access_binding.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_batch_update_access_bindings_field_headers_async(): +async def test_update_access_binding_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.BatchUpdateAccessBindingsRequest() + request = analytics_admin.UpdateAccessBindingRequest() - request.parent = "parent_value" + request.access_binding.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_update_access_bindings), "__call__" + type(client.transport.update_access_binding), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.BatchUpdateAccessBindingsResponse() + resources.AccessBinding() ) - await client.batch_update_access_bindings(request) + await client.update_access_binding(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -38184,306 +38421,104 @@ async def test_batch_update_access_bindings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "access_binding.name=name_value", ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.BatchDeleteAccessBindingsRequest, - dict, - ], -) -def test_batch_delete_access_bindings(request_type, transport: str = "grpc"): +def test_update_access_binding_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_delete_access_bindings), "__call__" + type(client.transport.update_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.batch_delete_access_bindings(request) + call.return_value = resources.AccessBinding() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_access_binding( + access_binding=resources.AccessBinding(user="user_value"), + ) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying call was made with the expected + # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.BatchDeleteAccessBindingsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None + arg = args[0].access_binding + mock_val = resources.AccessBinding(user="user_value") + assert arg == mock_val -def test_batch_delete_access_bindings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_update_access_binding_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_delete_access_bindings), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_access_binding( + analytics_admin.UpdateAccessBindingRequest(), + access_binding=resources.AccessBinding(user="user_value"), ) - client.batch_delete_access_bindings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.BatchDeleteAccessBindingsRequest() -def test_batch_delete_access_bindings_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsAdminServiceClient( +@pytest.mark.asyncio +async def test_update_access_binding_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = analytics_admin.BatchDeleteAccessBindingsRequest( - parent="parent_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_delete_access_bindings), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.batch_delete_access_bindings(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.BatchDeleteAccessBindingsRequest( - parent="parent_value", - ) - - -def test_batch_delete_access_bindings_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.batch_delete_access_bindings - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.batch_delete_access_bindings - ] = mock_rpc - request = {} - client.batch_delete_access_bindings(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.batch_delete_access_bindings(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_batch_delete_access_bindings_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_delete_access_bindings), "__call__" + type(client.transport.update_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.batch_delete_access_bindings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.BatchDeleteAccessBindingsRequest() - + call.return_value = resources.AccessBinding() -@pytest.mark.asyncio -async def test_batch_delete_access_bindings_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AccessBinding() ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.batch_delete_access_bindings - in client._client._transport._wrapped_methods + # Call the method with 
a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_access_binding( + access_binding=resources.AccessBinding(user="user_value"), ) - # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[ - client._client._transport.batch_delete_access_bindings - ] = mock_object - - request = {} - await client.batch_delete_access_bindings(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 - - await client.batch_delete_access_bindings(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 - - -@pytest.mark.asyncio -async def test_batch_delete_access_bindings_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.BatchDeleteAccessBindingsRequest, -): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_delete_access_bindings), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.batch_delete_access_bindings(request) - - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying call was made with the expected + # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.BatchDeleteAccessBindingsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_batch_delete_access_bindings_async_from_dict(): - await test_batch_delete_access_bindings_async(request_type=dict) - - -def test_batch_delete_access_bindings_field_headers(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analytics_admin.BatchDeleteAccessBindingsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_delete_access_bindings), "__call__" - ) as call: - call.return_value = None - client.batch_delete_access_bindings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + arg = args[0].access_binding + mock_val = resources.AccessBinding(user="user_value") + assert arg == mock_val @pytest.mark.asyncio -async def test_batch_delete_access_bindings_field_headers_async(): +async def test_update_access_binding_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analytics_admin.BatchDeleteAccessBindingsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.batch_delete_access_bindings), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.batch_delete_access_bindings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_access_binding( + analytics_admin.UpdateAccessBindingRequest(), + access_binding=resources.AccessBinding(user="user_value"), + ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetExpandedDataSetRequest, + analytics_admin.DeleteAccessBindingRequest, dict, ], ) -def test_get_expanded_data_set(request_type, transport: str = "grpc"): +def test_delete_access_binding(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -38495,34 +38530,23 @@ def test_get_expanded_data_set(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_expanded_data_set), "__call__" + type(client.transport.delete_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = expanded_data_set.ExpandedDataSet( - name="name_value", - display_name="display_name_value", - description="description_value", - dimension_names=["dimension_names_value"], - metric_names=["metric_names_value"], - ) - response = client.get_expanded_data_set(request) + call.return_value = None + response = client.delete_access_binding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetExpandedDataSetRequest() + request = analytics_admin.DeleteAccessBindingRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, expanded_data_set.ExpandedDataSet) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.dimension_names == ["dimension_names_value"] - assert response.metric_names == ["metric_names_value"] + assert response is None -def test_get_expanded_data_set_empty_call(): +def test_delete_access_binding_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -38532,18 +38556,18 @@ def test_get_expanded_data_set_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_expanded_data_set), "__call__" + type(client.transport.delete_access_binding), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_expanded_data_set() + client.delete_access_binding() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetExpandedDataSetRequest() + assert args[0] == analytics_admin.DeleteAccessBindingRequest() -def test_get_expanded_data_set_non_empty_request_with_auto_populated_field(): +def test_delete_access_binding_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -38554,26 +38578,26 @@ def test_get_expanded_data_set_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetExpandedDataSetRequest( + request = analytics_admin.DeleteAccessBindingRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_expanded_data_set), "__call__" + type(client.transport.delete_access_binding), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_expanded_data_set(request=request) + client.delete_access_binding(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetExpandedDataSetRequest( + assert args[0] == analytics_admin.DeleteAccessBindingRequest( name="name_value", ) -def test_get_expanded_data_set_use_cached_wrapped_rpc(): +def test_delete_access_binding_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -38588,7 +38612,7 @@ def test_get_expanded_data_set_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_expanded_data_set + client._transport.delete_access_binding in client._transport._wrapped_methods ) @@ -38598,15 +38622,15 @@ def test_get_expanded_data_set_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_expanded_data_set + client._transport.delete_access_binding ] = mock_rpc request = {} - client.get_expanded_data_set(request) + client.delete_access_binding(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_expanded_data_set(request) + client.delete_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -38614,7 +38638,7 @@ def test_get_expanded_data_set_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_expanded_data_set_empty_call_async(): +async def test_delete_access_binding_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -38624,26 +38648,18 @@ async def test_get_expanded_data_set_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_expanded_data_set), "__call__" + type(client.transport.delete_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - expanded_data_set.ExpandedDataSet( - name="name_value", - display_name="display_name_value", - description="description_value", - dimension_names=["dimension_names_value"], - metric_names=["metric_names_value"], - ) - ) - response = await client.get_expanded_data_set() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_access_binding() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetExpandedDataSetRequest() + assert args[0] == analytics_admin.DeleteAccessBindingRequest() @pytest.mark.asyncio -async def test_get_expanded_data_set_async_use_cached_wrapped_rpc( +async def test_delete_access_binding_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -38660,33 +38676,34 @@ async def test_get_expanded_data_set_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_expanded_data_set + client._client._transport.delete_access_binding in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_expanded_data_set - ] = mock_object + client._client._transport.delete_access_binding + ] = mock_rpc request = {} - await client.get_expanded_data_set(request) + await 
client.delete_access_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_expanded_data_set(request) + await client.delete_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_expanded_data_set_async( +async def test_delete_access_binding_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.GetExpandedDataSetRequest, + request_type=analytics_admin.DeleteAccessBindingRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -38699,57 +38716,44 @@ async def test_get_expanded_data_set_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_expanded_data_set), "__call__" + type(client.transport.delete_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - expanded_data_set.ExpandedDataSet( - name="name_value", - display_name="display_name_value", - description="description_value", - dimension_names=["dimension_names_value"], - metric_names=["metric_names_value"], - ) - ) - response = await client.get_expanded_data_set(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_access_binding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetExpandedDataSetRequest() + request = analytics_admin.DeleteAccessBindingRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, expanded_data_set.ExpandedDataSet) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.dimension_names == ["dimension_names_value"] - assert response.metric_names == ["metric_names_value"] + assert response is None @pytest.mark.asyncio -async def test_get_expanded_data_set_async_from_dict(): - await test_get_expanded_data_set_async(request_type=dict) +async def test_delete_access_binding_async_from_dict(): + await test_delete_access_binding_async(request_type=dict) -def test_get_expanded_data_set_field_headers(): +def test_delete_access_binding_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetExpandedDataSetRequest() + request = analytics_admin.DeleteAccessBindingRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_expanded_data_set), "__call__" + type(client.transport.delete_access_binding), "__call__" ) as call: - call.return_value = expanded_data_set.ExpandedDataSet() - client.get_expanded_data_set(request) + call.return_value = None + client.delete_access_binding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -38765,25 +38769,23 @@ def test_get_expanded_data_set_field_headers(): @pytest.mark.asyncio -async def test_get_expanded_data_set_field_headers_async(): +async def test_delete_access_binding_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_admin.GetExpandedDataSetRequest() + request = analytics_admin.DeleteAccessBindingRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_expanded_data_set), "__call__" + type(client.transport.delete_access_binding), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - expanded_data_set.ExpandedDataSet() - ) - await client.get_expanded_data_set(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_access_binding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -38798,20 +38800,20 @@ async def test_get_expanded_data_set_field_headers_async(): ) in kw["metadata"] -def test_get_expanded_data_set_flattened(): +def test_delete_access_binding_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_expanded_data_set), "__call__" + type(client.transport.delete_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = expanded_data_set.ExpandedDataSet() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_expanded_data_set( + client.delete_access_binding( name="name_value", ) @@ -38824,7 +38826,7 @@ def test_get_expanded_data_set_flattened(): assert arg == mock_val -def test_get_expanded_data_set_flattened_error(): +def test_delete_access_binding_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -38832,31 +38834,29 @@ def test_get_expanded_data_set_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_expanded_data_set( - analytics_admin.GetExpandedDataSetRequest(), + client.delete_access_binding( + analytics_admin.DeleteAccessBindingRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_expanded_data_set_flattened_async(): +async def test_delete_access_binding_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_expanded_data_set), "__call__" + type(client.transport.delete_access_binding), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = expanded_data_set.ExpandedDataSet() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - expanded_data_set.ExpandedDataSet() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_expanded_data_set( + response = await client.delete_access_binding( name="name_value", ) @@ -38870,7 +38870,7 @@ async def test_get_expanded_data_set_flattened_async(): @pytest.mark.asyncio -async def test_get_expanded_data_set_flattened_error_async(): +async def test_delete_access_binding_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -38878,8 +38878,8 @@ async def test_get_expanded_data_set_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_expanded_data_set( - analytics_admin.GetExpandedDataSetRequest(), + await client.delete_access_binding( + analytics_admin.DeleteAccessBindingRequest(), name="name_value", ) @@ -38887,11 +38887,11 @@ async def test_get_expanded_data_set_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListExpandedDataSetsRequest, + analytics_admin.ListAccessBindingsRequest, dict, ], ) -def test_list_expanded_data_sets(request_type, transport: str = "grpc"): +def test_list_access_bindings(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -38903,26 +38903,26 @@ def test_list_expanded_data_sets(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_expanded_data_sets), "__call__" + type(client.transport.list_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.ListExpandedDataSetsResponse( + call.return_value = analytics_admin.ListAccessBindingsResponse( next_page_token="next_page_token_value", ) - response = client.list_expanded_data_sets(request) + response = client.list_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListExpandedDataSetsRequest() + request = analytics_admin.ListAccessBindingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExpandedDataSetsPager) + assert isinstance(response, pagers.ListAccessBindingsPager) assert response.next_page_token == "next_page_token_value" -def test_list_expanded_data_sets_empty_call(): +def test_list_access_bindings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -38932,18 +38932,18 @@ def test_list_expanded_data_sets_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_expanded_data_sets), "__call__" + type(client.transport.list_access_bindings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_expanded_data_sets() + client.list_access_bindings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListExpandedDataSetsRequest() + assert args[0] == analytics_admin.ListAccessBindingsRequest() -def test_list_expanded_data_sets_non_empty_request_with_auto_populated_field(): +def test_list_access_bindings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -38954,28 +38954,28 @@ def test_list_expanded_data_sets_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListExpandedDataSetsRequest( + request = analytics_admin.ListAccessBindingsRequest( parent="parent_value", page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_expanded_data_sets), "__call__" + type(client.transport.list_access_bindings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_expanded_data_sets(request=request) + client.list_access_bindings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListExpandedDataSetsRequest( + assert args[0] == analytics_admin.ListAccessBindingsRequest( parent="parent_value", page_token="page_token_value", ) -def test_list_expanded_data_sets_use_cached_wrapped_rpc(): +def test_list_access_bindings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -38990,8 +38990,7 @@ def test_list_expanded_data_sets_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_expanded_data_sets - in client._transport._wrapped_methods + client._transport.list_access_bindings in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -39000,15 +38999,15 @@ def test_list_expanded_data_sets_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_expanded_data_sets + client._transport.list_access_bindings ] = mock_rpc request = {} - client.list_expanded_data_sets(request) + client.list_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_expanded_data_sets(request) + client.list_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -39016,7 +39015,7 @@ def test_list_expanded_data_sets_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_expanded_data_sets_empty_call_async(): +async def test_list_access_bindings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -39026,22 +39025,22 @@ async def test_list_expanded_data_sets_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_expanded_data_sets), "__call__" + type(client.transport.list_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListExpandedDataSetsResponse( + analytics_admin.ListAccessBindingsResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_expanded_data_sets() + response = await client.list_access_bindings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListExpandedDataSetsRequest() + assert args[0] == analytics_admin.ListAccessBindingsRequest() @pytest.mark.asyncio -async def test_list_expanded_data_sets_async_use_cached_wrapped_rpc( +async def test_list_access_bindings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -39058,33 +39057,34 @@ async def test_list_expanded_data_sets_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_expanded_data_sets + client._client._transport.list_access_bindings in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_expanded_data_sets - ] = mock_object + client._client._transport.list_access_bindings + ] = mock_rpc request = {} - await client.list_expanded_data_sets(request) + await client.list_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_expanded_data_sets(request) + await client.list_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_expanded_data_sets_async( +async def test_list_access_bindings_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ListExpandedDataSetsRequest, + request_type=analytics_admin.ListAccessBindingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -39097,49 +39097,49 @@ async def test_list_expanded_data_sets_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_expanded_data_sets), "__call__" + type(client.transport.list_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListExpandedDataSetsResponse( + analytics_admin.ListAccessBindingsResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_expanded_data_sets(request) + response = await client.list_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListExpandedDataSetsRequest() + request = analytics_admin.ListAccessBindingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListExpandedDataSetsAsyncPager) + assert isinstance(response, pagers.ListAccessBindingsAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_list_expanded_data_sets_async_from_dict(): - await test_list_expanded_data_sets_async(request_type=dict) +async def test_list_access_bindings_async_from_dict(): + await test_list_access_bindings_async(request_type=dict) -def test_list_expanded_data_sets_field_headers(): +def test_list_access_bindings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListExpandedDataSetsRequest() + request = analytics_admin.ListAccessBindingsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_expanded_data_sets), "__call__" + type(client.transport.list_access_bindings), "__call__" ) as call: - call.return_value = analytics_admin.ListExpandedDataSetsResponse() - client.list_expanded_data_sets(request) + call.return_value = analytics_admin.ListAccessBindingsResponse() + client.list_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -39155,25 +39155,25 @@ def test_list_expanded_data_sets_field_headers(): @pytest.mark.asyncio -async def test_list_expanded_data_sets_field_headers_async(): +async def test_list_access_bindings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_admin.ListExpandedDataSetsRequest() + request = analytics_admin.ListAccessBindingsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_expanded_data_sets), "__call__" + type(client.transport.list_access_bindings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListExpandedDataSetsResponse() + analytics_admin.ListAccessBindingsResponse() ) - await client.list_expanded_data_sets(request) + await client.list_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -39188,20 +39188,20 @@ async def test_list_expanded_data_sets_field_headers_async(): ) in kw["metadata"] -def test_list_expanded_data_sets_flattened(): +def test_list_access_bindings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_expanded_data_sets), "__call__" + type(client.transport.list_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListExpandedDataSetsResponse() + call.return_value = analytics_admin.ListAccessBindingsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_expanded_data_sets( + client.list_access_bindings( parent="parent_value", ) @@ -39214,7 +39214,7 @@ def test_list_expanded_data_sets_flattened(): assert arg == mock_val -def test_list_expanded_data_sets_flattened_error(): +def test_list_access_bindings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -39222,31 +39222,31 @@ def test_list_expanded_data_sets_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_expanded_data_sets( - analytics_admin.ListExpandedDataSetsRequest(), + client.list_access_bindings( + analytics_admin.ListAccessBindingsRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_expanded_data_sets_flattened_async(): +async def test_list_access_bindings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_expanded_data_sets), "__call__" + type(client.transport.list_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListExpandedDataSetsResponse() + call.return_value = analytics_admin.ListAccessBindingsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListExpandedDataSetsResponse() + analytics_admin.ListAccessBindingsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_expanded_data_sets( + response = await client.list_access_bindings( parent="parent_value", ) @@ -39260,7 +39260,7 @@ async def test_list_expanded_data_sets_flattened_async(): @pytest.mark.asyncio -async def test_list_expanded_data_sets_flattened_error_async(): +async def test_list_access_bindings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -39268,13 +39268,13 @@ async def test_list_expanded_data_sets_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_expanded_data_sets( - analytics_admin.ListExpandedDataSetsRequest(), + await client.list_access_bindings( + analytics_admin.ListAccessBindingsRequest(), parent="parent_value", ) -def test_list_expanded_data_sets_pager(transport_name: str = "grpc"): +def test_list_access_bindings_pager(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -39282,32 +39282,32 @@ def test_list_expanded_data_sets_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_expanded_data_sets), "__call__" + type(client.transport.list_access_bindings), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), + resources.AccessBinding(), + resources.AccessBinding(), ], next_page_token="abc", ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[], + analytics_admin.ListAccessBindingsResponse( + access_bindings=[], next_page_token="def", ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), ], next_page_token="ghi", ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), + resources.AccessBinding(), ], ), RuntimeError, @@ -39319,7 +39319,7 @@ def test_list_expanded_data_sets_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_expanded_data_sets(request={}, retry=retry, timeout=timeout) + pager = client.list_access_bindings(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -39327,10 +39327,10 @@ def test_list_expanded_data_sets_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, expanded_data_set.ExpandedDataSet) for i in results) + assert all(isinstance(i, resources.AccessBinding) for i in results) -def test_list_expanded_data_sets_pages(transport_name: str = "grpc"): +def test_list_access_bindings_pages(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -39338,82 +39338,82 @@ def test_list_expanded_data_sets_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_expanded_data_sets), "__call__" + type(client.transport.list_access_bindings), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), + resources.AccessBinding(), + resources.AccessBinding(), ], next_page_token="abc", ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[], + analytics_admin.ListAccessBindingsResponse( + access_bindings=[], next_page_token="def", ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), ], next_page_token="ghi", ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), + resources.AccessBinding(), ], ), RuntimeError, ) - pages = list(client.list_expanded_data_sets(request={}).pages) + pages = list(client.list_access_bindings(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_expanded_data_sets_async_pager(): +async def test_list_access_bindings_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual 
call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_expanded_data_sets), + type(client.transport.list_access_bindings), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), + resources.AccessBinding(), + resources.AccessBinding(), ], next_page_token="abc", ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[], + analytics_admin.ListAccessBindingsResponse( + access_bindings=[], next_page_token="def", ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), ], next_page_token="ghi", ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), + resources.AccessBinding(), ], ), RuntimeError, ) - async_pager = await client.list_expanded_data_sets( + async_pager = await client.list_access_bindings( request={}, ) assert async_pager.next_page_token == "abc" @@ -39422,45 +39422,45 @@ async def test_list_expanded_data_sets_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, expanded_data_set.ExpandedDataSet) for i in responses) + assert all(isinstance(i, resources.AccessBinding) for i in responses) @pytest.mark.asyncio -async def test_list_expanded_data_sets_async_pages(): +async def test_list_access_bindings_async_pages(): client = AnalyticsAdminServiceAsyncClient( 
credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_expanded_data_sets), + type(client.transport.list_access_bindings), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), + resources.AccessBinding(), + resources.AccessBinding(), ], next_page_token="abc", ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[], + analytics_admin.ListAccessBindingsResponse( + access_bindings=[], next_page_token="def", ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), ], next_page_token="ghi", ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), + resources.AccessBinding(), ], ), RuntimeError, @@ -39469,7 +39469,7 @@ async def test_list_expanded_data_sets_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_expanded_data_sets(request={}) + await client.list_access_bindings(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -39479,11 +39479,11 @@ async def test_list_expanded_data_sets_async_pages(): 
@pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateExpandedDataSetRequest, + analytics_admin.BatchCreateAccessBindingsRequest, dict, ], ) -def test_create_expanded_data_set(request_type, transport: str = "grpc"): +def test_batch_create_access_bindings(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -39495,34 +39495,23 @@ def test_create_expanded_data_set(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_expanded_data_set), "__call__" + type(client.transport.batch_create_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gaa_expanded_data_set.ExpandedDataSet( - name="name_value", - display_name="display_name_value", - description="description_value", - dimension_names=["dimension_names_value"], - metric_names=["metric_names_value"], - ) - response = client.create_expanded_data_set(request) + call.return_value = analytics_admin.BatchCreateAccessBindingsResponse() + response = client.batch_create_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateExpandedDataSetRequest() + request = analytics_admin.BatchCreateAccessBindingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, gaa_expanded_data_set.ExpandedDataSet) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.dimension_names == ["dimension_names_value"] - assert response.metric_names == ["metric_names_value"] + assert isinstance(response, analytics_admin.BatchCreateAccessBindingsResponse) -def test_create_expanded_data_set_empty_call(): +def test_batch_create_access_bindings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -39532,18 +39521,18 @@ def test_create_expanded_data_set_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_expanded_data_set), "__call__" + type(client.transport.batch_create_access_bindings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_expanded_data_set() + client.batch_create_access_bindings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateExpandedDataSetRequest() + assert args[0] == analytics_admin.BatchCreateAccessBindingsRequest() -def test_create_expanded_data_set_non_empty_request_with_auto_populated_field(): +def test_batch_create_access_bindings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -39554,26 +39543,26 @@ def test_create_expanded_data_set_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_admin.CreateExpandedDataSetRequest( + request = analytics_admin.BatchCreateAccessBindingsRequest( parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_expanded_data_set), "__call__" + type(client.transport.batch_create_access_bindings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_expanded_data_set(request=request) + client.batch_create_access_bindings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateExpandedDataSetRequest( + assert args[0] == analytics_admin.BatchCreateAccessBindingsRequest( parent="parent_value", ) -def test_create_expanded_data_set_use_cached_wrapped_rpc(): +def test_batch_create_access_bindings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -39588,7 +39577,7 @@ def test_create_expanded_data_set_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_expanded_data_set + client._transport.batch_create_access_bindings in client._transport._wrapped_methods ) @@ -39598,15 +39587,15 @@ def test_create_expanded_data_set_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_expanded_data_set + client._transport.batch_create_access_bindings ] = mock_rpc request = {} - client.create_expanded_data_set(request) + client.batch_create_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_expanded_data_set(request) + client.batch_create_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -39614,7 +39603,7 @@ def test_create_expanded_data_set_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_expanded_data_set_empty_call_async(): +async def test_batch_create_access_bindings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -39624,26 +39613,20 @@ async def test_create_expanded_data_set_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_expanded_data_set), "__call__" + type(client.transport.batch_create_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_expanded_data_set.ExpandedDataSet( - name="name_value", - display_name="display_name_value", - description="description_value", - dimension_names=["dimension_names_value"], - metric_names=["metric_names_value"], - ) + analytics_admin.BatchCreateAccessBindingsResponse() ) - response = await client.create_expanded_data_set() + response = await client.batch_create_access_bindings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateExpandedDataSetRequest() + assert args[0] == analytics_admin.BatchCreateAccessBindingsRequest() @pytest.mark.asyncio -async def test_create_expanded_data_set_async_use_cached_wrapped_rpc( +async def test_batch_create_access_bindings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -39660,33 +39643,34 @@ async def 
test_create_expanded_data_set_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_expanded_data_set + client._client._transport.batch_create_access_bindings in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_expanded_data_set - ] = mock_object + client._client._transport.batch_create_access_bindings + ] = mock_rpc request = {} - await client.create_expanded_data_set(request) + await client.batch_create_access_bindings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_expanded_data_set(request) + await client.batch_create_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_expanded_data_set_async( +async def test_batch_create_access_bindings_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateExpandedDataSetRequest, + request_type=analytics_admin.BatchCreateAccessBindingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -39699,57 +39683,46 @@ async def test_create_expanded_data_set_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_expanded_data_set), "__call__" + type(client.transport.batch_create_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_expanded_data_set.ExpandedDataSet( - name="name_value", - display_name="display_name_value", - description="description_value", - dimension_names=["dimension_names_value"], - metric_names=["metric_names_value"], - ) + analytics_admin.BatchCreateAccessBindingsResponse() ) - response = await client.create_expanded_data_set(request) + response = await client.batch_create_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateExpandedDataSetRequest() + request = analytics_admin.BatchCreateAccessBindingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gaa_expanded_data_set.ExpandedDataSet) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.dimension_names == ["dimension_names_value"] - assert response.metric_names == ["metric_names_value"] + assert isinstance(response, analytics_admin.BatchCreateAccessBindingsResponse) @pytest.mark.asyncio -async def test_create_expanded_data_set_async_from_dict(): - await test_create_expanded_data_set_async(request_type=dict) +async def test_batch_create_access_bindings_async_from_dict(): + await test_batch_create_access_bindings_async(request_type=dict) -def test_create_expanded_data_set_field_headers(): +def test_batch_create_access_bindings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_admin.CreateExpandedDataSetRequest() + request = analytics_admin.BatchCreateAccessBindingsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_expanded_data_set), "__call__" + type(client.transport.batch_create_access_bindings), "__call__" ) as call: - call.return_value = gaa_expanded_data_set.ExpandedDataSet() - client.create_expanded_data_set(request) + call.return_value = analytics_admin.BatchCreateAccessBindingsResponse() + client.batch_create_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -39765,25 +39738,25 @@ def test_create_expanded_data_set_field_headers(): @pytest.mark.asyncio -async def test_create_expanded_data_set_field_headers_async(): +async def test_batch_create_access_bindings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateExpandedDataSetRequest() + request = analytics_admin.BatchCreateAccessBindingsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_expanded_data_set), "__call__" + type(client.transport.batch_create_access_bindings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_expanded_data_set.ExpandedDataSet() + analytics_admin.BatchCreateAccessBindingsResponse() ) - await client.create_expanded_data_set(request) + await client.batch_create_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -39798,110 +39771,14 @@ async def test_create_expanded_data_set_field_headers_async(): ) in kw["metadata"] -def test_create_expanded_data_set_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_expanded_data_set), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = gaa_expanded_data_set.ExpandedDataSet() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_expanded_data_set( - parent="parent_value", - expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].expanded_data_set - mock_val = gaa_expanded_data_set.ExpandedDataSet(name="name_value") - assert arg == mock_val - - -def test_create_expanded_data_set_flattened_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_expanded_data_set( - analytics_admin.CreateExpandedDataSetRequest(), - parent="parent_value", - expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), - ) - - -@pytest.mark.asyncio -async def test_create_expanded_data_set_flattened_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_expanded_data_set), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = gaa_expanded_data_set.ExpandedDataSet() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_expanded_data_set.ExpandedDataSet() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_expanded_data_set( - parent="parent_value", - expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].expanded_data_set - mock_val = gaa_expanded_data_set.ExpandedDataSet(name="name_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_expanded_data_set_flattened_error_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_expanded_data_set( - analytics_admin.CreateExpandedDataSetRequest(), - parent="parent_value", - expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), - ) - - @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateExpandedDataSetRequest, + analytics_admin.BatchGetAccessBindingsRequest, dict, ], ) -def test_update_expanded_data_set(request_type, transport: str = "grpc"): +def test_batch_get_access_bindings(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -39913,34 +39790,23 @@ def test_update_expanded_data_set(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_expanded_data_set), "__call__" + type(client.transport.batch_get_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gaa_expanded_data_set.ExpandedDataSet( - name="name_value", - display_name="display_name_value", - description="description_value", - dimension_names=["dimension_names_value"], - metric_names=["metric_names_value"], - ) - response = client.update_expanded_data_set(request) + call.return_value = analytics_admin.BatchGetAccessBindingsResponse() + response = client.batch_get_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateExpandedDataSetRequest() + request = analytics_admin.BatchGetAccessBindingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, gaa_expanded_data_set.ExpandedDataSet) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.dimension_names == ["dimension_names_value"] - assert response.metric_names == ["metric_names_value"] + assert isinstance(response, analytics_admin.BatchGetAccessBindingsResponse) -def test_update_expanded_data_set_empty_call(): +def test_batch_get_access_bindings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -39950,18 +39816,18 @@ def test_update_expanded_data_set_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_expanded_data_set), "__call__" + type(client.transport.batch_get_access_bindings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_expanded_data_set() + client.batch_get_access_bindings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateExpandedDataSetRequest() + assert args[0] == analytics_admin.BatchGetAccessBindingsRequest() -def test_update_expanded_data_set_non_empty_request_with_auto_populated_field(): +def test_batch_get_access_bindings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -39972,22 +39838,26 @@ def test_update_expanded_data_set_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_admin.UpdateExpandedDataSetRequest() + request = analytics_admin.BatchGetAccessBindingsRequest( + parent="parent_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_expanded_data_set), "__call__" + type(client.transport.batch_get_access_bindings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_expanded_data_set(request=request) + client.batch_get_access_bindings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateExpandedDataSetRequest() + assert args[0] == analytics_admin.BatchGetAccessBindingsRequest( + parent="parent_value", + ) -def test_update_expanded_data_set_use_cached_wrapped_rpc(): +def test_batch_get_access_bindings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -40002,7 +39872,7 @@ def test_update_expanded_data_set_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_expanded_data_set + client._transport.batch_get_access_bindings in client._transport._wrapped_methods ) @@ -40012,15 +39882,15 @@ def test_update_expanded_data_set_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_expanded_data_set + client._transport.batch_get_access_bindings ] = mock_rpc request = {} - client.update_expanded_data_set(request) + client.batch_get_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_expanded_data_set(request) + client.batch_get_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -40028,7 +39898,7 @@ def test_update_expanded_data_set_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_expanded_data_set_empty_call_async(): +async def test_batch_get_access_bindings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -40038,26 +39908,20 @@ async def test_update_expanded_data_set_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_expanded_data_set), "__call__" + type(client.transport.batch_get_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_expanded_data_set.ExpandedDataSet( - name="name_value", - display_name="display_name_value", - description="description_value", - dimension_names=["dimension_names_value"], - metric_names=["metric_names_value"], - ) + analytics_admin.BatchGetAccessBindingsResponse() ) - response = await client.update_expanded_data_set() + response = await client.batch_get_access_bindings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateExpandedDataSetRequest() + assert args[0] == analytics_admin.BatchGetAccessBindingsRequest() @pytest.mark.asyncio -async def test_update_expanded_data_set_async_use_cached_wrapped_rpc( +async def test_batch_get_access_bindings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -40074,33 +39938,34 @@ async def test_update_expanded_data_set_async_use_cached_wrapped_rpc( # Ensure 
method has been cached assert ( - client._client._transport.update_expanded_data_set + client._client._transport.batch_get_access_bindings in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_expanded_data_set - ] = mock_object + client._client._transport.batch_get_access_bindings + ] = mock_rpc request = {} - await client.update_expanded_data_set(request) + await client.batch_get_access_bindings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_expanded_data_set(request) + await client.batch_get_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_expanded_data_set_async( +async def test_batch_get_access_bindings_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateExpandedDataSetRequest, + request_type=analytics_admin.BatchGetAccessBindingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -40113,57 +39978,46 @@ async def test_update_expanded_data_set_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_expanded_data_set), "__call__" + type(client.transport.batch_get_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_expanded_data_set.ExpandedDataSet( - name="name_value", - display_name="display_name_value", - description="description_value", - dimension_names=["dimension_names_value"], - metric_names=["metric_names_value"], - ) + analytics_admin.BatchGetAccessBindingsResponse() ) - response = await client.update_expanded_data_set(request) + response = await client.batch_get_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateExpandedDataSetRequest() + request = analytics_admin.BatchGetAccessBindingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gaa_expanded_data_set.ExpandedDataSet) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.dimension_names == ["dimension_names_value"] - assert response.metric_names == ["metric_names_value"] + assert isinstance(response, analytics_admin.BatchGetAccessBindingsResponse) @pytest.mark.asyncio -async def test_update_expanded_data_set_async_from_dict(): - await test_update_expanded_data_set_async(request_type=dict) +async def test_batch_get_access_bindings_async_from_dict(): + await test_batch_get_access_bindings_async(request_type=dict) -def test_update_expanded_data_set_field_headers(): +def test_batch_get_access_bindings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_admin.UpdateExpandedDataSetRequest() + request = analytics_admin.BatchGetAccessBindingsRequest() - request.expanded_data_set.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_expanded_data_set), "__call__" + type(client.transport.batch_get_access_bindings), "__call__" ) as call: - call.return_value = gaa_expanded_data_set.ExpandedDataSet() - client.update_expanded_data_set(request) + call.return_value = analytics_admin.BatchGetAccessBindingsResponse() + client.batch_get_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -40174,30 +40028,30 @@ def test_update_expanded_data_set_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "expanded_data_set.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_expanded_data_set_field_headers_async(): +async def test_batch_get_access_bindings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateExpandedDataSetRequest() + request = analytics_admin.BatchGetAccessBindingsRequest() - request.expanded_data_set.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_expanded_data_set), "__call__" + type(client.transport.batch_get_access_bindings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_expanded_data_set.ExpandedDataSet() + analytics_admin.BatchGetAccessBindingsResponse() ) - await client.update_expanded_data_set(request) + await client.batch_get_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -40208,114 +40062,18 @@ async def test_update_expanded_data_set_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "expanded_data_set.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_expanded_data_set_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_expanded_data_set), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = gaa_expanded_data_set.ExpandedDataSet() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_expanded_data_set( - expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].expanded_data_set - mock_val = gaa_expanded_data_set.ExpandedDataSet(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -def test_update_expanded_data_set_flattened_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_expanded_data_set( - analytics_admin.UpdateExpandedDataSetRequest(), - expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -@pytest.mark.asyncio -async def test_update_expanded_data_set_flattened_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_expanded_data_set), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = gaa_expanded_data_set.ExpandedDataSet() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_expanded_data_set.ExpandedDataSet() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_expanded_data_set( - expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].expanded_data_set - mock_val = gaa_expanded_data_set.ExpandedDataSet(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_expanded_data_set_flattened_error_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_expanded_data_set( - analytics_admin.UpdateExpandedDataSetRequest(), - expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteExpandedDataSetRequest, + analytics_admin.BatchUpdateAccessBindingsRequest, dict, ], ) -def test_delete_expanded_data_set(request_type, transport: str = "grpc"): +def test_batch_update_access_bindings(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -40327,23 +40085,23 @@ def test_delete_expanded_data_set(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_expanded_data_set), "__call__" + type(client.transport.batch_update_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_expanded_data_set(request) + call.return_value = analytics_admin.BatchUpdateAccessBindingsResponse() + response = client.batch_update_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteExpandedDataSetRequest() + request = analytics_admin.BatchUpdateAccessBindingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, analytics_admin.BatchUpdateAccessBindingsResponse) -def test_delete_expanded_data_set_empty_call(): +def test_batch_update_access_bindings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -40353,18 +40111,18 @@ def test_delete_expanded_data_set_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_expanded_data_set), "__call__" + type(client.transport.batch_update_access_bindings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_expanded_data_set() + client.batch_update_access_bindings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteExpandedDataSetRequest() + assert args[0] == analytics_admin.BatchUpdateAccessBindingsRequest() -def test_delete_expanded_data_set_non_empty_request_with_auto_populated_field(): +def test_batch_update_access_bindings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -40375,26 +40133,26 @@ def test_delete_expanded_data_set_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_admin.DeleteExpandedDataSetRequest( - name="name_value", + request = analytics_admin.BatchUpdateAccessBindingsRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_expanded_data_set), "__call__" + type(client.transport.batch_update_access_bindings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_expanded_data_set(request=request) + client.batch_update_access_bindings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteExpandedDataSetRequest( - name="name_value", + assert args[0] == analytics_admin.BatchUpdateAccessBindingsRequest( + parent="parent_value", ) -def test_delete_expanded_data_set_use_cached_wrapped_rpc(): +def test_batch_update_access_bindings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -40409,7 +40167,7 @@ def test_delete_expanded_data_set_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_expanded_data_set + client._transport.batch_update_access_bindings in client._transport._wrapped_methods ) @@ -40419,15 +40177,15 @@ def test_delete_expanded_data_set_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_expanded_data_set + client._transport.batch_update_access_bindings ] = mock_rpc request = {} - client.delete_expanded_data_set(request) + client.batch_update_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_expanded_data_set(request) + client.batch_update_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -40435,7 +40193,7 @@ def test_delete_expanded_data_set_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_expanded_data_set_empty_call_async(): +async def test_batch_update_access_bindings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -40445,18 +40203,20 @@ async def test_delete_expanded_data_set_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_expanded_data_set), "__call__" + type(client.transport.batch_update_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_expanded_data_set() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.BatchUpdateAccessBindingsResponse() + ) + response = await client.batch_update_access_bindings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteExpandedDataSetRequest() + assert args[0] == analytics_admin.BatchUpdateAccessBindingsRequest() @pytest.mark.asyncio -async def test_delete_expanded_data_set_async_use_cached_wrapped_rpc( +async def test_batch_update_access_bindings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -40473,33 +40233,34 @@ async def test_delete_expanded_data_set_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_expanded_data_set + 
client._client._transport.batch_update_access_bindings in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_expanded_data_set - ] = mock_object + client._client._transport.batch_update_access_bindings + ] = mock_rpc request = {} - await client.delete_expanded_data_set(request) + await client.batch_update_access_bindings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_expanded_data_set(request) + await client.batch_update_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_expanded_data_set_async( +async def test_batch_update_access_bindings_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteExpandedDataSetRequest, + request_type=analytics_admin.BatchUpdateAccessBindingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -40512,44 +40273,46 @@ async def test_delete_expanded_data_set_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_expanded_data_set), "__call__" + type(client.transport.batch_update_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_expanded_data_set(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.BatchUpdateAccessBindingsResponse() + ) + response = await client.batch_update_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteExpandedDataSetRequest() + request = analytics_admin.BatchUpdateAccessBindingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, analytics_admin.BatchUpdateAccessBindingsResponse) @pytest.mark.asyncio -async def test_delete_expanded_data_set_async_from_dict(): - await test_delete_expanded_data_set_async(request_type=dict) +async def test_batch_update_access_bindings_async_from_dict(): + await test_batch_update_access_bindings_async(request_type=dict) -def test_delete_expanded_data_set_field_headers(): +def test_batch_update_access_bindings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteExpandedDataSetRequest() + request = analytics_admin.BatchUpdateAccessBindingsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_expanded_data_set), "__call__" + type(client.transport.batch_update_access_bindings), "__call__" ) as call: - call.return_value = None - client.delete_expanded_data_set(request) + call.return_value = analytics_admin.BatchUpdateAccessBindingsResponse() + client.batch_update_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -40560,28 +40323,30 @@ def test_delete_expanded_data_set_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_expanded_data_set_field_headers_async(): +async def test_batch_update_access_bindings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteExpandedDataSetRequest() + request = analytics_admin.BatchUpdateAccessBindingsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_expanded_data_set), "__call__" + type(client.transport.batch_update_access_bindings), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_expanded_data_set(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.BatchUpdateAccessBindingsResponse() + ) + await client.batch_update_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -40592,102 +40357,18 @@ async def test_delete_expanded_data_set_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_expanded_data_set_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_expanded_data_set), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_expanded_data_set( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_expanded_data_set_flattened_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_expanded_data_set( - analytics_admin.DeleteExpandedDataSetRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_expanded_data_set_flattened_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_expanded_data_set), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_expanded_data_set( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_expanded_data_set_flattened_error_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_expanded_data_set( - analytics_admin.DeleteExpandedDataSetRequest(), - name="name_value", - ) - - @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetChannelGroupRequest, + analytics_admin.BatchDeleteAccessBindingsRequest, dict, ], ) -def test_get_channel_group(request_type, transport: str = "grpc"): +def test_batch_delete_access_bindings(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -40699,32 +40380,23 @@ def test_get_channel_group(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_group), "__call__" + type(client.transport.batch_delete_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = channel_group.ChannelGroup( - name="name_value", - display_name="display_name_value", - description="description_value", - system_defined=True, - ) - response = client.get_channel_group(request) + call.return_value = None + response = client.batch_delete_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetChannelGroupRequest() + request = analytics_admin.BatchDeleteAccessBindingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, channel_group.ChannelGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.system_defined is True + assert response is None -def test_get_channel_group_empty_call(): +def test_batch_delete_access_bindings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -40734,18 +40406,18 @@ def test_get_channel_group_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_group), "__call__" + type(client.transport.batch_delete_access_bindings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_channel_group() + client.batch_delete_access_bindings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetChannelGroupRequest() + assert args[0] == analytics_admin.BatchDeleteAccessBindingsRequest() -def test_get_channel_group_non_empty_request_with_auto_populated_field(): +def test_batch_delete_access_bindings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -40756,26 +40428,26 @@ def test_get_channel_group_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_admin.GetChannelGroupRequest( - name="name_value", + request = analytics_admin.BatchDeleteAccessBindingsRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_group), "__call__" + type(client.transport.batch_delete_access_bindings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_channel_group(request=request) + client.batch_delete_access_bindings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetChannelGroupRequest( - name="name_value", + assert args[0] == analytics_admin.BatchDeleteAccessBindingsRequest( + parent="parent_value", ) -def test_get_channel_group_use_cached_wrapped_rpc(): +def test_batch_delete_access_bindings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -40789,7 +40461,10 @@ def test_get_channel_group_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_channel_group in client._transport._wrapped_methods + assert ( + client._transport.batch_delete_access_bindings + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -40797,15 +40472,15 @@ def test_get_channel_group_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_channel_group + client._transport.batch_delete_access_bindings ] = mock_rpc request = {} - client.get_channel_group(request) + client.batch_delete_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_channel_group(request) + client.batch_delete_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -40813,7 +40488,7 @@ def test_get_channel_group_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_channel_group_empty_call_async(): +async def test_batch_delete_access_bindings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -40823,25 +40498,18 @@ async def test_get_channel_group_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_group), "__call__" + type(client.transport.batch_delete_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - channel_group.ChannelGroup( - name="name_value", - display_name="display_name_value", - description="description_value", - system_defined=True, - ) - ) - response = await client.get_channel_group() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.batch_delete_access_bindings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetChannelGroupRequest() + assert args[0] == analytics_admin.BatchDeleteAccessBindingsRequest() @pytest.mark.asyncio -async def test_get_channel_group_async_use_cached_wrapped_rpc( +async def test_batch_delete_access_bindings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -40858,32 +40526,34 @@ async def test_get_channel_group_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_channel_group + 
client._client._transport.batch_delete_access_bindings in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_channel_group - ] = mock_object + client._client._transport.batch_delete_access_bindings + ] = mock_rpc request = {} - await client.get_channel_group(request) + await client.batch_delete_access_bindings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_channel_group(request) + await client.batch_delete_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_channel_group_async( - transport: str = "grpc_asyncio", request_type=analytics_admin.GetChannelGroupRequest +async def test_batch_delete_access_bindings_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.BatchDeleteAccessBindingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -40896,55 +40566,44 @@ async def test_get_channel_group_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_group), "__call__" + type(client.transport.batch_delete_access_bindings), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - channel_group.ChannelGroup( - name="name_value", - display_name="display_name_value", - description="description_value", - system_defined=True, - ) - ) - response = await client.get_channel_group(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.batch_delete_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetChannelGroupRequest() + request = analytics_admin.BatchDeleteAccessBindingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, channel_group.ChannelGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.system_defined is True + assert response is None @pytest.mark.asyncio -async def test_get_channel_group_async_from_dict(): - await test_get_channel_group_async(request_type=dict) +async def test_batch_delete_access_bindings_async_from_dict(): + await test_batch_delete_access_bindings_async(request_type=dict) -def test_get_channel_group_field_headers(): +def test_batch_delete_access_bindings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetChannelGroupRequest() + request = analytics_admin.BatchDeleteAccessBindingsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_channel_group), "__call__" + type(client.transport.batch_delete_access_bindings), "__call__" ) as call: - call.return_value = channel_group.ChannelGroup() - client.get_channel_group(request) + call.return_value = None + client.batch_delete_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -40955,30 +40614,28 @@ def test_get_channel_group_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_channel_group_field_headers_async(): +async def test_batch_delete_access_bindings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetChannelGroupRequest() + request = analytics_admin.BatchDeleteAccessBindingsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_group), "__call__" + type(client.transport.batch_delete_access_bindings), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - channel_group.ChannelGroup() - ) - await client.get_channel_group(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.batch_delete_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -40989,69 +40646,392 @@ async def test_get_channel_group_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_channel_group_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetExpandedDataSetRequest, + dict, + ], +) +def test_get_expanded_data_set(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_group), "__call__" + type(client.transport.get_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = channel_group.ChannelGroup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_channel_group( + call.return_value = expanded_data_set.ExpandedDataSet( name="name_value", + display_name="display_name_value", + description="description_value", + dimension_names=["dimension_names_value"], + metric_names=["metric_names_value"], ) + response = client.get_expanded_data_set(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + request = analytics_admin.GetExpandedDataSetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, expanded_data_set.ExpandedDataSet) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.dimension_names == ["dimension_names_value"] + assert response.metric_names == ["metric_names_value"] -def test_get_channel_group_flattened_error(): +def test_get_expanded_data_set_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_channel_group( - analytics_admin.GetChannelGroupRequest(), - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_expanded_data_set), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_expanded_data_set() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetExpandedDataSetRequest() + + +def test_get_expanded_data_set_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = analytics_admin.GetExpandedDataSetRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_expanded_data_set), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_expanded_data_set(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetExpandedDataSetRequest( + name="name_value", + ) + + +def test_get_expanded_data_set_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_expanded_data_set + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[ + client._transport.get_expanded_data_set + ] = mock_rpc + request = {} + client.get_expanded_data_set(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_expanded_data_set(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_channel_group_flattened_async(): +async def test_get_expanded_data_set_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_group), "__call__" + type(client.transport.get_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = channel_group.ChannelGroup() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + expanded_data_set.ExpandedDataSet( + name="name_value", + display_name="display_name_value", + description="description_value", + dimension_names=["dimension_names_value"], + metric_names=["metric_names_value"], + ) + ) + response = await client.get_expanded_data_set() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetExpandedDataSetRequest() + + +@pytest.mark.asyncio +async def test_get_expanded_data_set_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + 
assert ( + client._client._transport.get_expanded_data_set + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_expanded_data_set + ] = mock_rpc + + request = {} + await client.get_expanded_data_set(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_expanded_data_set(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_expanded_data_set_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.GetExpandedDataSetRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_expanded_data_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - channel_group.ChannelGroup() + expanded_data_set.ExpandedDataSet( + name="name_value", + display_name="display_name_value", + description="description_value", + dimension_names=["dimension_names_value"], + metric_names=["metric_names_value"], + ) + ) + response = await client.get_expanded_data_set(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.GetExpandedDataSetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, expanded_data_set.ExpandedDataSet) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.dimension_names == ["dimension_names_value"] + assert response.metric_names == ["metric_names_value"] + + +@pytest.mark.asyncio +async def test_get_expanded_data_set_async_from_dict(): + await test_get_expanded_data_set_async(request_type=dict) + + +def test_get_expanded_data_set_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.GetExpandedDataSetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_expanded_data_set), "__call__" + ) as call: + call.return_value = expanded_data_set.ExpandedDataSet() + client.get_expanded_data_set(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_expanded_data_set_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_admin.GetExpandedDataSetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_expanded_data_set), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + expanded_data_set.ExpandedDataSet() + ) + await client.get_expanded_data_set(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_expanded_data_set_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_expanded_data_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = expanded_data_set.ExpandedDataSet() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_expanded_data_set( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_expanded_data_set_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_expanded_data_set( + analytics_admin.GetExpandedDataSetRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_expanded_data_set_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_expanded_data_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = expanded_data_set.ExpandedDataSet() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + expanded_data_set.ExpandedDataSet() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_channel_group( + response = await client.get_expanded_data_set( name="name_value", ) @@ -41065,7 +41045,7 @@ async def test_get_channel_group_flattened_async(): @pytest.mark.asyncio -async def test_get_channel_group_flattened_error_async(): +async def test_get_expanded_data_set_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -41073,8 +41053,8 @@ async def test_get_channel_group_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_channel_group( - analytics_admin.GetChannelGroupRequest(), + await client.get_expanded_data_set( + analytics_admin.GetExpandedDataSetRequest(), name="name_value", ) @@ -41082,11 +41062,11 @@ async def test_get_channel_group_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListChannelGroupsRequest, + analytics_admin.ListExpandedDataSetsRequest, dict, ], ) -def test_list_channel_groups(request_type, transport: str = "grpc"): +def test_list_expanded_data_sets(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -41098,26 +41078,26 @@ def test_list_channel_groups(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_groups), "__call__" + type(client.transport.list_expanded_data_sets), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListChannelGroupsResponse( + call.return_value = analytics_admin.ListExpandedDataSetsResponse( next_page_token="next_page_token_value", ) - response = client.list_channel_groups(request) + response = client.list_expanded_data_sets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListChannelGroupsRequest() + request = analytics_admin.ListExpandedDataSetsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListChannelGroupsPager) + assert isinstance(response, pagers.ListExpandedDataSetsPager) assert response.next_page_token == "next_page_token_value" -def test_list_channel_groups_empty_call(): +def test_list_expanded_data_sets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -41127,18 +41107,18 @@ def test_list_channel_groups_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_groups), "__call__" + type(client.transport.list_expanded_data_sets), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_channel_groups() + client.list_expanded_data_sets() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListChannelGroupsRequest() + assert args[0] == analytics_admin.ListExpandedDataSetsRequest() -def test_list_channel_groups_non_empty_request_with_auto_populated_field(): +def test_list_expanded_data_sets_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -41149,28 +41129,28 @@ def test_list_channel_groups_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListChannelGroupsRequest( + request = analytics_admin.ListExpandedDataSetsRequest( parent="parent_value", page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_channel_groups), "__call__" + type(client.transport.list_expanded_data_sets), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_channel_groups(request=request) + client.list_expanded_data_sets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListChannelGroupsRequest( + assert args[0] == analytics_admin.ListExpandedDataSetsRequest( parent="parent_value", page_token="page_token_value", ) -def test_list_channel_groups_use_cached_wrapped_rpc(): +def test_list_expanded_data_sets_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -41185,7 +41165,8 @@ def test_list_channel_groups_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_channel_groups in client._transport._wrapped_methods + client._transport.list_expanded_data_sets + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -41194,15 +41175,15 @@ def test_list_channel_groups_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_channel_groups + client._transport.list_expanded_data_sets ] = mock_rpc request = {} - client.list_channel_groups(request) + client.list_expanded_data_sets(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_channel_groups(request) + client.list_expanded_data_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -41210,7 +41191,7 @@ def test_list_channel_groups_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_channel_groups_empty_call_async(): +async def test_list_expanded_data_sets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -41220,22 +41201,22 @@ async def test_list_channel_groups_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_groups), "__call__" + type(client.transport.list_expanded_data_sets), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListChannelGroupsResponse( + analytics_admin.ListExpandedDataSetsResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_channel_groups() + response = await client.list_expanded_data_sets() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListChannelGroupsRequest() + assert args[0] == analytics_admin.ListExpandedDataSetsRequest() @pytest.mark.asyncio -async def test_list_channel_groups_async_use_cached_wrapped_rpc( +async def test_list_expanded_data_sets_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -41252,33 +41233,34 @@ async def test_list_channel_groups_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_channel_groups + client._client._transport.list_expanded_data_sets in client._client._transport._wrapped_methods ) # Replace cached 
wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_channel_groups - ] = mock_object + client._client._transport.list_expanded_data_sets + ] = mock_rpc request = {} - await client.list_channel_groups(request) + await client.list_expanded_data_sets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_channel_groups(request) + await client.list_expanded_data_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_channel_groups_async( +async def test_list_expanded_data_sets_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ListChannelGroupsRequest, + request_type=analytics_admin.ListExpandedDataSetsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -41291,49 +41273,49 @@ async def test_list_channel_groups_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_groups), "__call__" + type(client.transport.list_expanded_data_sets), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListChannelGroupsResponse( + analytics_admin.ListExpandedDataSetsResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_channel_groups(request) + response = await client.list_expanded_data_sets(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListChannelGroupsRequest() + request = analytics_admin.ListExpandedDataSetsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListChannelGroupsAsyncPager) + assert isinstance(response, pagers.ListExpandedDataSetsAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_list_channel_groups_async_from_dict(): - await test_list_channel_groups_async(request_type=dict) +async def test_list_expanded_data_sets_async_from_dict(): + await test_list_expanded_data_sets_async(request_type=dict) -def test_list_channel_groups_field_headers(): +def test_list_expanded_data_sets_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListChannelGroupsRequest() + request = analytics_admin.ListExpandedDataSetsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_groups), "__call__" + type(client.transport.list_expanded_data_sets), "__call__" ) as call: - call.return_value = analytics_admin.ListChannelGroupsResponse() - client.list_channel_groups(request) + call.return_value = analytics_admin.ListExpandedDataSetsResponse() + client.list_expanded_data_sets(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -41349,25 +41331,25 @@ def test_list_channel_groups_field_headers(): @pytest.mark.asyncio -async def test_list_channel_groups_field_headers_async(): +async def test_list_expanded_data_sets_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListChannelGroupsRequest() + request = analytics_admin.ListExpandedDataSetsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_groups), "__call__" + type(client.transport.list_expanded_data_sets), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListChannelGroupsResponse() + analytics_admin.ListExpandedDataSetsResponse() ) - await client.list_channel_groups(request) + await client.list_expanded_data_sets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -41382,20 +41364,20 @@ async def test_list_channel_groups_field_headers_async(): ) in kw["metadata"] -def test_list_channel_groups_flattened(): +def test_list_expanded_data_sets_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_groups), "__call__" + type(client.transport.list_expanded_data_sets), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListChannelGroupsResponse() + call.return_value = analytics_admin.ListExpandedDataSetsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_channel_groups( + client.list_expanded_data_sets( parent="parent_value", ) @@ -41408,7 +41390,7 @@ def test_list_channel_groups_flattened(): assert arg == mock_val -def test_list_channel_groups_flattened_error(): +def test_list_expanded_data_sets_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -41416,31 +41398,31 @@ def test_list_channel_groups_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_channel_groups( - analytics_admin.ListChannelGroupsRequest(), + client.list_expanded_data_sets( + analytics_admin.ListExpandedDataSetsRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_channel_groups_flattened_async(): +async def test_list_expanded_data_sets_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_groups), "__call__" + type(client.transport.list_expanded_data_sets), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListChannelGroupsResponse() + call.return_value = analytics_admin.ListExpandedDataSetsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListChannelGroupsResponse() + analytics_admin.ListExpandedDataSetsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_channel_groups( + response = await client.list_expanded_data_sets( parent="parent_value", ) @@ -41454,7 +41436,7 @@ async def test_list_channel_groups_flattened_async(): @pytest.mark.asyncio -async def test_list_channel_groups_flattened_error_async(): +async def test_list_expanded_data_sets_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -41462,13 +41444,13 @@ async def test_list_channel_groups_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_channel_groups( - analytics_admin.ListChannelGroupsRequest(), + await client.list_expanded_data_sets( + analytics_admin.ListExpandedDataSetsRequest(), parent="parent_value", ) -def test_list_channel_groups_pager(transport_name: str = "grpc"): +def test_list_expanded_data_sets_pager(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -41476,32 +41458,32 @@ def test_list_channel_groups_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_groups), "__call__" + type(client.transport.list_expanded_data_sets), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), ], next_page_token="abc", ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[], + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[], next_page_token="def", ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), ], next_page_token="ghi", ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), ], ), RuntimeError, @@ -41513,7 +41495,7 @@ def test_list_channel_groups_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_channel_groups(request={}, retry=retry, timeout=timeout) + pager = client.list_expanded_data_sets(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -41521,10 +41503,10 @@ def test_list_channel_groups_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, channel_group.ChannelGroup) for i in results) + assert all(isinstance(i, expanded_data_set.ExpandedDataSet) for i in results) -def test_list_channel_groups_pages(transport_name: str = "grpc"): +def test_list_expanded_data_sets_pages(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -41532,82 +41514,82 @@ def test_list_channel_groups_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_groups), "__call__" + type(client.transport.list_expanded_data_sets), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), ], next_page_token="abc", ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[], + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[], next_page_token="def", ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), - ], + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), + ], next_page_token="ghi", ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), ], ), RuntimeError, ) - pages = list(client.list_channel_groups(request={}).pages) + pages = list(client.list_expanded_data_sets(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_channel_groups_async_pager(): +async def test_list_expanded_data_sets_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the 
actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_groups), + type(client.transport.list_expanded_data_sets), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), ], next_page_token="abc", ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[], + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[], next_page_token="def", ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), ], next_page_token="ghi", ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), ], ), RuntimeError, ) - async_pager = await client.list_channel_groups( + async_pager = await client.list_expanded_data_sets( request={}, ) assert async_pager.next_page_token == "abc" @@ -41616,45 +41598,45 @@ async def test_list_channel_groups_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, channel_group.ChannelGroup) for i in responses) + assert all(isinstance(i, expanded_data_set.ExpandedDataSet) for i in responses) @pytest.mark.asyncio -async def test_list_channel_groups_async_pages(): +async def test_list_expanded_data_sets_async_pages(): client = AnalyticsAdminServiceAsyncClient( 
credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_groups), + type(client.transport.list_expanded_data_sets), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), ], next_page_token="abc", ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[], + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[], next_page_token="def", ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), ], next_page_token="ghi", ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), ], ), RuntimeError, @@ -41663,7 +41645,7 @@ async def test_list_channel_groups_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_channel_groups(request={}) + await client.list_expanded_data_sets(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -41673,11 +41655,11 @@ async def test_list_channel_groups_async_pages(): 
@pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateChannelGroupRequest, + analytics_admin.CreateExpandedDataSetRequest, dict, ], ) -def test_create_channel_group(request_type, transport: str = "grpc"): +def test_create_expanded_data_set(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -41689,32 +41671,34 @@ def test_create_channel_group(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_group), "__call__" + type(client.transport.create_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gaa_channel_group.ChannelGroup( + call.return_value = gaa_expanded_data_set.ExpandedDataSet( name="name_value", display_name="display_name_value", description="description_value", - system_defined=True, + dimension_names=["dimension_names_value"], + metric_names=["metric_names_value"], ) - response = client.create_channel_group(request) + response = client.create_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateChannelGroupRequest() + request = analytics_admin.CreateExpandedDataSetRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, gaa_channel_group.ChannelGroup) + assert isinstance(response, gaa_expanded_data_set.ExpandedDataSet) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.system_defined is True + assert response.dimension_names == ["dimension_names_value"] + assert response.metric_names == ["metric_names_value"] -def test_create_channel_group_empty_call(): +def test_create_expanded_data_set_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -41724,18 +41708,18 @@ def test_create_channel_group_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_group), "__call__" + type(client.transport.create_expanded_data_set), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_channel_group() + client.create_expanded_data_set() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateChannelGroupRequest() + assert args[0] == analytics_admin.CreateExpandedDataSetRequest() -def test_create_channel_group_non_empty_request_with_auto_populated_field(): +def test_create_expanded_data_set_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -41746,26 +41730,26 @@ def test_create_channel_group_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_admin.CreateChannelGroupRequest( + request = analytics_admin.CreateExpandedDataSetRequest( parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_group), "__call__" + type(client.transport.create_expanded_data_set), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_channel_group(request=request) + client.create_expanded_data_set(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateChannelGroupRequest( + assert args[0] == analytics_admin.CreateExpandedDataSetRequest( parent="parent_value", ) -def test_create_channel_group_use_cached_wrapped_rpc(): +def test_create_expanded_data_set_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -41780,7 +41764,8 @@ def test_create_channel_group_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_channel_group in client._transport._wrapped_methods + client._transport.create_expanded_data_set + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -41789,15 +41774,15 @@ def test_create_channel_group_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_channel_group + client._transport.create_expanded_data_set ] = mock_rpc request = {} - client.create_channel_group(request) + client.create_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_channel_group(request) + client.create_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -41805,7 +41790,7 @@ def test_create_channel_group_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_channel_group_empty_call_async(): +async def test_create_expanded_data_set_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -41815,25 +41800,26 @@ async def test_create_channel_group_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_group), "__call__" + type(client.transport.create_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_channel_group.ChannelGroup( + gaa_expanded_data_set.ExpandedDataSet( name="name_value", display_name="display_name_value", description="description_value", - system_defined=True, + dimension_names=["dimension_names_value"], + metric_names=["metric_names_value"], ) ) - response = await client.create_channel_group() + response = await client.create_expanded_data_set() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateChannelGroupRequest() + assert args[0] == analytics_admin.CreateExpandedDataSetRequest() @pytest.mark.asyncio -async def test_create_channel_group_async_use_cached_wrapped_rpc( +async def test_create_expanded_data_set_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -41850,33 +41836,34 @@ async def test_create_channel_group_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - 
client._client._transport.create_channel_group + client._client._transport.create_expanded_data_set in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_channel_group - ] = mock_object + client._client._transport.create_expanded_data_set + ] = mock_rpc request = {} - await client.create_channel_group(request) + await client.create_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_channel_group(request) + await client.create_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_channel_group_async( +async def test_create_expanded_data_set_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateChannelGroupRequest, + request_type=analytics_admin.CreateExpandedDataSetRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -41889,55 +41876,57 @@ async def test_create_channel_group_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_group), "__call__" + type(client.transport.create_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_channel_group.ChannelGroup( + gaa_expanded_data_set.ExpandedDataSet( name="name_value", display_name="display_name_value", description="description_value", - system_defined=True, + dimension_names=["dimension_names_value"], + metric_names=["metric_names_value"], ) ) - response = await client.create_channel_group(request) + response = await client.create_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateChannelGroupRequest() + request = analytics_admin.CreateExpandedDataSetRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gaa_channel_group.ChannelGroup) + assert isinstance(response, gaa_expanded_data_set.ExpandedDataSet) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.system_defined is True + assert response.dimension_names == ["dimension_names_value"] + assert response.metric_names == ["metric_names_value"] @pytest.mark.asyncio -async def test_create_channel_group_async_from_dict(): - await test_create_channel_group_async(request_type=dict) +async def test_create_expanded_data_set_async_from_dict(): + await test_create_expanded_data_set_async(request_type=dict) -def test_create_channel_group_field_headers(): +def test_create_expanded_data_set_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateChannelGroupRequest() + request = analytics_admin.CreateExpandedDataSetRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_channel_group), "__call__" + type(client.transport.create_expanded_data_set), "__call__" ) as call: - call.return_value = gaa_channel_group.ChannelGroup() - client.create_channel_group(request) + call.return_value = gaa_expanded_data_set.ExpandedDataSet() + client.create_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -41953,25 +41942,25 @@ def test_create_channel_group_field_headers(): @pytest.mark.asyncio -async def test_create_channel_group_field_headers_async(): +async def test_create_expanded_data_set_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateChannelGroupRequest() + request = analytics_admin.CreateExpandedDataSetRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_group), "__call__" + type(client.transport.create_expanded_data_set), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_channel_group.ChannelGroup() + gaa_expanded_data_set.ExpandedDataSet() ) - await client.create_channel_group(request) + await client.create_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -41986,22 +41975,22 @@ async def test_create_channel_group_field_headers_async(): ) in kw["metadata"] -def test_create_channel_group_flattened(): +def test_create_expanded_data_set_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_channel_group), "__call__" + type(client.transport.create_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gaa_channel_group.ChannelGroup() + call.return_value = gaa_expanded_data_set.ExpandedDataSet() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_channel_group( + client.create_expanded_data_set( parent="parent_value", - channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -42011,12 +42000,12 @@ def test_create_channel_group_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].channel_group - mock_val = gaa_channel_group.ChannelGroup(name="name_value") + arg = args[0].expanded_data_set + mock_val = gaa_expanded_data_set.ExpandedDataSet(name="name_value") assert arg == mock_val -def test_create_channel_group_flattened_error(): +def test_create_expanded_data_set_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -42024,34 +42013,34 @@ def test_create_channel_group_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_channel_group( - analytics_admin.CreateChannelGroupRequest(), + client.create_expanded_data_set( + analytics_admin.CreateExpandedDataSetRequest(), parent="parent_value", - channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), ) @pytest.mark.asyncio -async def test_create_channel_group_flattened_async(): +async def test_create_expanded_data_set_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_group), "__call__" + type(client.transport.create_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gaa_channel_group.ChannelGroup() + call.return_value = gaa_expanded_data_set.ExpandedDataSet() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_channel_group.ChannelGroup() + gaa_expanded_data_set.ExpandedDataSet() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_channel_group( + response = await client.create_expanded_data_set( parent="parent_value", - channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -42061,13 +42050,13 @@ async def test_create_channel_group_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].channel_group - mock_val = gaa_channel_group.ChannelGroup(name="name_value") + arg = args[0].expanded_data_set + mock_val = gaa_expanded_data_set.ExpandedDataSet(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_create_channel_group_flattened_error_async(): +async def test_create_expanded_data_set_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -42075,21 +42064,21 @@ async def test_create_channel_group_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_channel_group( - analytics_admin.CreateChannelGroupRequest(), + await client.create_expanded_data_set( + analytics_admin.CreateExpandedDataSetRequest(), parent="parent_value", - channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateChannelGroupRequest, + analytics_admin.UpdateExpandedDataSetRequest, dict, ], ) -def test_update_channel_group(request_type, transport: str = "grpc"): +def test_update_expanded_data_set(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -42101,32 +42090,34 @@ def test_update_channel_group(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_channel_group), "__call__" + type(client.transport.update_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gaa_channel_group.ChannelGroup( + call.return_value = gaa_expanded_data_set.ExpandedDataSet( name="name_value", display_name="display_name_value", description="description_value", - system_defined=True, + dimension_names=["dimension_names_value"], + metric_names=["metric_names_value"], ) - response = client.update_channel_group(request) + response = client.update_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateChannelGroupRequest() + request = analytics_admin.UpdateExpandedDataSetRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, gaa_channel_group.ChannelGroup) + assert isinstance(response, gaa_expanded_data_set.ExpandedDataSet) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.system_defined is True + assert response.dimension_names == ["dimension_names_value"] + assert response.metric_names == ["metric_names_value"] -def test_update_channel_group_empty_call(): +def test_update_expanded_data_set_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -42136,18 +42127,18 @@ def test_update_channel_group_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_channel_group), "__call__" + type(client.transport.update_expanded_data_set), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_channel_group() + client.update_expanded_data_set() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateChannelGroupRequest() + assert args[0] == analytics_admin.UpdateExpandedDataSetRequest() -def test_update_channel_group_non_empty_request_with_auto_populated_field(): +def test_update_expanded_data_set_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -42158,22 +42149,22 @@ def test_update_channel_group_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_admin.UpdateChannelGroupRequest() + request = analytics_admin.UpdateExpandedDataSetRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_channel_group), "__call__" + type(client.transport.update_expanded_data_set), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_channel_group(request=request) + client.update_expanded_data_set(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateChannelGroupRequest() + assert args[0] == analytics_admin.UpdateExpandedDataSetRequest() -def test_update_channel_group_use_cached_wrapped_rpc(): +def test_update_expanded_data_set_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -42188,7 +42179,8 @@ def test_update_channel_group_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_channel_group in client._transport._wrapped_methods + client._transport.update_expanded_data_set + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -42197,15 +42189,15 @@ def test_update_channel_group_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_channel_group + client._transport.update_expanded_data_set ] = mock_rpc request = {} - client.update_channel_group(request) + client.update_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_channel_group(request) + client.update_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -42213,7 +42205,7 @@ def test_update_channel_group_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_channel_group_empty_call_async(): +async def test_update_expanded_data_set_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -42223,25 +42215,26 @@ async def test_update_channel_group_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_channel_group), "__call__" + type(client.transport.update_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_channel_group.ChannelGroup( + gaa_expanded_data_set.ExpandedDataSet( name="name_value", display_name="display_name_value", description="description_value", - system_defined=True, + dimension_names=["dimension_names_value"], + metric_names=["metric_names_value"], ) ) - response = await client.update_channel_group() + response = await client.update_expanded_data_set() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateChannelGroupRequest() + assert args[0] == analytics_admin.UpdateExpandedDataSetRequest() @pytest.mark.asyncio -async def test_update_channel_group_async_use_cached_wrapped_rpc( +async def test_update_expanded_data_set_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -42258,33 +42251,34 @@ async def test_update_channel_group_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - 
client._client._transport.update_channel_group + client._client._transport.update_expanded_data_set in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_channel_group - ] = mock_object + client._client._transport.update_expanded_data_set + ] = mock_rpc request = {} - await client.update_channel_group(request) + await client.update_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_channel_group(request) + await client.update_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_channel_group_async( +async def test_update_expanded_data_set_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateChannelGroupRequest, + request_type=analytics_admin.UpdateExpandedDataSetRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -42297,55 +42291,57 @@ async def test_update_channel_group_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_channel_group), "__call__" + type(client.transport.update_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_channel_group.ChannelGroup( + gaa_expanded_data_set.ExpandedDataSet( name="name_value", display_name="display_name_value", description="description_value", - system_defined=True, + dimension_names=["dimension_names_value"], + metric_names=["metric_names_value"], ) ) - response = await client.update_channel_group(request) + response = await client.update_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateChannelGroupRequest() + request = analytics_admin.UpdateExpandedDataSetRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gaa_channel_group.ChannelGroup) + assert isinstance(response, gaa_expanded_data_set.ExpandedDataSet) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.system_defined is True + assert response.dimension_names == ["dimension_names_value"] + assert response.metric_names == ["metric_names_value"] @pytest.mark.asyncio -async def test_update_channel_group_async_from_dict(): - await test_update_channel_group_async(request_type=dict) +async def test_update_expanded_data_set_async_from_dict(): + await test_update_expanded_data_set_async(request_type=dict) -def test_update_channel_group_field_headers(): +def test_update_expanded_data_set_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_admin.UpdateChannelGroupRequest() + request = analytics_admin.UpdateExpandedDataSetRequest() - request.channel_group.name = "name_value" + request.expanded_data_set.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_channel_group), "__call__" + type(client.transport.update_expanded_data_set), "__call__" ) as call: - call.return_value = gaa_channel_group.ChannelGroup() - client.update_channel_group(request) + call.return_value = gaa_expanded_data_set.ExpandedDataSet() + client.update_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -42356,30 +42352,30 @@ def test_update_channel_group_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "channel_group.name=name_value", + "expanded_data_set.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_channel_group_field_headers_async(): +async def test_update_expanded_data_set_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateChannelGroupRequest() + request = analytics_admin.UpdateExpandedDataSetRequest() - request.channel_group.name = "name_value" + request.expanded_data_set.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_channel_group), "__call__" + type(client.transport.update_expanded_data_set), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_channel_group.ChannelGroup() + gaa_expanded_data_set.ExpandedDataSet() ) - await client.update_channel_group(request) + await client.update_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -42390,25 +42386,25 @@ async def test_update_channel_group_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "channel_group.name=name_value", + "expanded_data_set.name=name_value", ) in kw["metadata"] -def test_update_channel_group_flattened(): +def test_update_expanded_data_set_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_channel_group), "__call__" + type(client.transport.update_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gaa_channel_group.ChannelGroup() + call.return_value = gaa_expanded_data_set.ExpandedDataSet() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_channel_group( - channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + client.update_expanded_data_set( + expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -42416,15 +42412,15 @@ def test_update_channel_group_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].channel_group - mock_val = gaa_channel_group.ChannelGroup(name="name_value") + arg = args[0].expanded_data_set + mock_val = gaa_expanded_data_set.ExpandedDataSet(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_update_channel_group_flattened_error(): +def test_update_expanded_data_set_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -42432,33 +42428,33 @@ def test_update_channel_group_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_channel_group( - analytics_admin.UpdateChannelGroupRequest(), - channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + client.update_expanded_data_set( + analytics_admin.UpdateExpandedDataSetRequest(), + expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_update_channel_group_flattened_async(): +async def test_update_expanded_data_set_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_channel_group), "__call__" + type(client.transport.update_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = gaa_channel_group.ChannelGroup() + call.return_value = gaa_expanded_data_set.ExpandedDataSet() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_channel_group.ChannelGroup() + gaa_expanded_data_set.ExpandedDataSet() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_channel_group( - channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + response = await client.update_expanded_data_set( + expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -42466,8 +42462,8 @@ async def test_update_channel_group_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].channel_group - mock_val = gaa_channel_group.ChannelGroup(name="name_value") + arg = args[0].expanded_data_set + mock_val = gaa_expanded_data_set.ExpandedDataSet(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -42475,7 +42471,7 @@ async def test_update_channel_group_flattened_async(): @pytest.mark.asyncio -async def test_update_channel_group_flattened_error_async(): +async def test_update_expanded_data_set_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -42483,9 +42479,9 @@ async def test_update_channel_group_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_channel_group( - analytics_admin.UpdateChannelGroupRequest(), - channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + await client.update_expanded_data_set( + analytics_admin.UpdateExpandedDataSetRequest(), + expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -42493,11 +42489,11 @@ async def test_update_channel_group_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteChannelGroupRequest, + analytics_admin.DeleteExpandedDataSetRequest, dict, ], ) -def test_delete_channel_group(request_type, transport: str = "grpc"): +def test_delete_expanded_data_set(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -42509,23 +42505,23 @@ def test_delete_channel_group(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_group), "__call__" + type(client.transport.delete_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_channel_group(request) + response = client.delete_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteChannelGroupRequest() + request = analytics_admin.DeleteExpandedDataSetRequest() assert args[0] == request # Establish that the response is the type that we expect. assert response is None -def test_delete_channel_group_empty_call(): +def test_delete_expanded_data_set_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceClient( @@ -42535,18 +42531,18 @@ def test_delete_channel_group_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_group), "__call__" + type(client.transport.delete_expanded_data_set), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_channel_group() + client.delete_expanded_data_set() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteChannelGroupRequest() + assert args[0] == analytics_admin.DeleteExpandedDataSetRequest() -def test_delete_channel_group_non_empty_request_with_auto_populated_field(): +def test_delete_expanded_data_set_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -42557,26 +42553,26 @@ def test_delete_channel_group_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.DeleteChannelGroupRequest( + request = analytics_admin.DeleteExpandedDataSetRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_group), "__call__" + type(client.transport.delete_expanded_data_set), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_channel_group(request=request) + client.delete_expanded_data_set(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteChannelGroupRequest( + assert args[0] == analytics_admin.DeleteExpandedDataSetRequest( name="name_value", ) -def test_delete_channel_group_use_cached_wrapped_rpc(): +def test_delete_expanded_data_set_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -42591,7 +42587,8 @@ def test_delete_channel_group_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_channel_group in client._transport._wrapped_methods + client._transport.delete_expanded_data_set + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -42600,15 +42597,15 @@ def test_delete_channel_group_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_channel_group + client._transport.delete_expanded_data_set ] = mock_rpc request = {} - client.delete_channel_group(request) + client.delete_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_channel_group(request) + client.delete_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -42616,7 +42613,7 @@ def test_delete_channel_group_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_channel_group_empty_call_async(): +async def test_delete_expanded_data_set_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -42626,18 +42623,18 @@ async def test_delete_channel_group_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_group), "__call__" + type(client.transport.delete_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_channel_group() + response = await client.delete_expanded_data_set() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteChannelGroupRequest() + assert args[0] == analytics_admin.DeleteExpandedDataSetRequest() @pytest.mark.asyncio -async def test_delete_channel_group_async_use_cached_wrapped_rpc( +async def test_delete_expanded_data_set_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -42654,33 +42651,34 @@ async def test_delete_channel_group_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_channel_group + client._client._transport.delete_expanded_data_set in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_channel_group - ] = mock_object + client._client._transport.delete_expanded_data_set + ] = mock_rpc request = {} - await client.delete_channel_group(request) + await client.delete_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_channel_group(request) + await client.delete_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_channel_group_async( +async def test_delete_expanded_data_set_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteChannelGroupRequest, + request_type=analytics_admin.DeleteExpandedDataSetRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -42693,16 +42691,16 @@ async def test_delete_channel_group_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_group), "__call__" + type(client.transport.delete_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_channel_group(request) + response = await client.delete_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteChannelGroupRequest() + request = analytics_admin.DeleteExpandedDataSetRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -42710,27 +42708,27 @@ async def test_delete_channel_group_async( @pytest.mark.asyncio -async def test_delete_channel_group_async_from_dict(): - await test_delete_channel_group_async(request_type=dict) +async def test_delete_expanded_data_set_async_from_dict(): + await test_delete_expanded_data_set_async(request_type=dict) -def test_delete_channel_group_field_headers(): +def test_delete_expanded_data_set_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteChannelGroupRequest() + request = analytics_admin.DeleteExpandedDataSetRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_group), "__call__" + type(client.transport.delete_expanded_data_set), "__call__" ) as call: call.return_value = None - client.delete_channel_group(request) + client.delete_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -42746,23 +42744,23 @@ def test_delete_channel_group_field_headers(): @pytest.mark.asyncio -async def test_delete_channel_group_field_headers_async(): +async def test_delete_expanded_data_set_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteChannelGroupRequest() + request = analytics_admin.DeleteExpandedDataSetRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_channel_group), "__call__" + type(client.transport.delete_expanded_data_set), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_channel_group(request) + await client.delete_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -42777,20 +42775,20 @@ async def test_delete_channel_group_field_headers_async(): ) in kw["metadata"] -def test_delete_channel_group_flattened(): +def test_delete_expanded_data_set_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_group), "__call__" + type(client.transport.delete_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_channel_group( + client.delete_expanded_data_set( name="name_value", ) @@ -42803,7 +42801,7 @@ def test_delete_channel_group_flattened(): assert arg == mock_val -def test_delete_channel_group_flattened_error(): +def test_delete_expanded_data_set_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -42811,21 +42809,21 @@ def test_delete_channel_group_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_channel_group( - analytics_admin.DeleteChannelGroupRequest(), + client.delete_expanded_data_set( + analytics_admin.DeleteExpandedDataSetRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_channel_group_flattened_async(): +async def test_delete_expanded_data_set_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_group), "__call__" + type(client.transport.delete_expanded_data_set), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -42833,7 +42831,7 @@ async def test_delete_channel_group_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_channel_group( + response = await client.delete_expanded_data_set( name="name_value", ) @@ -42847,7 +42845,7 @@ async def test_delete_channel_group_flattened_async(): @pytest.mark.asyncio -async def test_delete_channel_group_flattened_error_async(): +async def test_delete_expanded_data_set_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -42855,8 +42853,8 @@ async def test_delete_channel_group_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_channel_group( - analytics_admin.DeleteChannelGroupRequest(), + await client.delete_expanded_data_set( + analytics_admin.DeleteExpandedDataSetRequest(), name="name_value", ) @@ -42864,11 +42862,11 @@ async def test_delete_channel_group_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, + analytics_admin.GetChannelGroupRequest, dict, ], ) -def test_set_automated_ga4_configuration_opt_out(request_type, transport: str = "grpc"): +def test_get_channel_group(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -42880,25 +42878,34 @@ def test_set_automated_ga4_configuration_opt_out(request_type, transport: str = # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.set_automated_ga4_configuration_opt_out), "__call__" + type(client.transport.get_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() - response = client.set_automated_ga4_configuration_opt_out(request) + call.return_value = channel_group.ChannelGroup( + name="name_value", + display_name="display_name_value", + description="description_value", + system_defined=True, + primary=True, + ) + response = client.get_channel_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest() + request = analytics_admin.GetChannelGroupRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance( - response, analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse - ) + assert isinstance(response, channel_group.ChannelGroup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.system_defined is True + assert response.primary is True -def test_set_automated_ga4_configuration_opt_out_empty_call(): +def test_get_channel_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -42908,18 +42915,18 @@ def test_set_automated_ga4_configuration_opt_out_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.set_automated_ga4_configuration_opt_out), "__call__" + type(client.transport.get_channel_group), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.set_automated_ga4_configuration_opt_out() + client.get_channel_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest() + assert args[0] == analytics_admin.GetChannelGroupRequest() -def test_set_automated_ga4_configuration_opt_out_non_empty_request_with_auto_populated_field(): +def test_get_channel_group_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -42930,26 +42937,26 @@ def test_set_automated_ga4_configuration_opt_out_non_empty_request_with_auto_pop # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest( - property="property_value", + request = analytics_admin.GetChannelGroupRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.set_automated_ga4_configuration_opt_out), "__call__" + type(client.transport.get_channel_group), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.set_automated_ga4_configuration_opt_out(request=request) + client.get_channel_group(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest( - property="property_value", + assert args[0] == analytics_admin.GetChannelGroupRequest( + name="name_value", ) -def test_set_automated_ga4_configuration_opt_out_use_cached_wrapped_rpc(): +def test_get_channel_group_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -42963,10 +42970,7 @@ def test_set_automated_ga4_configuration_opt_out_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.set_automated_ga4_configuration_opt_out - in client._transport._wrapped_methods - ) + assert client._transport.get_channel_group in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -42974,15 +42978,15 @@ def test_set_automated_ga4_configuration_opt_out_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.set_automated_ga4_configuration_opt_out + client._transport.get_channel_group ] = mock_rpc request = {} - client.set_automated_ga4_configuration_opt_out(request) + client.get_channel_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.set_automated_ga4_configuration_opt_out(request) + client.get_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -42990,7 +42994,7 @@ def test_set_automated_ga4_configuration_opt_out_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_set_automated_ga4_configuration_opt_out_empty_call_async(): +async def test_get_channel_group_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -43000,20 +43004,26 @@ async def test_set_automated_ga4_configuration_opt_out_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.set_automated_ga4_configuration_opt_out), "__call__" + type(client.transport.get_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() + channel_group.ChannelGroup( + name="name_value", + display_name="display_name_value", + description="description_value", + system_defined=True, + primary=True, + ) ) - response = await client.set_automated_ga4_configuration_opt_out() + response = await client.get_channel_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest() + assert args[0] == analytics_admin.GetChannelGroupRequest() @pytest.mark.asyncio -async def test_set_automated_ga4_configuration_opt_out_async_use_cached_wrapped_rpc( +async def test_get_channel_group_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -43030,33 +43040,33 @@ async def test_set_automated_ga4_configuration_opt_out_async_use_cached_wrapped_ # Ensure method has been cached assert ( - client._client._transport.set_automated_ga4_configuration_opt_out + client._client._transport.get_channel_group in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.set_automated_ga4_configuration_opt_out - ] = mock_object + client._client._transport.get_channel_group + ] = mock_rpc request = {} - await client.set_automated_ga4_configuration_opt_out(request) + await client.get_channel_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.set_automated_ga4_configuration_opt_out(request) + await client.get_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_set_automated_ga4_configuration_opt_out_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, +async def test_get_channel_group_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.GetChannelGroupRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -43069,284 +43079,199 @@ async def test_set_automated_ga4_configuration_opt_out_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.set_automated_ga4_configuration_opt_out), "__call__" + type(client.transport.get_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() + channel_group.ChannelGroup( + name="name_value", + display_name="display_name_value", + description="description_value", + system_defined=True, + primary=True, + ) ) - response = await client.set_automated_ga4_configuration_opt_out(request) + response = await client.get_channel_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest() + request = analytics_admin.GetChannelGroupRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance( - response, analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse - ) + assert isinstance(response, channel_group.ChannelGroup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.system_defined is True + assert response.primary is True @pytest.mark.asyncio -async def test_set_automated_ga4_configuration_opt_out_async_from_dict(): - await test_set_automated_ga4_configuration_opt_out_async(request_type=dict) +async def test_get_channel_group_async_from_dict(): + await test_get_channel_group_async(request_type=dict) -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, - dict, - ], -) -def test_fetch_automated_ga4_configuration_opt_out( - request_type, transport: str = "grpc" -): +def test_get_channel_group_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.GetChannelGroupRequest() + + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.fetch_automated_ga4_configuration_opt_out), "__call__" + type(client.transport.get_channel_group), "__call__" ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = ( - analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse( - opt_out=True, - ) - ) - response = client.fetch_automated_ga4_configuration_opt_out(request) + call.return_value = channel_group.ChannelGroup() + client.get_channel_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest() assert args[0] == request - # Establish that the response is the type that we expect. - assert isinstance( - response, analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse - ) - assert response.opt_out is True + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_fetch_automated_ga4_configuration_opt_out_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AnalyticsAdminServiceClient( +@pytest.mark.asyncio +async def test_get_channel_group_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_automated_ga4_configuration_opt_out), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.fetch_automated_ga4_configuration_opt_out() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest() - - -def test_fetch_automated_ga4_configuration_opt_out_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.GetChannelGroupRequest() - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest( - property="property_value", - ) + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.fetch_automated_ga4_configuration_opt_out), "__call__" + type(client.transport.get_channel_group), "__call__" ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.fetch_automated_ga4_configuration_opt_out(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest( - property="property_value", - ) - - -def test_fetch_automated_ga4_configuration_opt_out_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.fetch_automated_ga4_configuration_opt_out - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + channel_group.ChannelGroup() ) - client._transport._wrapped_methods[ - client._transport.fetch_automated_ga4_configuration_opt_out - ] = mock_rpc - request = {} - client.fetch_automated_ga4_configuration_opt_out(request) + await client.get_channel_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.fetch_automated_ga4_configuration_opt_out(request) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.asyncio -async def test_fetch_automated_ga4_configuration_opt_out_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AnalyticsAdminServiceAsyncClient( +def test_get_channel_group_flattened(): + client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.fetch_automated_ga4_configuration_opt_out), "__call__" + type(client.transport.get_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse( - opt_out=True, - ) + call.return_value = channel_group.ChannelGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_channel_group( + name="name_value", ) - response = await client.fetch_automated_ga4_configuration_opt_out() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest() + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -@pytest.mark.asyncio -async def test_fetch_automated_ga4_configuration_opt_out_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +def test_get_channel_group_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Ensure method has been cached - assert ( - client._client._transport.fetch_automated_ga4_configuration_opt_out - in client._client._transport._wrapped_methods + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_channel_group( + analytics_admin.GetChannelGroupRequest(), + name="name_value", ) - # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[ - client._client._transport.fetch_automated_ga4_configuration_opt_out - ] = mock_object - - request = {} - await client.fetch_automated_ga4_configuration_opt_out(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 - - await client.fetch_automated_ga4_configuration_opt_out(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 - @pytest.mark.asyncio -async def test_fetch_automated_ga4_configuration_opt_out_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, -): +async def test_get_channel_group_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.fetch_automated_ga4_configuration_opt_out), "__call__" + type(client.transport.get_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. + call.return_value = channel_group.ChannelGroup() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse( - opt_out=True, - ) + channel_group.ChannelGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_channel_group( + name="name_value", ) - response = await client.fetch_automated_ga4_configuration_opt_out(request) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying call was made with the expected + # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance( - response, analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse - ) - assert response.opt_out is True + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_fetch_automated_ga4_configuration_opt_out_async_from_dict(): - await test_fetch_automated_ga4_configuration_opt_out_async(request_type=dict) +async def test_get_channel_group_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_channel_group( + analytics_admin.GetChannelGroupRequest(), + name="name_value", + ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetBigQueryLinkRequest, + analytics_admin.ListChannelGroupsRequest, dict, ], ) -def test_get_big_query_link(request_type, transport: str = "grpc"): +def test_list_channel_groups(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -43358,40 +43283,26 @@ def test_get_big_query_link(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_big_query_link), "__call__" + type(client.transport.list_channel_groups), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.BigQueryLink( - name="name_value", - project="project_value", - daily_export_enabled=True, - streaming_export_enabled=True, - fresh_daily_export_enabled=True, - include_advertising_id=True, - export_streams=["export_streams_value"], - excluded_events=["excluded_events_value"], + call.return_value = analytics_admin.ListChannelGroupsResponse( + next_page_token="next_page_token_value", ) - response = client.get_big_query_link(request) + response = client.list_channel_groups(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetBigQueryLinkRequest() + request = analytics_admin.ListChannelGroupsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.BigQueryLink) - assert response.name == "name_value" - assert response.project == "project_value" - assert response.daily_export_enabled is True - assert response.streaming_export_enabled is True - assert response.fresh_daily_export_enabled is True - assert response.include_advertising_id is True - assert response.export_streams == ["export_streams_value"] - assert response.excluded_events == ["excluded_events_value"] + assert isinstance(response, pagers.ListChannelGroupsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_big_query_link_empty_call(): +def test_list_channel_groups_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -43401,18 +43312,18 @@ def test_get_big_query_link_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_big_query_link), "__call__" + type(client.transport.list_channel_groups), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_big_query_link() + client.list_channel_groups() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetBigQueryLinkRequest() + assert args[0] == analytics_admin.ListChannelGroupsRequest() -def test_get_big_query_link_non_empty_request_with_auto_populated_field(): +def test_list_channel_groups_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -43423,26 +43334,28 @@ def test_get_big_query_link_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetBigQueryLinkRequest( - name="name_value", + request = analytics_admin.ListChannelGroupsRequest( + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_big_query_link), "__call__" + type(client.transport.list_channel_groups), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_big_query_link(request=request) + client.list_channel_groups(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetBigQueryLinkRequest( - name="name_value", + assert args[0] == analytics_admin.ListChannelGroupsRequest( + parent="parent_value", + page_token="page_token_value", ) -def test_get_big_query_link_use_cached_wrapped_rpc(): +def test_list_channel_groups_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -43457,7 +43370,7 @@ def test_get_big_query_link_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_big_query_link in client._transport._wrapped_methods + client._transport.list_channel_groups in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -43466,15 +43379,15 @@ def test_get_big_query_link_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_big_query_link + client._transport.list_channel_groups ] = mock_rpc request = {} - client.get_big_query_link(request) + client.list_channel_groups(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_big_query_link(request) + client.list_channel_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -43482,7 +43395,7 @@ def test_get_big_query_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_big_query_link_empty_call_async(): +async def test_list_channel_groups_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -43492,29 +43405,22 @@ async def test_get_big_query_link_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_big_query_link), "__call__" + type(client.transport.list_channel_groups), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.BigQueryLink( - name="name_value", - project="project_value", - daily_export_enabled=True, - streaming_export_enabled=True, - fresh_daily_export_enabled=True, - include_advertising_id=True, - export_streams=["export_streams_value"], - excluded_events=["excluded_events_value"], + analytics_admin.ListChannelGroupsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.get_big_query_link() + response = await client.list_channel_groups() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetBigQueryLinkRequest() + assert args[0] == analytics_admin.ListChannelGroupsRequest() @pytest.mark.asyncio -async def test_get_big_query_link_async_use_cached_wrapped_rpc( +async def test_list_channel_groups_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -43531,32 +43437,34 @@ async def test_get_big_query_link_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_big_query_link + client._client._transport.list_channel_groups in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_big_query_link - ] = mock_object + client._client._transport.list_channel_groups + ] = mock_rpc request = {} - await 
client.get_big_query_link(request) + await client.list_channel_groups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_big_query_link(request) + await client.list_channel_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_big_query_link_async( - transport: str = "grpc_asyncio", request_type=analytics_admin.GetBigQueryLinkRequest +async def test_list_channel_groups_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.ListChannelGroupsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -43569,63 +43477,49 @@ async def test_get_big_query_link_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_big_query_link), "__call__" + type(client.transport.list_channel_groups), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.BigQueryLink( - name="name_value", - project="project_value", - daily_export_enabled=True, - streaming_export_enabled=True, - fresh_daily_export_enabled=True, - include_advertising_id=True, - export_streams=["export_streams_value"], - excluded_events=["excluded_events_value"], + analytics_admin.ListChannelGroupsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.get_big_query_link(request) + response = await client.list_channel_groups(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetBigQueryLinkRequest() + request = analytics_admin.ListChannelGroupsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.BigQueryLink) - assert response.name == "name_value" - assert response.project == "project_value" - assert response.daily_export_enabled is True - assert response.streaming_export_enabled is True - assert response.fresh_daily_export_enabled is True - assert response.include_advertising_id is True - assert response.export_streams == ["export_streams_value"] - assert response.excluded_events == ["excluded_events_value"] + assert isinstance(response, pagers.ListChannelGroupsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_get_big_query_link_async_from_dict(): - await test_get_big_query_link_async(request_type=dict) +async def test_list_channel_groups_async_from_dict(): + await test_list_channel_groups_async(request_type=dict) -def test_get_big_query_link_field_headers(): +def test_list_channel_groups_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetBigQueryLinkRequest() + request = analytics_admin.ListChannelGroupsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_big_query_link), "__call__" + type(client.transport.list_channel_groups), "__call__" ) as call: - call.return_value = resources.BigQueryLink() - client.get_big_query_link(request) + call.return_value = analytics_admin.ListChannelGroupsResponse() + client.list_channel_groups(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -43636,30 +43530,30 @@ def test_get_big_query_link_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_big_query_link_field_headers_async(): +async def test_list_channel_groups_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetBigQueryLinkRequest() + request = analytics_admin.ListChannelGroupsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_big_query_link), "__call__" + type(client.transport.list_channel_groups), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.BigQueryLink() + analytics_admin.ListChannelGroupsResponse() ) - await client.get_big_query_link(request) + await client.list_channel_groups(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -43670,37 +43564,37 @@ async def test_get_big_query_link_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_big_query_link_flattened(): +def test_list_channel_groups_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_big_query_link), "__call__" + type(client.transport.list_channel_groups), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.BigQueryLink() + call.return_value = analytics_admin.ListChannelGroupsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_big_query_link( - name="name_value", + client.list_channel_groups( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_big_query_link_flattened_error(): +def test_list_channel_groups_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -43708,45 +43602,45 @@ def test_get_big_query_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_big_query_link( - analytics_admin.GetBigQueryLinkRequest(), - name="name_value", + client.list_channel_groups( + analytics_admin.ListChannelGroupsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_big_query_link_flattened_async(): +async def test_list_channel_groups_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_big_query_link), "__call__" + type(client.transport.list_channel_groups), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.BigQueryLink() + call.return_value = analytics_admin.ListChannelGroupsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.BigQueryLink() + analytics_admin.ListChannelGroupsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_big_query_link( - name="name_value", + response = await client.list_channel_groups( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_big_query_link_flattened_error_async(): +async def test_list_channel_groups_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -43754,20 +43648,222 @@ async def test_get_big_query_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_big_query_link( - analytics_admin.GetBigQueryLinkRequest(), - name="name_value", + await client.list_channel_groups( + analytics_admin.ListChannelGroupsRequest(), + parent="parent_value", + ) + + +def test_list_channel_groups_pager(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_groups), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + ], + next_page_token="abc", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[], + next_page_token="def", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + ], + next_page_token="ghi", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_channel_groups(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, channel_group.ChannelGroup) for i in results) + + +def test_list_channel_groups_pages(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC 
stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_groups), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + ], + next_page_token="abc", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[], + next_page_token="def", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + ], + next_page_token="ghi", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_channel_groups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_channel_groups_async_pager(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + ], + next_page_token="abc", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[], + next_page_token="def", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + ], + next_page_token="ghi", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_channel_groups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, channel_group.ChannelGroup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_channel_groups_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + ], + next_page_token="abc", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[], + next_page_token="def", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + ], + next_page_token="ghi", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_channel_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListBigQueryLinksRequest, + analytics_admin.CreateChannelGroupRequest, dict, ], ) -def test_list_big_query_links(request_type, transport: str = "grpc"): +def test_create_channel_group(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -43779,26 +43875,34 @@ def test_list_big_query_links(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_big_query_links), "__call__" + type(client.transport.create_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.ListBigQueryLinksResponse( - next_page_token="next_page_token_value", + call.return_value = gaa_channel_group.ChannelGroup( + name="name_value", + display_name="display_name_value", + description="description_value", + system_defined=True, + primary=True, ) - response = client.list_big_query_links(request) + response = client.create_channel_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListBigQueryLinksRequest() + request = analytics_admin.CreateChannelGroupRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBigQueryLinksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gaa_channel_group.ChannelGroup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.system_defined is True + assert response.primary is True -def test_list_big_query_links_empty_call(): +def test_create_channel_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -43808,18 +43912,18 @@ def test_list_big_query_links_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_big_query_links), "__call__" + type(client.transport.create_channel_group), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_big_query_links() + client.create_channel_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListBigQueryLinksRequest() + assert args[0] == analytics_admin.CreateChannelGroupRequest() -def test_list_big_query_links_non_empty_request_with_auto_populated_field(): +def test_create_channel_group_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -43830,28 +43934,26 @@ def test_list_big_query_links_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListBigQueryLinksRequest( + request = analytics_admin.CreateChannelGroupRequest( parent="parent_value", - page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_big_query_links), "__call__" + type(client.transport.create_channel_group), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_big_query_links(request=request) + client.create_channel_group(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListBigQueryLinksRequest( + assert args[0] == analytics_admin.CreateChannelGroupRequest( parent="parent_value", - page_token="page_token_value", ) -def test_list_big_query_links_use_cached_wrapped_rpc(): +def test_create_channel_group_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -43866,7 +43968,7 @@ def test_list_big_query_links_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_big_query_links in client._transport._wrapped_methods + client._transport.create_channel_group in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -43875,15 +43977,15 @@ def test_list_big_query_links_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_big_query_links + client._transport.create_channel_group ] = mock_rpc request = {} - client.list_big_query_links(request) + client.create_channel_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_big_query_links(request) + client.create_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -43891,7 +43993,7 @@ def test_list_big_query_links_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_big_query_links_empty_call_async(): +async def test_create_channel_group_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -43901,22 +44003,26 @@ async def test_list_big_query_links_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_big_query_links), "__call__" + type(client.transport.create_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListBigQueryLinksResponse( - next_page_token="next_page_token_value", + gaa_channel_group.ChannelGroup( + name="name_value", + display_name="display_name_value", + description="description_value", + system_defined=True, + primary=True, ) ) - response = await client.list_big_query_links() + response = await client.create_channel_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListBigQueryLinksRequest() + assert args[0] == analytics_admin.CreateChannelGroupRequest() @pytest.mark.asyncio -async def test_list_big_query_links_async_use_cached_wrapped_rpc( +async def test_create_channel_group_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -43933,33 +44039,34 @@ async def test_list_big_query_links_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_big_query_links + client._client._transport.create_channel_group in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_big_query_links - ] = mock_object + client._client._transport.create_channel_group + ] = mock_rpc request = {} - await client.list_big_query_links(request) + await client.create_channel_group(request) # Establish that the underlying gRPC 
stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_big_query_links(request) + await client.create_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_big_query_links_async( +async def test_create_channel_group_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ListBigQueryLinksRequest, + request_type=analytics_admin.CreateChannelGroupRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -43972,49 +44079,57 @@ async def test_list_big_query_links_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_big_query_links), "__call__" + type(client.transport.create_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListBigQueryLinksResponse( - next_page_token="next_page_token_value", + gaa_channel_group.ChannelGroup( + name="name_value", + display_name="display_name_value", + description="description_value", + system_defined=True, + primary=True, ) ) - response = await client.list_big_query_links(request) + response = await client.create_channel_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListBigQueryLinksRequest() + request = analytics_admin.CreateChannelGroupRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBigQueryLinksAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gaa_channel_group.ChannelGroup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.system_defined is True + assert response.primary is True @pytest.mark.asyncio -async def test_list_big_query_links_async_from_dict(): - await test_list_big_query_links_async(request_type=dict) +async def test_create_channel_group_async_from_dict(): + await test_create_channel_group_async(request_type=dict) -def test_list_big_query_links_field_headers(): +def test_create_channel_group_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListBigQueryLinksRequest() + request = analytics_admin.CreateChannelGroupRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_big_query_links), "__call__" + type(client.transport.create_channel_group), "__call__" ) as call: - call.return_value = analytics_admin.ListBigQueryLinksResponse() - client.list_big_query_links(request) + call.return_value = gaa_channel_group.ChannelGroup() + client.create_channel_group(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -44030,25 +44145,25 @@ def test_list_big_query_links_field_headers(): @pytest.mark.asyncio -async def test_list_big_query_links_field_headers_async(): +async def test_create_channel_group_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListBigQueryLinksRequest() + request = analytics_admin.CreateChannelGroupRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_big_query_links), "__call__" + type(client.transport.create_channel_group), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListBigQueryLinksResponse() + gaa_channel_group.ChannelGroup() ) - await client.list_big_query_links(request) + await client.create_channel_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -44063,21 +44178,22 @@ async def test_list_big_query_links_field_headers_async(): ) in kw["metadata"] -def test_list_big_query_links_flattened(): +def test_create_channel_group_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_big_query_links), "__call__" + type(client.transport.create_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListBigQueryLinksResponse() + call.return_value = gaa_channel_group.ChannelGroup() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_big_query_links( + client.create_channel_group( parent="parent_value", + channel_group=gaa_channel_group.ChannelGroup(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -44087,9 +44203,12 @@ def test_list_big_query_links_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].channel_group + mock_val = gaa_channel_group.ChannelGroup(name="name_value") + assert arg == mock_val -def test_list_big_query_links_flattened_error(): +def test_create_channel_group_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -44097,32 +44216,34 @@ def test_list_big_query_links_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_big_query_links( - analytics_admin.ListBigQueryLinksRequest(), + client.create_channel_group( + analytics_admin.CreateChannelGroupRequest(), parent="parent_value", + channel_group=gaa_channel_group.ChannelGroup(name="name_value"), ) @pytest.mark.asyncio -async def test_list_big_query_links_flattened_async(): +async def test_create_channel_group_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_big_query_links), "__call__" + type(client.transport.create_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListBigQueryLinksResponse() + call.return_value = gaa_channel_group.ChannelGroup() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListBigQueryLinksResponse() + gaa_channel_group.ChannelGroup() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_big_query_links( + response = await client.create_channel_group( parent="parent_value", + channel_group=gaa_channel_group.ChannelGroup(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -44132,10 +44253,13 @@ async def test_list_big_query_links_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].channel_group + mock_val = gaa_channel_group.ChannelGroup(name="name_value") + assert arg == mock_val @pytest.mark.asyncio -async def test_list_big_query_links_flattened_error_async(): +async def test_create_channel_group_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -44143,273 +44267,60 @@ async def test_list_big_query_links_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_big_query_links( - analytics_admin.ListBigQueryLinksRequest(), + await client.create_channel_group( + analytics_admin.CreateChannelGroupRequest(), parent="parent_value", + channel_group=gaa_channel_group.ChannelGroup(name="name_value"), ) -def test_list_big_query_links_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.UpdateChannelGroupRequest, + dict, + ], +) +def test_update_channel_group(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_big_query_links), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - resources.BigQueryLink(), - resources.BigQueryLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[], - next_page_token="def", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - resources.BigQueryLink(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_big_query_links(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.BigQueryLink) for i in results) - - -def test_list_big_query_links_pages(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_big_query_links), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - resources.BigQueryLink(), - resources.BigQueryLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[], - next_page_token="def", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - resources.BigQueryLink(), - ], - ), - RuntimeError, - ) - pages = list(client.list_big_query_links(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_big_query_links_async_pager(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_big_query_links), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - resources.BigQueryLink(), - resources.BigQueryLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[], - next_page_token="def", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - resources.BigQueryLink(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_big_query_links( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.BigQueryLink) for i in responses) - - -@pytest.mark.asyncio -async def test_list_big_query_links_async_pages(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_big_query_links), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - resources.BigQueryLink(), - resources.BigQueryLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[], - next_page_token="def", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - resources.BigQueryLink(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_big_query_links(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.GetEnhancedMeasurementSettingsRequest, - dict, - ], -) -def test_get_enhanced_measurement_settings(request_type, transport: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_enhanced_measurement_settings), "__call__" + type(client.transport.update_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.EnhancedMeasurementSettings( + call.return_value = gaa_channel_group.ChannelGroup( name="name_value", - stream_enabled=True, - scrolls_enabled=True, - outbound_clicks_enabled=True, - site_search_enabled=True, - video_engagement_enabled=True, - file_downloads_enabled=True, - page_changes_enabled=True, - form_interactions_enabled=True, - search_query_parameter="search_query_parameter_value", - uri_query_parameter="uri_query_parameter_value", + display_name="display_name_value", + description="description_value", + system_defined=True, + primary=True, ) - response = client.get_enhanced_measurement_settings(request) + response = client.update_channel_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetEnhancedMeasurementSettingsRequest() + request = analytics_admin.UpdateChannelGroupRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.EnhancedMeasurementSettings) + assert isinstance(response, gaa_channel_group.ChannelGroup) assert response.name == "name_value" - assert response.stream_enabled is True - assert response.scrolls_enabled is True - assert response.outbound_clicks_enabled is True - assert response.site_search_enabled is True - assert response.video_engagement_enabled is True - assert response.file_downloads_enabled is True - assert response.page_changes_enabled is True - assert response.form_interactions_enabled is True - assert response.search_query_parameter == "search_query_parameter_value" - assert response.uri_query_parameter == "uri_query_parameter_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.system_defined is True + assert response.primary is True -def test_get_enhanced_measurement_settings_empty_call(): +def test_update_channel_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -44419,18 +44330,18 @@ def test_get_enhanced_measurement_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_enhanced_measurement_settings), "__call__" + type(client.transport.update_channel_group), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_enhanced_measurement_settings() + client.update_channel_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetEnhancedMeasurementSettingsRequest() + assert args[0] == analytics_admin.UpdateChannelGroupRequest() -def test_get_enhanced_measurement_settings_non_empty_request_with_auto_populated_field(): +def test_update_channel_group_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -44441,26 +44352,22 @@ def test_get_enhanced_measurement_settings_non_empty_request_with_auto_populated # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetEnhancedMeasurementSettingsRequest( - name="name_value", - ) + request = analytics_admin.UpdateChannelGroupRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_enhanced_measurement_settings), "__call__" + type(client.transport.update_channel_group), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_enhanced_measurement_settings(request=request) + client.update_channel_group(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetEnhancedMeasurementSettingsRequest( - name="name_value", - ) + assert args[0] == analytics_admin.UpdateChannelGroupRequest() -def test_get_enhanced_measurement_settings_use_cached_wrapped_rpc(): +def test_update_channel_group_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -44475,8 +44382,7 @@ def test_get_enhanced_measurement_settings_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_enhanced_measurement_settings - in client._transport._wrapped_methods + client._transport.update_channel_group in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -44485,15 +44391,15 @@ def test_get_enhanced_measurement_settings_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_enhanced_measurement_settings + client._transport.update_channel_group ] = mock_rpc request = {} - client.get_enhanced_measurement_settings(request) + client.update_channel_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_enhanced_measurement_settings(request) + client.update_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -44501,7 +44407,7 @@ def test_get_enhanced_measurement_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_enhanced_measurement_settings_empty_call_async(): +async def test_update_channel_group_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -44511,32 +44417,26 @@ async def test_get_enhanced_measurement_settings_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_enhanced_measurement_settings), "__call__" + type(client.transport.update_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.EnhancedMeasurementSettings( + gaa_channel_group.ChannelGroup( name="name_value", - stream_enabled=True, - scrolls_enabled=True, - outbound_clicks_enabled=True, - site_search_enabled=True, - video_engagement_enabled=True, - file_downloads_enabled=True, - page_changes_enabled=True, - form_interactions_enabled=True, - search_query_parameter="search_query_parameter_value", - uri_query_parameter="uri_query_parameter_value", + display_name="display_name_value", + description="description_value", + system_defined=True, + primary=True, ) ) - response = await client.get_enhanced_measurement_settings() + response = await client.update_channel_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetEnhancedMeasurementSettingsRequest() + assert args[0] == analytics_admin.UpdateChannelGroupRequest() @pytest.mark.asyncio -async def test_get_enhanced_measurement_settings_async_use_cached_wrapped_rpc( +async def test_update_channel_group_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -44553,33 +44453,34 @@ async def test_get_enhanced_measurement_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_enhanced_measurement_settings + client._client._transport.update_channel_group in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_enhanced_measurement_settings - ] = mock_object + client._client._transport.update_channel_group + ] = mock_rpc request = {} - await client.get_enhanced_measurement_settings(request) + await client.update_channel_group(request) # Establish that the underlying gRPC stub method was 
called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_enhanced_measurement_settings(request) + await client.update_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_enhanced_measurement_settings_async( +async def test_update_channel_group_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.GetEnhancedMeasurementSettingsRequest, + request_type=analytics_admin.UpdateChannelGroupRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -44592,69 +44493,57 @@ async def test_get_enhanced_measurement_settings_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_enhanced_measurement_settings), "__call__" + type(client.transport.update_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.EnhancedMeasurementSettings( + gaa_channel_group.ChannelGroup( name="name_value", - stream_enabled=True, - scrolls_enabled=True, - outbound_clicks_enabled=True, - site_search_enabled=True, - video_engagement_enabled=True, - file_downloads_enabled=True, - page_changes_enabled=True, - form_interactions_enabled=True, - search_query_parameter="search_query_parameter_value", - uri_query_parameter="uri_query_parameter_value", + display_name="display_name_value", + description="description_value", + system_defined=True, + primary=True, ) ) - response = await client.get_enhanced_measurement_settings(request) + response = await client.update_channel_group(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetEnhancedMeasurementSettingsRequest() + request = analytics_admin.UpdateChannelGroupRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.EnhancedMeasurementSettings) + assert isinstance(response, gaa_channel_group.ChannelGroup) assert response.name == "name_value" - assert response.stream_enabled is True - assert response.scrolls_enabled is True - assert response.outbound_clicks_enabled is True - assert response.site_search_enabled is True - assert response.video_engagement_enabled is True - assert response.file_downloads_enabled is True - assert response.page_changes_enabled is True - assert response.form_interactions_enabled is True - assert response.search_query_parameter == "search_query_parameter_value" - assert response.uri_query_parameter == "uri_query_parameter_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.system_defined is True + assert response.primary is True @pytest.mark.asyncio -async def test_get_enhanced_measurement_settings_async_from_dict(): - await test_get_enhanced_measurement_settings_async(request_type=dict) +async def test_update_channel_group_async_from_dict(): + await test_update_channel_group_async(request_type=dict) -def test_get_enhanced_measurement_settings_field_headers(): +def test_update_channel_group_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetEnhancedMeasurementSettingsRequest() + request = analytics_admin.UpdateChannelGroupRequest() - request.name = "name_value" + request.channel_group.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_enhanced_measurement_settings), "__call__" + type(client.transport.update_channel_group), "__call__" ) as call: - call.return_value = resources.EnhancedMeasurementSettings() - client.get_enhanced_measurement_settings(request) + call.return_value = gaa_channel_group.ChannelGroup() + client.update_channel_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -44665,30 +44554,30 @@ def test_get_enhanced_measurement_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "channel_group.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_enhanced_measurement_settings_field_headers_async(): +async def test_update_channel_group_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetEnhancedMeasurementSettingsRequest() + request = analytics_admin.UpdateChannelGroupRequest() - request.name = "name_value" + request.channel_group.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_enhanced_measurement_settings), "__call__" + type(client.transport.update_channel_group), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.EnhancedMeasurementSettings() + gaa_channel_group.ChannelGroup() ) - await client.get_enhanced_measurement_settings(request) + await client.update_channel_group(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -44699,37 +44588,41 @@ async def test_get_enhanced_measurement_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "channel_group.name=name_value", ) in kw["metadata"] -def test_get_enhanced_measurement_settings_flattened(): +def test_update_channel_group_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_enhanced_measurement_settings), "__call__" + type(client.transport.update_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.EnhancedMeasurementSettings() + call.return_value = gaa_channel_group.ChannelGroup() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_enhanced_measurement_settings( - name="name_value", + client.update_channel_group( + channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].channel_group + mock_val = gaa_channel_group.ChannelGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_enhanced_measurement_settings_flattened_error(): +def test_update_channel_group_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -44737,45 +44630,50 @@ def test_get_enhanced_measurement_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_enhanced_measurement_settings( - analytics_admin.GetEnhancedMeasurementSettingsRequest(), - name="name_value", + client.update_channel_group( + analytics_admin.UpdateChannelGroupRequest(), + channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_enhanced_measurement_settings_flattened_async(): +async def test_update_channel_group_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_enhanced_measurement_settings), "__call__" + type(client.transport.update_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.EnhancedMeasurementSettings() + call.return_value = gaa_channel_group.ChannelGroup() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.EnhancedMeasurementSettings() + gaa_channel_group.ChannelGroup() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_enhanced_measurement_settings( - name="name_value", + response = await client.update_channel_group( + channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].channel_group + mock_val = gaa_channel_group.ChannelGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_enhanced_measurement_settings_flattened_error_async(): +async def test_update_channel_group_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -44783,20 +44681,21 @@ async def test_get_enhanced_measurement_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_enhanced_measurement_settings( - analytics_admin.GetEnhancedMeasurementSettingsRequest(), - name="name_value", + await client.update_channel_group( + analytics_admin.UpdateChannelGroupRequest(), + channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateEnhancedMeasurementSettingsRequest, + analytics_admin.DeleteChannelGroupRequest, dict, ], ) -def test_update_enhanced_measurement_settings(request_type, transport: str = "grpc"): +def test_delete_channel_group(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -44808,46 +44707,23 @@ def test_update_enhanced_measurement_settings(request_type, transport: str = "gr # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_enhanced_measurement_settings), "__call__" + type(client.transport.delete_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.EnhancedMeasurementSettings( - name="name_value", - stream_enabled=True, - scrolls_enabled=True, - outbound_clicks_enabled=True, - site_search_enabled=True, - video_engagement_enabled=True, - file_downloads_enabled=True, - page_changes_enabled=True, - form_interactions_enabled=True, - search_query_parameter="search_query_parameter_value", - uri_query_parameter="uri_query_parameter_value", - ) - response = client.update_enhanced_measurement_settings(request) + call.return_value = None + response = client.delete_channel_group(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() + request = analytics_admin.DeleteChannelGroupRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.EnhancedMeasurementSettings) - assert response.name == "name_value" - assert response.stream_enabled is True - assert response.scrolls_enabled is True - assert response.outbound_clicks_enabled is True - assert response.site_search_enabled is True - assert response.video_engagement_enabled is True - assert response.file_downloads_enabled is True - assert response.page_changes_enabled is True - assert response.form_interactions_enabled is True - assert response.search_query_parameter == "search_query_parameter_value" - assert response.uri_query_parameter == "uri_query_parameter_value" + assert response is None -def test_update_enhanced_measurement_settings_empty_call(): +def test_delete_channel_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -44857,18 +44733,18 @@ def test_update_enhanced_measurement_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_enhanced_measurement_settings), "__call__" + type(client.transport.delete_channel_group), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_enhanced_measurement_settings() + client.delete_channel_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateEnhancedMeasurementSettingsRequest() + assert args[0] == analytics_admin.DeleteChannelGroupRequest() -def test_update_enhanced_measurement_settings_non_empty_request_with_auto_populated_field(): +def test_delete_channel_group_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -44879,22 +44755,26 @@ def test_update_enhanced_measurement_settings_non_empty_request_with_auto_popula # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() + request = analytics_admin.DeleteChannelGroupRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_enhanced_measurement_settings), "__call__" + type(client.transport.delete_channel_group), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_enhanced_measurement_settings(request=request) + client.delete_channel_group(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateEnhancedMeasurementSettingsRequest() + assert args[0] == analytics_admin.DeleteChannelGroupRequest( + name="name_value", + ) -def test_update_enhanced_measurement_settings_use_cached_wrapped_rpc(): +def test_delete_channel_group_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -44909,8 +44789,7 @@ def test_update_enhanced_measurement_settings_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_enhanced_measurement_settings - in client._transport._wrapped_methods + client._transport.delete_channel_group in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -44919,15 +44798,15 @@ def test_update_enhanced_measurement_settings_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_enhanced_measurement_settings + client._transport.delete_channel_group ] = mock_rpc request = {} - client.update_enhanced_measurement_settings(request) + client.delete_channel_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_enhanced_measurement_settings(request) + client.delete_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -44935,7 +44814,7 @@ def test_update_enhanced_measurement_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_enhanced_measurement_settings_empty_call_async(): +async def test_delete_channel_group_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -44945,32 +44824,18 @@ async def test_update_enhanced_measurement_settings_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_enhanced_measurement_settings), "__call__" + type(client.transport.delete_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.EnhancedMeasurementSettings( - name="name_value", - stream_enabled=True, - scrolls_enabled=True, - outbound_clicks_enabled=True, - site_search_enabled=True, - video_engagement_enabled=True, - file_downloads_enabled=True, - page_changes_enabled=True, - form_interactions_enabled=True, - search_query_parameter="search_query_parameter_value", - uri_query_parameter="uri_query_parameter_value", - ) - ) - response = await client.update_enhanced_measurement_settings() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_channel_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateEnhancedMeasurementSettingsRequest() + assert args[0] == analytics_admin.DeleteChannelGroupRequest() @pytest.mark.asyncio -async def test_update_enhanced_measurement_settings_async_use_cached_wrapped_rpc( +async def test_delete_channel_group_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -44987,33 +44852,34 @@ async def test_update_enhanced_measurement_settings_async_use_cached_wrapped_rpc # Ensure method has been cached assert ( - client._client._transport.update_enhanced_measurement_settings + client._client._transport.delete_channel_group in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_enhanced_measurement_settings - ] = mock_object + client._client._transport.delete_channel_group + ] = mock_rpc request = {} - await client.update_enhanced_measurement_settings(request) + await client.delete_channel_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_enhanced_measurement_settings(request) + await client.delete_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_enhanced_measurement_settings_async( +async def test_delete_channel_group_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateEnhancedMeasurementSettingsRequest, + request_type=analytics_admin.DeleteChannelGroupRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -45026,69 +44892,44 @@ async def test_update_enhanced_measurement_settings_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_enhanced_measurement_settings), "__call__" + type(client.transport.delete_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.EnhancedMeasurementSettings( - name="name_value", - stream_enabled=True, - scrolls_enabled=True, - outbound_clicks_enabled=True, - site_search_enabled=True, - video_engagement_enabled=True, - file_downloads_enabled=True, - page_changes_enabled=True, - form_interactions_enabled=True, - search_query_parameter="search_query_parameter_value", - uri_query_parameter="uri_query_parameter_value", - ) - ) - response = await client.update_enhanced_measurement_settings(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_channel_group(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() + request = analytics_admin.DeleteChannelGroupRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.EnhancedMeasurementSettings) - assert response.name == "name_value" - assert response.stream_enabled is True - assert response.scrolls_enabled is True - assert response.outbound_clicks_enabled is True - assert response.site_search_enabled is True - assert response.video_engagement_enabled is True - assert response.file_downloads_enabled is True - assert response.page_changes_enabled is True - assert response.form_interactions_enabled is True - assert response.search_query_parameter == "search_query_parameter_value" - assert response.uri_query_parameter == "uri_query_parameter_value" + assert response is None @pytest.mark.asyncio -async def test_update_enhanced_measurement_settings_async_from_dict(): - await test_update_enhanced_measurement_settings_async(request_type=dict) +async def test_delete_channel_group_async_from_dict(): + await test_delete_channel_group_async(request_type=dict) -def test_update_enhanced_measurement_settings_field_headers(): +def test_delete_channel_group_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() + request = analytics_admin.DeleteChannelGroupRequest() - request.enhanced_measurement_settings.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_enhanced_measurement_settings), "__call__" + type(client.transport.delete_channel_group), "__call__" ) as call: - call.return_value = resources.EnhancedMeasurementSettings() - client.update_enhanced_measurement_settings(request) + call.return_value = None + client.delete_channel_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -45099,30 +44940,28 @@ def test_update_enhanced_measurement_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "enhanced_measurement_settings.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_enhanced_measurement_settings_field_headers_async(): +async def test_delete_channel_group_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() + request = analytics_admin.DeleteChannelGroupRequest() - request.enhanced_measurement_settings.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_enhanced_measurement_settings), "__call__" + type(client.transport.delete_channel_group), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.EnhancedMeasurementSettings() - ) - await client.update_enhanced_measurement_settings(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_channel_group(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -45133,43 +44972,37 @@ async def test_update_enhanced_measurement_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "enhanced_measurement_settings.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_enhanced_measurement_settings_flattened(): +def test_delete_channel_group_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_enhanced_measurement_settings), "__call__" + type(client.transport.delete_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.EnhancedMeasurementSettings() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_enhanced_measurement_settings( - enhanced_measurement_settings=resources.EnhancedMeasurementSettings( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_channel_group( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].enhanced_measurement_settings - mock_val = resources.EnhancedMeasurementSettings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_enhanced_measurement_settings_flattened_error(): +def test_delete_channel_group_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -45177,54 +45010,43 @@ def test_update_enhanced_measurement_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_enhanced_measurement_settings( - analytics_admin.UpdateEnhancedMeasurementSettingsRequest(), - enhanced_measurement_settings=resources.EnhancedMeasurementSettings( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_channel_group( + analytics_admin.DeleteChannelGroupRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_enhanced_measurement_settings_flattened_async(): +async def test_delete_channel_group_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_enhanced_measurement_settings), "__call__" + type(client.transport.delete_channel_group), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.EnhancedMeasurementSettings() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.EnhancedMeasurementSettings() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_enhanced_measurement_settings( - enhanced_measurement_settings=resources.EnhancedMeasurementSettings( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.delete_channel_group( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].enhanced_measurement_settings - mock_val = resources.EnhancedMeasurementSettings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_enhanced_measurement_settings_flattened_error_async(): +async def test_delete_channel_group_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -45232,23 +45054,20 @@ async def test_update_enhanced_measurement_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_enhanced_measurement_settings( - analytics_admin.UpdateEnhancedMeasurementSettingsRequest(), - enhanced_measurement_settings=resources.EnhancedMeasurementSettings( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.delete_channel_group( + analytics_admin.DeleteChannelGroupRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateConnectedSiteTagRequest, + analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, dict, ], ) -def test_create_connected_site_tag(request_type, transport: str = "grpc"): +def test_set_automated_ga4_configuration_opt_out(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -45260,23 +45079,25 @@ def test_create_connected_site_tag(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_connected_site_tag), "__call__" + type(client.transport.set_automated_ga4_configuration_opt_out), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.CreateConnectedSiteTagResponse() - response = client.create_connected_site_tag(request) + call.return_value = analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() + response = client.set_automated_ga4_configuration_opt_out(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateConnectedSiteTagRequest() + request = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.CreateConnectedSiteTagResponse) + assert isinstance( + response, analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse + ) -def test_create_connected_site_tag_empty_call(): +def test_set_automated_ga4_configuration_opt_out_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -45286,18 +45107,18 @@ def test_create_connected_site_tag_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_connected_site_tag), "__call__" + type(client.transport.set_automated_ga4_configuration_opt_out), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_connected_site_tag() + client.set_automated_ga4_configuration_opt_out() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateConnectedSiteTagRequest() + assert args[0] == analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest() -def test_create_connected_site_tag_non_empty_request_with_auto_populated_field(): +def test_set_automated_ga4_configuration_opt_out_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -45308,26 +45129,26 @@ def test_create_connected_site_tag_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_admin.CreateConnectedSiteTagRequest( + request = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest( property="property_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_connected_site_tag), "__call__" + type(client.transport.set_automated_ga4_configuration_opt_out), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_connected_site_tag(request=request) + client.set_automated_ga4_configuration_opt_out(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateConnectedSiteTagRequest( + assert args[0] == analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest( property="property_value", ) -def test_create_connected_site_tag_use_cached_wrapped_rpc(): +def test_set_automated_ga4_configuration_opt_out_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -45342,7 +45163,7 @@ def test_create_connected_site_tag_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_connected_site_tag + client._transport.set_automated_ga4_configuration_opt_out in client._transport._wrapped_methods ) @@ -45352,15 +45173,15 @@ def test_create_connected_site_tag_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_connected_site_tag + client._transport.set_automated_ga4_configuration_opt_out ] = mock_rpc request = {} - client.create_connected_site_tag(request) + client.set_automated_ga4_configuration_opt_out(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_connected_site_tag(request) + client.set_automated_ga4_configuration_opt_out(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -45368,7 +45189,7 @@ def test_create_connected_site_tag_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_connected_site_tag_empty_call_async(): +async def test_set_automated_ga4_configuration_opt_out_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -45378,20 +45199,20 @@ async def test_create_connected_site_tag_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_connected_site_tag), "__call__" + type(client.transport.set_automated_ga4_configuration_opt_out), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.CreateConnectedSiteTagResponse() + analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() ) - response = await client.create_connected_site_tag() + response = await client.set_automated_ga4_configuration_opt_out() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateConnectedSiteTagRequest() + assert args[0] == analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest() @pytest.mark.asyncio -async def test_create_connected_site_tag_async_use_cached_wrapped_rpc( +async def test_set_automated_ga4_configuration_opt_out_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -45408,33 +45229,34 @@ async def test_create_connected_site_tag_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_connected_site_tag + client._client._transport.set_automated_ga4_configuration_opt_out in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_connected_site_tag - ] = mock_object + client._client._transport.set_automated_ga4_configuration_opt_out + ] = mock_rpc request = {} - await client.create_connected_site_tag(request) + await client.set_automated_ga4_configuration_opt_out(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_connected_site_tag(request) + await client.set_automated_ga4_configuration_opt_out(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_connected_site_tag_async( +async def test_set_automated_ga4_configuration_opt_out_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateConnectedSiteTagRequest, + request_type=analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -45447,37 +45269,41 @@ async def test_create_connected_site_tag_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_connected_site_tag), "__call__" + type(client.transport.set_automated_ga4_configuration_opt_out), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.CreateConnectedSiteTagResponse() + analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() ) - response = await client.create_connected_site_tag(request) + response = await client.set_automated_ga4_configuration_opt_out(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateConnectedSiteTagRequest() + request = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.CreateConnectedSiteTagResponse) + assert isinstance( + response, analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse + ) @pytest.mark.asyncio -async def test_create_connected_site_tag_async_from_dict(): - await test_create_connected_site_tag_async(request_type=dict) +async def test_set_automated_ga4_configuration_opt_out_async_from_dict(): + await test_set_automated_ga4_configuration_opt_out_async(request_type=dict) @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteConnectedSiteTagRequest, + analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, dict, ], ) -def test_delete_connected_site_tag(request_type, transport: str = "grpc"): +def test_fetch_automated_ga4_configuration_opt_out( + request_type, transport: str = "grpc" +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -45489,23 +45315,30 @@ def test_delete_connected_site_tag(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_connected_site_tag), "__call__" + type(client.transport.fetch_automated_ga4_configuration_opt_out), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_connected_site_tag(request) + call.return_value = ( + analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse( + opt_out=True, + ) + ) + response = client.fetch_automated_ga4_configuration_opt_out(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteConnectedSiteTagRequest() + request = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance( + response, analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse + ) + assert response.opt_out is True -def test_delete_connected_site_tag_empty_call(): +def test_fetch_automated_ga4_configuration_opt_out_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -45515,18 +45348,18 @@ def test_delete_connected_site_tag_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_connected_site_tag), "__call__" + type(client.transport.fetch_automated_ga4_configuration_opt_out), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_connected_site_tag() + client.fetch_automated_ga4_configuration_opt_out() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteConnectedSiteTagRequest() + assert args[0] == analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest() -def test_delete_connected_site_tag_non_empty_request_with_auto_populated_field(): +def test_fetch_automated_ga4_configuration_opt_out_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -45537,28 +45370,26 @@ def test_delete_connected_site_tag_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_admin.DeleteConnectedSiteTagRequest( + request = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest( property="property_value", - tag_id="tag_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_connected_site_tag), "__call__" + type(client.transport.fetch_automated_ga4_configuration_opt_out), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_connected_site_tag(request=request) + client.fetch_automated_ga4_configuration_opt_out(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteConnectedSiteTagRequest( + assert args[0] == analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest( property="property_value", - tag_id="tag_id_value", ) -def test_delete_connected_site_tag_use_cached_wrapped_rpc(): +def test_fetch_automated_ga4_configuration_opt_out_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -45573,7 +45404,7 @@ def test_delete_connected_site_tag_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_connected_site_tag + client._transport.fetch_automated_ga4_configuration_opt_out in client._transport._wrapped_methods ) @@ -45583,15 +45414,15 @@ def test_delete_connected_site_tag_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.delete_connected_site_tag + client._transport.fetch_automated_ga4_configuration_opt_out ] = mock_rpc request = {} - client.delete_connected_site_tag(request) + client.fetch_automated_ga4_configuration_opt_out(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_connected_site_tag(request) + client.fetch_automated_ga4_configuration_opt_out(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -45599,7 +45430,7 @@ def test_delete_connected_site_tag_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_connected_site_tag_empty_call_async(): +async def test_fetch_automated_ga4_configuration_opt_out_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -45609,18 +45440,22 @@ async def test_delete_connected_site_tag_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_connected_site_tag), "__call__" + type(client.transport.fetch_automated_ga4_configuration_opt_out), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_connected_site_tag() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse( + opt_out=True, + ) + ) + response = await client.fetch_automated_ga4_configuration_opt_out() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteConnectedSiteTagRequest() + assert args[0] == analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest() @pytest.mark.asyncio -async def test_delete_connected_site_tag_async_use_cached_wrapped_rpc( +async def test_fetch_automated_ga4_configuration_opt_out_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -45637,33 +45472,34 @@ async def test_delete_connected_site_tag_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_connected_site_tag + client._client._transport.fetch_automated_ga4_configuration_opt_out in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_connected_site_tag - ] = mock_object + client._client._transport.fetch_automated_ga4_configuration_opt_out + ] = mock_rpc request = {} - await client.delete_connected_site_tag(request) + await client.fetch_automated_ga4_configuration_opt_out(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_connected_site_tag(request) + await client.fetch_automated_ga4_configuration_opt_out(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_connected_site_tag_async( +async def test_fetch_automated_ga4_configuration_opt_out_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteConnectedSiteTagRequest, + request_type=analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -45676,35 +45512,42 @@ async def test_delete_connected_site_tag_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_connected_site_tag), "__call__" + type(client.transport.fetch_automated_ga4_configuration_opt_out), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_connected_site_tag(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse( + opt_out=True, + ) + ) + response = await client.fetch_automated_ga4_configuration_opt_out(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteConnectedSiteTagRequest() + request = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance( + response, analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse + ) + assert response.opt_out is True @pytest.mark.asyncio -async def test_delete_connected_site_tag_async_from_dict(): - await test_delete_connected_site_tag_async(request_type=dict) +async def test_fetch_automated_ga4_configuration_opt_out_async_from_dict(): + await test_fetch_automated_ga4_configuration_opt_out_async(request_type=dict) @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListConnectedSiteTagsRequest, + analytics_admin.CreateBigQueryLinkRequest, dict, ], ) -def test_list_connected_site_tags(request_type, transport: str = "grpc"): +def test_create_big_query_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -45716,23 +45559,42 @@ def test_list_connected_site_tags(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_connected_site_tags), "__call__" + type(client.transport.create_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListConnectedSiteTagsResponse() - response = client.list_connected_site_tags(request) + call.return_value = resources.BigQueryLink( + name="name_value", + project="project_value", + daily_export_enabled=True, + streaming_export_enabled=True, + fresh_daily_export_enabled=True, + include_advertising_id=True, + export_streams=["export_streams_value"], + excluded_events=["excluded_events_value"], + dataset_location="dataset_location_value", + ) + response = client.create_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListConnectedSiteTagsRequest() + request = analytics_admin.CreateBigQueryLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.ListConnectedSiteTagsResponse) + assert isinstance(response, resources.BigQueryLink) + assert response.name == "name_value" + assert response.project == "project_value" + assert response.daily_export_enabled is True + assert response.streaming_export_enabled is True + assert response.fresh_daily_export_enabled is True + assert response.include_advertising_id is True + assert response.export_streams == ["export_streams_value"] + assert response.excluded_events == ["excluded_events_value"] + assert response.dataset_location == "dataset_location_value" -def test_list_connected_site_tags_empty_call(): +def test_create_big_query_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -45742,18 +45604,18 @@ def test_list_connected_site_tags_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_connected_site_tags), "__call__" + type(client.transport.create_big_query_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_connected_site_tags() + client.create_big_query_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListConnectedSiteTagsRequest() + assert args[0] == analytics_admin.CreateBigQueryLinkRequest() -def test_list_connected_site_tags_non_empty_request_with_auto_populated_field(): +def test_create_big_query_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -45764,26 +45626,26 @@ def test_list_connected_site_tags_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListConnectedSiteTagsRequest( - property="property_value", + request = analytics_admin.CreateBigQueryLinkRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_connected_site_tags), "__call__" + type(client.transport.create_big_query_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_connected_site_tags(request=request) + client.create_big_query_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListConnectedSiteTagsRequest( - property="property_value", + assert args[0] == analytics_admin.CreateBigQueryLinkRequest( + parent="parent_value", ) -def test_list_connected_site_tags_use_cached_wrapped_rpc(): +def test_create_big_query_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -45798,7 +45660,7 @@ def test_list_connected_site_tags_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_connected_site_tags + client._transport.create_big_query_link in client._transport._wrapped_methods ) @@ -45808,15 +45670,15 @@ def test_list_connected_site_tags_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_connected_site_tags + client._transport.create_big_query_link ] = mock_rpc request = {} - client.list_connected_site_tags(request) + client.create_big_query_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_connected_site_tags(request) + client.create_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -45824,7 +45686,7 @@ def test_list_connected_site_tags_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_connected_site_tags_empty_call_async(): +async def test_create_big_query_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -45834,20 +45696,30 @@ async def test_list_connected_site_tags_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_connected_site_tags), "__call__" + type(client.transport.create_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListConnectedSiteTagsResponse() + resources.BigQueryLink( + name="name_value", + project="project_value", + daily_export_enabled=True, + streaming_export_enabled=True, + fresh_daily_export_enabled=True, + include_advertising_id=True, + export_streams=["export_streams_value"], + excluded_events=["excluded_events_value"], + dataset_location="dataset_location_value", + ) ) - response = await client.list_connected_site_tags() + response = await client.create_big_query_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListConnectedSiteTagsRequest() + assert args[0] == analytics_admin.CreateBigQueryLinkRequest() @pytest.mark.asyncio -async def test_list_connected_site_tags_async_use_cached_wrapped_rpc( +async def test_create_big_query_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -45864,33 +45736,34 @@ async def test_list_connected_site_tags_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_connected_site_tags + client._client._transport.create_big_query_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_connected_site_tags - ] = mock_object + 
client._client._transport.create_big_query_link + ] = mock_rpc request = {} - await client.list_connected_site_tags(request) + await client.create_big_query_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_connected_site_tags(request) + await client.create_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_connected_site_tags_async( +async def test_create_big_query_link_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ListConnectedSiteTagsRequest, + request_type=analytics_admin.CreateBigQueryLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -45903,274 +45776,217 @@ async def test_list_connected_site_tags_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_connected_site_tags), "__call__" + type(client.transport.create_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListConnectedSiteTagsResponse() + resources.BigQueryLink( + name="name_value", + project="project_value", + daily_export_enabled=True, + streaming_export_enabled=True, + fresh_daily_export_enabled=True, + include_advertising_id=True, + export_streams=["export_streams_value"], + excluded_events=["excluded_events_value"], + dataset_location="dataset_location_value", + ) ) - response = await client.list_connected_site_tags(request) + response = await client.create_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListConnectedSiteTagsRequest() + request = analytics_admin.CreateBigQueryLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.ListConnectedSiteTagsResponse) + assert isinstance(response, resources.BigQueryLink) + assert response.name == "name_value" + assert response.project == "project_value" + assert response.daily_export_enabled is True + assert response.streaming_export_enabled is True + assert response.fresh_daily_export_enabled is True + assert response.include_advertising_id is True + assert response.export_streams == ["export_streams_value"] + assert response.excluded_events == ["excluded_events_value"] + assert response.dataset_location == "dataset_location_value" @pytest.mark.asyncio -async def test_list_connected_site_tags_async_from_dict(): - await test_list_connected_site_tags_async(request_type=dict) +async def test_create_big_query_link_async_from_dict(): + await test_create_big_query_link_async(request_type=dict) -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.FetchConnectedGa4PropertyRequest, - dict, - ], -) -def test_fetch_connected_ga4_property(request_type, transport: str = "grpc"): +def test_create_big_query_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.CreateBigQueryLinkRequest() + + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.fetch_connected_ga4_property), "__call__" + type(client.transport.create_big_query_link), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = analytics_admin.FetchConnectedGa4PropertyResponse( - property="property_value", - ) - response = client.fetch_connected_ga4_property(request) + call.return_value = resources.BigQueryLink() + client.create_big_query_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.FetchConnectedGa4PropertyRequest() assert args[0] == request - # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.FetchConnectedGa4PropertyResponse) - assert response.property == "property_value" + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_fetch_connected_ga4_property_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AnalyticsAdminServiceClient( +@pytest.mark.asyncio +async def test_create_big_query_link_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.CreateBigQueryLinkRequest() + + request.parent = "parent_value" + # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.fetch_connected_ga4_property), "__call__" + type(client.transport.create_big_query_link), "__call__" ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.BigQueryLink() ) - client.fetch_connected_ga4_property() - call.assert_called() + await client.create_big_query_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.FetchConnectedGa4PropertyRequest() + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_fetch_connected_ga4_property_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. +def test_create_big_query_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analytics_admin.FetchConnectedGa4PropertyRequest( - property="property_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.fetch_connected_ga4_property), "__call__" + type(client.transport.create_big_query_link), "__call__" ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.fetch_connected_ga4_property(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.FetchConnectedGa4PropertyRequest( - property="property_value", + # Designate an appropriate return value for the call. + call.return_value = resources.BigQueryLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_big_query_link( + parent="parent_value", + bigquery_link=resources.BigQueryLink(name="name_value"), ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].bigquery_link + mock_val = resources.BigQueryLink(name="name_value") + assert arg == mock_val -def test_fetch_connected_ga4_property_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - # Ensure method has been cached - assert ( - client._transport.fetch_connected_ga4_property - in client._transport._wrapped_methods - ) +def test_create_big_query_link_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_big_query_link( + analytics_admin.CreateBigQueryLinkRequest(), + parent="parent_value", + bigquery_link=resources.BigQueryLink(name="name_value"), ) - client._transport._wrapped_methods[ - client._transport.fetch_connected_ga4_property - ] = mock_rpc - request = {} - client.fetch_connected_ga4_property(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.fetch_connected_ga4_property(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_fetch_connected_ga4_property_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +async def test_create_big_query_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.fetch_connected_ga4_property), "__call__" + type(client.transport.create_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.FetchConnectedGa4PropertyResponse( - property="property_value", - ) - ) - response = await client.fetch_connected_ga4_property() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.FetchConnectedGa4PropertyRequest() - + call.return_value = resources.BigQueryLink() -@pytest.mark.asyncio -async def test_fetch_connected_ga4_property_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.BigQueryLink() ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.fetch_connected_ga4_property - in client._client._transport._wrapped_methods + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_big_query_link( + parent="parent_value", + bigquery_link=resources.BigQueryLink(name="name_value"), ) - # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[ - client._client._transport.fetch_connected_ga4_property - ] = mock_object - - request = {} - await client.fetch_connected_ga4_property(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 - - await client.fetch_connected_ga4_property(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].bigquery_link + mock_val = resources.BigQueryLink(name="name_value") + assert arg == mock_val @pytest.mark.asyncio -async def test_fetch_connected_ga4_property_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.FetchConnectedGa4PropertyRequest, -): +async def test_create_big_query_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_connected_ga4_property), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.FetchConnectedGa4PropertyResponse( - property="property_value", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_big_query_link( + analytics_admin.CreateBigQueryLinkRequest(), + parent="parent_value", + bigquery_link=resources.BigQueryLink(name="name_value"), ) - response = await client.fetch_connected_ga4_property(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analytics_admin.FetchConnectedGa4PropertyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.FetchConnectedGa4PropertyResponse) - assert response.property == "property_value" - - -@pytest.mark.asyncio -async def test_fetch_connected_ga4_property_async_from_dict(): - await test_fetch_connected_ga4_property_async(request_type=dict) @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetAdSenseLinkRequest, + analytics_admin.GetBigQueryLinkRequest, dict, ], ) -def test_get_ad_sense_link(request_type, transport: str = "grpc"): +def test_get_big_query_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -46182,28 +45998,42 @@ def test_get_ad_sense_link(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_ad_sense_link), "__call__" + type(client.transport.get_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AdSenseLink( + call.return_value = resources.BigQueryLink( name="name_value", - ad_client_code="ad_client_code_value", + project="project_value", + daily_export_enabled=True, + streaming_export_enabled=True, + fresh_daily_export_enabled=True, + include_advertising_id=True, + export_streams=["export_streams_value"], + excluded_events=["excluded_events_value"], + dataset_location="dataset_location_value", ) - response = client.get_ad_sense_link(request) + response = client.get_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetAdSenseLinkRequest() + request = analytics_admin.GetBigQueryLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.AdSenseLink) + assert isinstance(response, resources.BigQueryLink) assert response.name == "name_value" - assert response.ad_client_code == "ad_client_code_value" + assert response.project == "project_value" + assert response.daily_export_enabled is True + assert response.streaming_export_enabled is True + assert response.fresh_daily_export_enabled is True + assert response.include_advertising_id is True + assert response.export_streams == ["export_streams_value"] + assert response.excluded_events == ["excluded_events_value"] + assert response.dataset_location == "dataset_location_value" -def test_get_ad_sense_link_empty_call(): +def test_get_big_query_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -46213,18 +46043,18 @@ def test_get_ad_sense_link_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_ad_sense_link), "__call__" + type(client.transport.get_big_query_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_ad_sense_link() + client.get_big_query_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetAdSenseLinkRequest() + assert args[0] == analytics_admin.GetBigQueryLinkRequest() -def test_get_ad_sense_link_non_empty_request_with_auto_populated_field(): +def test_get_big_query_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -46235,26 +46065,26 @@ def test_get_ad_sense_link_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetAdSenseLinkRequest( + request = analytics_admin.GetBigQueryLinkRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_ad_sense_link), "__call__" + type(client.transport.get_big_query_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_ad_sense_link(request=request) + client.get_big_query_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetAdSenseLinkRequest( + assert args[0] == analytics_admin.GetBigQueryLinkRequest( name="name_value", ) -def test_get_ad_sense_link_use_cached_wrapped_rpc(): +def test_get_big_query_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -46268,7 +46098,9 @@ def test_get_ad_sense_link_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_ad_sense_link in client._transport._wrapped_methods + assert ( + client._transport.get_big_query_link in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -46276,15 +46108,15 @@ def test_get_ad_sense_link_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_ad_sense_link + client._transport.get_big_query_link ] = mock_rpc request = {} - client.get_ad_sense_link(request) + client.get_big_query_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_ad_sense_link(request) + client.get_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -46292,7 +46124,7 @@ def test_get_ad_sense_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_ad_sense_link_empty_call_async(): +async def test_get_big_query_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -46302,23 +46134,30 @@ async def test_get_ad_sense_link_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_ad_sense_link), "__call__" + type(client.transport.get_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AdSenseLink( + resources.BigQueryLink( name="name_value", - ad_client_code="ad_client_code_value", + project="project_value", + daily_export_enabled=True, + streaming_export_enabled=True, + fresh_daily_export_enabled=True, + include_advertising_id=True, + export_streams=["export_streams_value"], + excluded_events=["excluded_events_value"], + dataset_location="dataset_location_value", ) ) - response = await client.get_ad_sense_link() + response = await client.get_big_query_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetAdSenseLinkRequest() + assert args[0] == analytics_admin.GetBigQueryLinkRequest() @pytest.mark.asyncio -async def test_get_ad_sense_link_async_use_cached_wrapped_rpc( +async def test_get_big_query_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -46335,32 +46174,33 @@ async def test_get_ad_sense_link_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_ad_sense_link + client._client._transport.get_big_query_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_ad_sense_link - ] = mock_object + client._client._transport.get_big_query_link + ] = mock_rpc request = {} - await 
client.get_ad_sense_link(request) + await client.get_big_query_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_ad_sense_link(request) + await client.get_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_ad_sense_link_async( - transport: str = "grpc_asyncio", request_type=analytics_admin.GetAdSenseLinkRequest +async def test_get_big_query_link_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.GetBigQueryLinkRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -46373,51 +46213,65 @@ async def test_get_ad_sense_link_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_ad_sense_link), "__call__" + type(client.transport.get_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AdSenseLink( + resources.BigQueryLink( name="name_value", - ad_client_code="ad_client_code_value", + project="project_value", + daily_export_enabled=True, + streaming_export_enabled=True, + fresh_daily_export_enabled=True, + include_advertising_id=True, + export_streams=["export_streams_value"], + excluded_events=["excluded_events_value"], + dataset_location="dataset_location_value", ) ) - response = await client.get_ad_sense_link(request) + response = await client.get_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetAdSenseLinkRequest() + request = analytics_admin.GetBigQueryLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.AdSenseLink) + assert isinstance(response, resources.BigQueryLink) assert response.name == "name_value" - assert response.ad_client_code == "ad_client_code_value" + assert response.project == "project_value" + assert response.daily_export_enabled is True + assert response.streaming_export_enabled is True + assert response.fresh_daily_export_enabled is True + assert response.include_advertising_id is True + assert response.export_streams == ["export_streams_value"] + assert response.excluded_events == ["excluded_events_value"] + assert response.dataset_location == "dataset_location_value" @pytest.mark.asyncio -async def test_get_ad_sense_link_async_from_dict(): - await test_get_ad_sense_link_async(request_type=dict) +async def test_get_big_query_link_async_from_dict(): + await test_get_big_query_link_async(request_type=dict) -def test_get_ad_sense_link_field_headers(): +def test_get_big_query_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetAdSenseLinkRequest() + request = analytics_admin.GetBigQueryLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_ad_sense_link), "__call__" + type(client.transport.get_big_query_link), "__call__" ) as call: - call.return_value = resources.AdSenseLink() - client.get_ad_sense_link(request) + call.return_value = resources.BigQueryLink() + client.get_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -46433,25 +46287,25 @@ def test_get_ad_sense_link_field_headers(): @pytest.mark.asyncio -async def test_get_ad_sense_link_field_headers_async(): +async def test_get_big_query_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetAdSenseLinkRequest() + request = analytics_admin.GetBigQueryLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_ad_sense_link), "__call__" + type(client.transport.get_big_query_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AdSenseLink() + resources.BigQueryLink() ) - await client.get_ad_sense_link(request) + await client.get_big_query_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -46466,20 +46320,20 @@ async def test_get_ad_sense_link_field_headers_async(): ) in kw["metadata"] -def test_get_ad_sense_link_flattened(): +def test_get_big_query_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_ad_sense_link), "__call__" + type(client.transport.get_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AdSenseLink() + call.return_value = resources.BigQueryLink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_ad_sense_link( + client.get_big_query_link( name="name_value", ) @@ -46492,7 +46346,7 @@ def test_get_ad_sense_link_flattened(): assert arg == mock_val -def test_get_ad_sense_link_flattened_error(): +def test_get_big_query_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -46500,31 +46354,31 @@ def test_get_ad_sense_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_ad_sense_link( - analytics_admin.GetAdSenseLinkRequest(), + client.get_big_query_link( + analytics_admin.GetBigQueryLinkRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_ad_sense_link_flattened_async(): +async def test_get_big_query_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_ad_sense_link), "__call__" + type(client.transport.get_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AdSenseLink() + call.return_value = resources.BigQueryLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AdSenseLink() + resources.BigQueryLink() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_ad_sense_link( + response = await client.get_big_query_link( name="name_value", ) @@ -46538,7 +46392,7 @@ async def test_get_ad_sense_link_flattened_async(): @pytest.mark.asyncio -async def test_get_ad_sense_link_flattened_error_async(): +async def test_get_big_query_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -46546,8 +46400,8 @@ async def test_get_ad_sense_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_ad_sense_link( - analytics_admin.GetAdSenseLinkRequest(), + await client.get_big_query_link( + analytics_admin.GetBigQueryLinkRequest(), name="name_value", ) @@ -46555,11 +46409,11 @@ async def test_get_ad_sense_link_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateAdSenseLinkRequest, + analytics_admin.ListBigQueryLinksRequest, dict, ], ) -def test_create_ad_sense_link(request_type, transport: str = "grpc"): +def test_list_big_query_links(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -46571,28 +46425,26 @@ def test_create_ad_sense_link(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_ad_sense_link), "__call__" + type(client.transport.list_big_query_links), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.AdSenseLink( - name="name_value", - ad_client_code="ad_client_code_value", + call.return_value = analytics_admin.ListBigQueryLinksResponse( + next_page_token="next_page_token_value", ) - response = client.create_ad_sense_link(request) + response = client.list_big_query_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateAdSenseLinkRequest() + request = analytics_admin.ListBigQueryLinksRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.AdSenseLink) - assert response.name == "name_value" - assert response.ad_client_code == "ad_client_code_value" + assert isinstance(response, pagers.ListBigQueryLinksPager) + assert response.next_page_token == "next_page_token_value" -def test_create_ad_sense_link_empty_call(): +def test_list_big_query_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -46602,18 +46454,18 @@ def test_create_ad_sense_link_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_ad_sense_link), "__call__" + type(client.transport.list_big_query_links), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_ad_sense_link() + client.list_big_query_links() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateAdSenseLinkRequest() + assert args[0] == analytics_admin.ListBigQueryLinksRequest() -def test_create_ad_sense_link_non_empty_request_with_auto_populated_field(): +def test_list_big_query_links_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -46624,26 +46476,28 @@ def test_create_ad_sense_link_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.CreateAdSenseLinkRequest( + request = analytics_admin.ListBigQueryLinksRequest( parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_ad_sense_link), "__call__" + type(client.transport.list_big_query_links), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_ad_sense_link(request=request) + client.list_big_query_links(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateAdSenseLinkRequest( + assert args[0] == analytics_admin.ListBigQueryLinksRequest( parent="parent_value", + page_token="page_token_value", ) -def test_create_ad_sense_link_use_cached_wrapped_rpc(): +def test_list_big_query_links_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -46658,7 +46512,7 @@ def test_create_ad_sense_link_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_ad_sense_link in client._transport._wrapped_methods + client._transport.list_big_query_links in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -46667,15 +46521,15 @@ def test_create_ad_sense_link_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_ad_sense_link + client._transport.list_big_query_links ] = mock_rpc request = {} - client.create_ad_sense_link(request) + client.list_big_query_links(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_ad_sense_link(request) + client.list_big_query_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -46683,7 +46537,7 @@ def test_create_ad_sense_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_ad_sense_link_empty_call_async(): +async def test_list_big_query_links_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -46693,23 +46547,22 @@ async def test_create_ad_sense_link_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_ad_sense_link), "__call__" + type(client.transport.list_big_query_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AdSenseLink( - name="name_value", - ad_client_code="ad_client_code_value", + analytics_admin.ListBigQueryLinksResponse( + next_page_token="next_page_token_value", ) ) - response = await client.create_ad_sense_link() + response = await client.list_big_query_links() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateAdSenseLinkRequest() + assert args[0] == analytics_admin.ListBigQueryLinksRequest() @pytest.mark.asyncio -async def test_create_ad_sense_link_async_use_cached_wrapped_rpc( +async def test_list_big_query_links_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -46726,33 +46579,34 @@ async def test_create_ad_sense_link_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_ad_sense_link + client._client._transport.list_big_query_links in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_ad_sense_link - ] = mock_object + client._client._transport.list_big_query_links + ] = mock_rpc request = {} - await client.create_ad_sense_link(request) + await client.list_big_query_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_ad_sense_link(request) + await client.list_big_query_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_ad_sense_link_async( +async def test_list_big_query_links_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateAdSenseLinkRequest, + request_type=analytics_admin.ListBigQueryLinksRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -46765,51 +46619,49 @@ async def test_create_ad_sense_link_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_ad_sense_link), "__call__" + type(client.transport.list_big_query_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AdSenseLink( - name="name_value", - ad_client_code="ad_client_code_value", + analytics_admin.ListBigQueryLinksResponse( + next_page_token="next_page_token_value", ) ) - response = await client.create_ad_sense_link(request) + response = await client.list_big_query_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateAdSenseLinkRequest() + request = analytics_admin.ListBigQueryLinksRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AdSenseLink) - assert response.name == "name_value" - assert response.ad_client_code == "ad_client_code_value" + assert isinstance(response, pagers.ListBigQueryLinksAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_create_ad_sense_link_async_from_dict(): - await test_create_ad_sense_link_async(request_type=dict) +async def test_list_big_query_links_async_from_dict(): + await test_list_big_query_links_async(request_type=dict) -def test_create_ad_sense_link_field_headers(): +def test_list_big_query_links_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateAdSenseLinkRequest() + request = analytics_admin.ListBigQueryLinksRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_ad_sense_link), "__call__" + type(client.transport.list_big_query_links), "__call__" ) as call: - call.return_value = resources.AdSenseLink() - client.create_ad_sense_link(request) + call.return_value = analytics_admin.ListBigQueryLinksResponse() + client.list_big_query_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -46825,25 +46677,25 @@ def test_create_ad_sense_link_field_headers(): @pytest.mark.asyncio -async def test_create_ad_sense_link_field_headers_async(): +async def test_list_big_query_links_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_admin.CreateAdSenseLinkRequest() + request = analytics_admin.ListBigQueryLinksRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_ad_sense_link), "__call__" + type(client.transport.list_big_query_links), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AdSenseLink() + analytics_admin.ListBigQueryLinksResponse() ) - await client.create_ad_sense_link(request) + await client.list_big_query_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -46858,22 +46710,21 @@ async def test_create_ad_sense_link_field_headers_async(): ) in kw["metadata"] -def test_create_ad_sense_link_flattened(): +def test_list_big_query_links_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_ad_sense_link), "__call__" + type(client.transport.list_big_query_links), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AdSenseLink() + call.return_value = analytics_admin.ListBigQueryLinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.create_ad_sense_link( + client.list_big_query_links( parent="parent_value", - adsense_link=resources.AdSenseLink(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -46883,12 +46734,9 @@ def test_create_ad_sense_link_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].adsense_link - mock_val = resources.AdSenseLink(name="name_value") - assert arg == mock_val -def test_create_ad_sense_link_flattened_error(): +def test_list_big_query_links_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -46896,34 +46744,32 @@ def test_create_ad_sense_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_ad_sense_link( - analytics_admin.CreateAdSenseLinkRequest(), + client.list_big_query_links( + analytics_admin.ListBigQueryLinksRequest(), parent="parent_value", - adsense_link=resources.AdSenseLink(name="name_value"), ) @pytest.mark.asyncio -async def test_create_ad_sense_link_flattened_async(): +async def test_list_big_query_links_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_ad_sense_link), "__call__" + type(client.transport.list_big_query_links), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AdSenseLink() + call.return_value = analytics_admin.ListBigQueryLinksResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AdSenseLink() + analytics_admin.ListBigQueryLinksResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_ad_sense_link( + response = await client.list_big_query_links( parent="parent_value", - adsense_link=resources.AdSenseLink(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -46933,13 +46779,10 @@ async def test_create_ad_sense_link_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].adsense_link - mock_val = resources.AdSenseLink(name="name_value") - assert arg == mock_val @pytest.mark.asyncio -async def test_create_ad_sense_link_flattened_error_async(): +async def test_list_big_query_links_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -46947,21 +46790,222 @@ async def test_create_ad_sense_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_ad_sense_link( - analytics_admin.CreateAdSenseLinkRequest(), + await client.list_big_query_links( + analytics_admin.ListBigQueryLinksRequest(), parent="parent_value", - adsense_link=resources.AdSenseLink(name="name_value"), ) -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.DeleteAdSenseLinkRequest, - dict, - ], +def test_list_big_query_links_pager(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_big_query_links), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + resources.BigQueryLink(), + resources.BigQueryLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[], + next_page_token="def", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + resources.BigQueryLink(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_big_query_links(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.BigQueryLink) for i in results) + + +def test_list_big_query_links_pages(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_big_query_links), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + resources.BigQueryLink(), + resources.BigQueryLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[], + next_page_token="def", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + resources.BigQueryLink(), + ], + ), + RuntimeError, + ) + pages = list(client.list_big_query_links(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_big_query_links_async_pager(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_big_query_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + resources.BigQueryLink(), + resources.BigQueryLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[], + next_page_token="def", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + resources.BigQueryLink(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_big_query_links( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.BigQueryLink) for i in responses) + + +@pytest.mark.asyncio +async def test_list_big_query_links_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_big_query_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + resources.BigQueryLink(), + resources.BigQueryLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[], + next_page_token="def", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + resources.BigQueryLink(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_big_query_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.DeleteBigQueryLinkRequest, + dict, + ], ) -def test_delete_ad_sense_link(request_type, transport: str = "grpc"): +def test_delete_big_query_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -46973,23 +47017,23 @@ def test_delete_ad_sense_link(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_ad_sense_link), "__call__" + type(client.transport.delete_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_ad_sense_link(request) + response = client.delete_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteAdSenseLinkRequest() + request = analytics_admin.DeleteBigQueryLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. assert response is None -def test_delete_ad_sense_link_empty_call(): +def test_delete_big_query_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -46999,18 +47043,18 @@ def test_delete_ad_sense_link_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_ad_sense_link), "__call__" + type(client.transport.delete_big_query_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_ad_sense_link() + client.delete_big_query_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteAdSenseLinkRequest() + assert args[0] == analytics_admin.DeleteBigQueryLinkRequest() -def test_delete_ad_sense_link_non_empty_request_with_auto_populated_field(): +def test_delete_big_query_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -47021,26 +47065,26 @@ def test_delete_ad_sense_link_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.DeleteAdSenseLinkRequest( + request = analytics_admin.DeleteBigQueryLinkRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_ad_sense_link), "__call__" + type(client.transport.delete_big_query_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_ad_sense_link(request=request) + client.delete_big_query_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteAdSenseLinkRequest( + assert args[0] == analytics_admin.DeleteBigQueryLinkRequest( name="name_value", ) -def test_delete_ad_sense_link_use_cached_wrapped_rpc(): +def test_delete_big_query_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -47055,7 +47099,8 @@ def test_delete_ad_sense_link_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_ad_sense_link in client._transport._wrapped_methods + client._transport.delete_big_query_link + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -47064,15 +47109,15 @@ def test_delete_ad_sense_link_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_ad_sense_link + client._transport.delete_big_query_link ] = mock_rpc request = {} - client.delete_ad_sense_link(request) + client.delete_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_ad_sense_link(request) + client.delete_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -47080,7 +47125,7 @@ def test_delete_ad_sense_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_ad_sense_link_empty_call_async(): +async def test_delete_big_query_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -47090,18 +47135,18 @@ async def test_delete_ad_sense_link_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_ad_sense_link), "__call__" + type(client.transport.delete_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_ad_sense_link() + response = await client.delete_big_query_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteAdSenseLinkRequest() + assert args[0] == analytics_admin.DeleteBigQueryLinkRequest() @pytest.mark.asyncio -async def test_delete_ad_sense_link_async_use_cached_wrapped_rpc( +async def test_delete_big_query_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -47118,33 +47163,34 @@ async def test_delete_ad_sense_link_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_ad_sense_link + client._client._transport.delete_big_query_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() 
client._client._transport._wrapped_methods[ - client._client._transport.delete_ad_sense_link - ] = mock_object + client._client._transport.delete_big_query_link + ] = mock_rpc request = {} - await client.delete_ad_sense_link(request) + await client.delete_big_query_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_ad_sense_link(request) + await client.delete_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_ad_sense_link_async( +async def test_delete_big_query_link_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteAdSenseLinkRequest, + request_type=analytics_admin.DeleteBigQueryLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -47157,16 +47203,16 @@ async def test_delete_ad_sense_link_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_ad_sense_link), "__call__" + type(client.transport.delete_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_ad_sense_link(request) + response = await client.delete_big_query_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteAdSenseLinkRequest() + request = analytics_admin.DeleteBigQueryLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -47174,27 +47220,27 @@ async def test_delete_ad_sense_link_async( @pytest.mark.asyncio -async def test_delete_ad_sense_link_async_from_dict(): - await test_delete_ad_sense_link_async(request_type=dict) +async def test_delete_big_query_link_async_from_dict(): + await test_delete_big_query_link_async(request_type=dict) -def test_delete_ad_sense_link_field_headers(): +def test_delete_big_query_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteAdSenseLinkRequest() + request = analytics_admin.DeleteBigQueryLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_ad_sense_link), "__call__" + type(client.transport.delete_big_query_link), "__call__" ) as call: call.return_value = None - client.delete_ad_sense_link(request) + client.delete_big_query_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -47210,23 +47256,23 @@ def test_delete_ad_sense_link_field_headers(): @pytest.mark.asyncio -async def test_delete_ad_sense_link_field_headers_async(): +async def test_delete_big_query_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteAdSenseLinkRequest() + request = analytics_admin.DeleteBigQueryLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_ad_sense_link), "__call__" + type(client.transport.delete_big_query_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_ad_sense_link(request) + await client.delete_big_query_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -47241,20 +47287,20 @@ async def test_delete_ad_sense_link_field_headers_async(): ) in kw["metadata"] -def test_delete_ad_sense_link_flattened(): +def test_delete_big_query_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_ad_sense_link), "__call__" + type(client.transport.delete_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_ad_sense_link( + client.delete_big_query_link( name="name_value", ) @@ -47267,7 +47313,7 @@ def test_delete_ad_sense_link_flattened(): assert arg == mock_val -def test_delete_ad_sense_link_flattened_error(): +def test_delete_big_query_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -47275,21 +47321,21 @@ def test_delete_ad_sense_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_ad_sense_link( - analytics_admin.DeleteAdSenseLinkRequest(), + client.delete_big_query_link( + analytics_admin.DeleteBigQueryLinkRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_ad_sense_link_flattened_async(): +async def test_delete_big_query_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_ad_sense_link), "__call__" + type(client.transport.delete_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -47297,7 +47343,7 @@ async def test_delete_ad_sense_link_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_ad_sense_link( + response = await client.delete_big_query_link( name="name_value", ) @@ -47311,7 +47357,7 @@ async def test_delete_ad_sense_link_flattened_async(): @pytest.mark.asyncio -async def test_delete_ad_sense_link_flattened_error_async(): +async def test_delete_big_query_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -47319,8 +47365,8 @@ async def test_delete_ad_sense_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_ad_sense_link( - analytics_admin.DeleteAdSenseLinkRequest(), + await client.delete_big_query_link( + analytics_admin.DeleteBigQueryLinkRequest(), name="name_value", ) @@ -47328,11 +47374,11 @@ async def test_delete_ad_sense_link_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListAdSenseLinksRequest, + analytics_admin.UpdateBigQueryLinkRequest, dict, ], ) -def test_list_ad_sense_links(request_type, transport: str = "grpc"): +def test_update_big_query_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -47344,26 +47390,42 @@ def test_list_ad_sense_links(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_ad_sense_links), "__call__" + type(client.transport.update_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListAdSenseLinksResponse( - next_page_token="next_page_token_value", + call.return_value = resources.BigQueryLink( + name="name_value", + project="project_value", + daily_export_enabled=True, + streaming_export_enabled=True, + fresh_daily_export_enabled=True, + include_advertising_id=True, + export_streams=["export_streams_value"], + excluded_events=["excluded_events_value"], + dataset_location="dataset_location_value", ) - response = client.list_ad_sense_links(request) + response = client.update_big_query_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListAdSenseLinksRequest() + request = analytics_admin.UpdateBigQueryLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAdSenseLinksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.BigQueryLink) + assert response.name == "name_value" + assert response.project == "project_value" + assert response.daily_export_enabled is True + assert response.streaming_export_enabled is True + assert response.fresh_daily_export_enabled is True + assert response.include_advertising_id is True + assert response.export_streams == ["export_streams_value"] + assert response.excluded_events == ["excluded_events_value"] + assert response.dataset_location == "dataset_location_value" -def test_list_ad_sense_links_empty_call(): +def test_update_big_query_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -47373,18 +47435,18 @@ def test_list_ad_sense_links_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_ad_sense_links), "__call__" + type(client.transport.update_big_query_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_ad_sense_links() + client.update_big_query_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListAdSenseLinksRequest() + assert args[0] == analytics_admin.UpdateBigQueryLinkRequest() -def test_list_ad_sense_links_non_empty_request_with_auto_populated_field(): +def test_update_big_query_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AnalyticsAdminServiceClient( @@ -47395,28 +47457,22 @@ def test_list_ad_sense_links_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListAdSenseLinksRequest( - parent="parent_value", - page_token="page_token_value", - ) + request = analytics_admin.UpdateBigQueryLinkRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_ad_sense_links), "__call__" + type(client.transport.update_big_query_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_ad_sense_links(request=request) + client.update_big_query_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListAdSenseLinksRequest( - parent="parent_value", - page_token="page_token_value", - ) + assert args[0] == analytics_admin.UpdateBigQueryLinkRequest() -def test_list_ad_sense_links_use_cached_wrapped_rpc(): +def test_update_big_query_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -47431,7 +47487,8 @@ def test_list_ad_sense_links_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_ad_sense_links in client._transport._wrapped_methods + client._transport.update_big_query_link + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -47440,15 +47497,15 @@ def test_list_ad_sense_links_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.list_ad_sense_links + client._transport.update_big_query_link ] = mock_rpc request = {} - client.list_ad_sense_links(request) + client.update_big_query_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_ad_sense_links(request) + client.update_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -47456,7 +47513,7 @@ def test_list_ad_sense_links_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_ad_sense_links_empty_call_async(): +async def test_update_big_query_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -47466,22 +47523,30 @@ async def test_list_ad_sense_links_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_ad_sense_links), "__call__" + type(client.transport.update_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListAdSenseLinksResponse( - next_page_token="next_page_token_value", + resources.BigQueryLink( + name="name_value", + project="project_value", + daily_export_enabled=True, + streaming_export_enabled=True, + fresh_daily_export_enabled=True, + include_advertising_id=True, + export_streams=["export_streams_value"], + excluded_events=["excluded_events_value"], + dataset_location="dataset_location_value", ) ) - response = await client.list_ad_sense_links() + response = await client.update_big_query_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListAdSenseLinksRequest() + assert args[0] == analytics_admin.UpdateBigQueryLinkRequest() @pytest.mark.asyncio -async def test_list_ad_sense_links_async_use_cached_wrapped_rpc( +async def test_update_big_query_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -47498,33 +47563,34 @@ async def test_list_ad_sense_links_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_ad_sense_links + client._client._transport.update_big_query_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_ad_sense_links - ] = mock_object + client._client._transport.update_big_query_link + ] = mock_rpc request = {} - await client.list_ad_sense_links(request) + await client.update_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_ad_sense_links(request) + await client.update_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_ad_sense_links_async( +async def test_update_big_query_link_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ListAdSenseLinksRequest, + request_type=analytics_admin.UpdateBigQueryLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -47537,49 +47603,65 @@ async def test_list_ad_sense_links_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_ad_sense_links), "__call__" + type(client.transport.update_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListAdSenseLinksResponse( - next_page_token="next_page_token_value", + resources.BigQueryLink( + name="name_value", + project="project_value", + daily_export_enabled=True, + streaming_export_enabled=True, + fresh_daily_export_enabled=True, + include_advertising_id=True, + export_streams=["export_streams_value"], + excluded_events=["excluded_events_value"], + dataset_location="dataset_location_value", ) ) - response = await client.list_ad_sense_links(request) + response = await client.update_big_query_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListAdSenseLinksRequest() + request = analytics_admin.UpdateBigQueryLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAdSenseLinksAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.BigQueryLink) + assert response.name == "name_value" + assert response.project == "project_value" + assert response.daily_export_enabled is True + assert response.streaming_export_enabled is True + assert response.fresh_daily_export_enabled is True + assert response.include_advertising_id is True + assert response.export_streams == ["export_streams_value"] + assert response.excluded_events == ["excluded_events_value"] + assert response.dataset_location == "dataset_location_value" @pytest.mark.asyncio -async def test_list_ad_sense_links_async_from_dict(): - await test_list_ad_sense_links_async(request_type=dict) +async def test_update_big_query_link_async_from_dict(): + await test_update_big_query_link_async(request_type=dict) -def test_list_ad_sense_links_field_headers(): +def test_update_big_query_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListAdSenseLinksRequest() + request = analytics_admin.UpdateBigQueryLinkRequest() - request.parent = "parent_value" + request.bigquery_link.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_ad_sense_links), "__call__" + type(client.transport.update_big_query_link), "__call__" ) as call: - call.return_value = analytics_admin.ListAdSenseLinksResponse() - client.list_ad_sense_links(request) + call.return_value = resources.BigQueryLink() + client.update_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -47590,30 +47672,30 @@ def test_list_ad_sense_links_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "bigquery_link.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_ad_sense_links_field_headers_async(): +async def test_update_big_query_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListAdSenseLinksRequest() + request = analytics_admin.UpdateBigQueryLinkRequest() - request.parent = "parent_value" + request.bigquery_link.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_ad_sense_links), "__call__" + type(client.transport.update_big_query_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListAdSenseLinksResponse() + resources.BigQueryLink() ) - await client.list_ad_sense_links(request) + await client.update_big_query_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -47624,37 +47706,41 @@ async def test_list_ad_sense_links_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "bigquery_link.name=name_value", ) in kw["metadata"] -def test_list_ad_sense_links_flattened(): +def test_update_big_query_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_ad_sense_links), "__call__" + type(client.transport.update_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.ListAdSenseLinksResponse() + call.return_value = resources.BigQueryLink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_ad_sense_links( - parent="parent_value", + client.update_big_query_link( + bigquery_link=resources.BigQueryLink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].bigquery_link + mock_val = resources.BigQueryLink(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_list_ad_sense_links_flattened_error(): +def test_update_big_query_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -47662,45 +47748,50 @@ def test_list_ad_sense_links_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_ad_sense_links( - analytics_admin.ListAdSenseLinksRequest(), - parent="parent_value", + client.update_big_query_link( + analytics_admin.UpdateBigQueryLinkRequest(), + bigquery_link=resources.BigQueryLink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_list_ad_sense_links_flattened_async(): +async def test_update_big_query_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_ad_sense_links), "__call__" + type(client.transport.update_big_query_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListAdSenseLinksResponse() + call.return_value = resources.BigQueryLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListAdSenseLinksResponse() + resources.BigQueryLink() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_ad_sense_links( - parent="parent_value", + response = await client.update_big_query_link( + bigquery_link=resources.BigQueryLink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].bigquery_link + mock_val = resources.BigQueryLink(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_list_ad_sense_links_flattened_error_async(): +async def test_update_big_query_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -47708,259 +47799,74 @@ async def test_list_ad_sense_links_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_ad_sense_links( - analytics_admin.ListAdSenseLinksRequest(), - parent="parent_value", + await client.update_big_query_link( + analytics_admin.UpdateBigQueryLinkRequest(), + bigquery_link=resources.BigQueryLink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_ad_sense_links_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetEnhancedMeasurementSettingsRequest, + dict, + ], +) +def test_get_enhanced_measurement_settings(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_ad_sense_links), "__call__" + type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - resources.AdSenseLink(), - resources.AdSenseLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[], - next_page_token="def", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - resources.AdSenseLink(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = resources.EnhancedMeasurementSettings( + name="name_value", + stream_enabled=True, + scrolls_enabled=True, + outbound_clicks_enabled=True, + site_search_enabled=True, + video_engagement_enabled=True, + file_downloads_enabled=True, + page_changes_enabled=True, + form_interactions_enabled=True, + search_query_parameter="search_query_parameter_value", + uri_query_parameter="uri_query_parameter_value", ) - pager = client.list_ad_sense_links(request={}, retry=retry, timeout=timeout) + response = client.get_enhanced_measurement_settings(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.GetEnhancedMeasurementSettingsRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.AdSenseLink) for i in results) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.EnhancedMeasurementSettings) + assert response.name == "name_value" + assert response.stream_enabled is True + assert response.scrolls_enabled is True + assert response.outbound_clicks_enabled is True + assert response.site_search_enabled is True + assert response.video_engagement_enabled is True + assert response.file_downloads_enabled is True + assert response.page_changes_enabled is True + assert response.form_interactions_enabled is True + assert response.search_query_parameter == "search_query_parameter_value" + assert response.uri_query_parameter == "uri_query_parameter_value" -def test_list_ad_sense_links_pages(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_ad_sense_links), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - resources.AdSenseLink(), - resources.AdSenseLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[], - next_page_token="def", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - resources.AdSenseLink(), - ], - ), - RuntimeError, - ) - pages = list(client.list_ad_sense_links(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_ad_sense_links_async_pager(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_ad_sense_links), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - resources.AdSenseLink(), - resources.AdSenseLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[], - next_page_token="def", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - resources.AdSenseLink(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_ad_sense_links( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.AdSenseLink) for i in responses) - - -@pytest.mark.asyncio -async def test_list_ad_sense_links_async_pages(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_ad_sense_links), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - resources.AdSenseLink(), - resources.AdSenseLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[], - next_page_token="def", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - resources.AdSenseLink(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_ad_sense_links(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.GetEventCreateRuleRequest, - dict, - ], -) -def test_get_event_create_rule(request_type, transport: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_event_create_rule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = event_create_and_edit.EventCreateRule( - name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, - ) - response = client.get_event_create_rule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analytics_admin.GetEventCreateRuleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, event_create_and_edit.EventCreateRule) - assert response.name == "name_value" - assert response.destination_event == "destination_event_value" - assert response.source_copy_parameters is True - - -def test_get_event_create_rule_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_get_enhanced_measurement_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", @@ -47968,18 +47874,18 @@ def test_get_event_create_rule_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_event_create_rule), "__call__" + type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_event_create_rule() + client.get_enhanced_measurement_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetEventCreateRuleRequest() + assert args[0] == analytics_admin.GetEnhancedMeasurementSettingsRequest() -def test_get_event_create_rule_non_empty_request_with_auto_populated_field(): +def test_get_enhanced_measurement_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AnalyticsAdminServiceClient( @@ -47990,26 +47896,26 @@ def test_get_event_create_rule_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetEventCreateRuleRequest( + request = analytics_admin.GetEnhancedMeasurementSettingsRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_event_create_rule), "__call__" + type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_event_create_rule(request=request) + client.get_enhanced_measurement_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetEventCreateRuleRequest( + assert args[0] == analytics_admin.GetEnhancedMeasurementSettingsRequest( name="name_value", ) -def test_get_event_create_rule_use_cached_wrapped_rpc(): +def test_get_enhanced_measurement_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -48024,7 +47930,7 @@ def test_get_event_create_rule_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_event_create_rule + client._transport.get_enhanced_measurement_settings in client._transport._wrapped_methods ) @@ -48034,15 +47940,15 @@ def test_get_event_create_rule_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.get_event_create_rule + client._transport.get_enhanced_measurement_settings ] = mock_rpc request = {} - client.get_event_create_rule(request) + client.get_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_event_create_rule(request) + client.get_enhanced_measurement_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -48050,7 +47956,7 @@ def test_get_event_create_rule_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_event_create_rule_empty_call_async(): +async def test_get_enhanced_measurement_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -48060,24 +47966,32 @@ async def test_get_event_create_rule_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_event_create_rule), "__call__" + type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - event_create_and_edit.EventCreateRule( + resources.EnhancedMeasurementSettings( name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, + stream_enabled=True, + scrolls_enabled=True, + outbound_clicks_enabled=True, + site_search_enabled=True, + video_engagement_enabled=True, + file_downloads_enabled=True, + page_changes_enabled=True, + form_interactions_enabled=True, + search_query_parameter="search_query_parameter_value", + uri_query_parameter="uri_query_parameter_value", ) ) - response = await client.get_event_create_rule() + response = await client.get_enhanced_measurement_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetEventCreateRuleRequest() + assert args[0] == analytics_admin.GetEnhancedMeasurementSettingsRequest() @pytest.mark.asyncio -async def test_get_event_create_rule_async_use_cached_wrapped_rpc( +async def test_get_enhanced_measurement_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -48094,33 +48008,34 @@ async def test_get_event_create_rule_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_event_create_rule + client._client._transport.get_enhanced_measurement_settings in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_event_create_rule - ] = mock_object + client._client._transport.get_enhanced_measurement_settings + ] = mock_rpc request = {} - await client.get_event_create_rule(request) + await client.get_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_event_create_rule(request) + await client.get_enhanced_measurement_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_event_create_rule_async( +async def test_get_enhanced_measurement_settings_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.GetEventCreateRuleRequest, + request_type=analytics_admin.GetEnhancedMeasurementSettingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -48133,53 +48048,69 @@ async def test_get_event_create_rule_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_event_create_rule), "__call__" + type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - event_create_and_edit.EventCreateRule( + resources.EnhancedMeasurementSettings( name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, + stream_enabled=True, + scrolls_enabled=True, + outbound_clicks_enabled=True, + site_search_enabled=True, + video_engagement_enabled=True, + file_downloads_enabled=True, + page_changes_enabled=True, + form_interactions_enabled=True, + search_query_parameter="search_query_parameter_value", + uri_query_parameter="uri_query_parameter_value", ) ) - response = await client.get_event_create_rule(request) + response = await client.get_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetEventCreateRuleRequest() + request = analytics_admin.GetEnhancedMeasurementSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, event_create_and_edit.EventCreateRule) + assert isinstance(response, resources.EnhancedMeasurementSettings) assert response.name == "name_value" - assert response.destination_event == "destination_event_value" - assert response.source_copy_parameters is True + assert response.stream_enabled is True + assert response.scrolls_enabled is True + assert response.outbound_clicks_enabled is True + assert response.site_search_enabled is True + assert response.video_engagement_enabled is True + assert response.file_downloads_enabled is True + assert response.page_changes_enabled is True + assert response.form_interactions_enabled is True + assert response.search_query_parameter == "search_query_parameter_value" + assert response.uri_query_parameter == "uri_query_parameter_value" @pytest.mark.asyncio -async def test_get_event_create_rule_async_from_dict(): - await test_get_event_create_rule_async(request_type=dict) +async def test_get_enhanced_measurement_settings_async_from_dict(): + await test_get_enhanced_measurement_settings_async(request_type=dict) -def test_get_event_create_rule_field_headers(): +def test_get_enhanced_measurement_settings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetEventCreateRuleRequest() + request = analytics_admin.GetEnhancedMeasurementSettingsRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_event_create_rule), "__call__" + type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: - call.return_value = event_create_and_edit.EventCreateRule() - client.get_event_create_rule(request) + call.return_value = resources.EnhancedMeasurementSettings() + client.get_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -48195,25 +48126,25 @@ def test_get_event_create_rule_field_headers(): @pytest.mark.asyncio -async def test_get_event_create_rule_field_headers_async(): +async def test_get_enhanced_measurement_settings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetEventCreateRuleRequest() + request = analytics_admin.GetEnhancedMeasurementSettingsRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_event_create_rule), "__call__" + type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - event_create_and_edit.EventCreateRule() + resources.EnhancedMeasurementSettings() ) - await client.get_event_create_rule(request) + await client.get_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -48228,20 +48159,20 @@ async def test_get_event_create_rule_field_headers_async(): ) in kw["metadata"] -def test_get_event_create_rule_flattened(): +def test_get_enhanced_measurement_settings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_event_create_rule), "__call__" + type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = event_create_and_edit.EventCreateRule() + call.return_value = resources.EnhancedMeasurementSettings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_event_create_rule( + client.get_enhanced_measurement_settings( name="name_value", ) @@ -48254,7 +48185,7 @@ def test_get_event_create_rule_flattened(): assert arg == mock_val -def test_get_event_create_rule_flattened_error(): +def test_get_enhanced_measurement_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -48262,31 +48193,31 @@ def test_get_event_create_rule_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_event_create_rule( - analytics_admin.GetEventCreateRuleRequest(), + client.get_enhanced_measurement_settings( + analytics_admin.GetEnhancedMeasurementSettingsRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_event_create_rule_flattened_async(): +async def test_get_enhanced_measurement_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_event_create_rule), "__call__" + type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = event_create_and_edit.EventCreateRule() + call.return_value = resources.EnhancedMeasurementSettings() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - event_create_and_edit.EventCreateRule() + resources.EnhancedMeasurementSettings() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_event_create_rule( + response = await client.get_enhanced_measurement_settings( name="name_value", ) @@ -48300,7 +48231,7 @@ async def test_get_event_create_rule_flattened_async(): @pytest.mark.asyncio -async def test_get_event_create_rule_flattened_error_async(): +async def test_get_enhanced_measurement_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -48308,8 +48239,8 @@ async def test_get_event_create_rule_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_event_create_rule( - analytics_admin.GetEventCreateRuleRequest(), + await client.get_enhanced_measurement_settings( + analytics_admin.GetEnhancedMeasurementSettingsRequest(), name="name_value", ) @@ -48317,11 +48248,11 @@ async def test_get_event_create_rule_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListEventCreateRulesRequest, + analytics_admin.UpdateEnhancedMeasurementSettingsRequest, dict, ], ) -def test_list_event_create_rules(request_type, transport: str = "grpc"): +def test_update_enhanced_measurement_settings(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -48333,26 +48264,46 @@ def test_list_event_create_rules(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_event_create_rules), "__call__" + type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListEventCreateRulesResponse( - next_page_token="next_page_token_value", + call.return_value = resources.EnhancedMeasurementSettings( + name="name_value", + stream_enabled=True, + scrolls_enabled=True, + outbound_clicks_enabled=True, + site_search_enabled=True, + video_engagement_enabled=True, + file_downloads_enabled=True, + page_changes_enabled=True, + form_interactions_enabled=True, + search_query_parameter="search_query_parameter_value", + uri_query_parameter="uri_query_parameter_value", ) - response = client.list_event_create_rules(request) + response = client.update_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListEventCreateRulesRequest() + request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEventCreateRulesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.EnhancedMeasurementSettings) + assert response.name == "name_value" + assert response.stream_enabled is True + assert response.scrolls_enabled is True + assert response.outbound_clicks_enabled is True + assert response.site_search_enabled is True + assert response.video_engagement_enabled is True + assert response.file_downloads_enabled is True + assert response.page_changes_enabled is True + assert response.form_interactions_enabled is True + assert response.search_query_parameter == "search_query_parameter_value" + assert response.uri_query_parameter == "uri_query_parameter_value" -def test_list_event_create_rules_empty_call(): +def test_update_enhanced_measurement_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -48362,18 +48313,18 @@ def test_list_event_create_rules_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_event_create_rules), "__call__" + type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_event_create_rules() + client.update_enhanced_measurement_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListEventCreateRulesRequest() + assert args[0] == analytics_admin.UpdateEnhancedMeasurementSettingsRequest() -def test_list_event_create_rules_non_empty_request_with_auto_populated_field(): +def test_update_enhanced_measurement_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -48384,28 +48335,22 @@ def test_list_event_create_rules_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListEventCreateRulesRequest( - parent="parent_value", - page_token="page_token_value", - ) + request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_event_create_rules), "__call__" + type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_event_create_rules(request=request) + client.update_enhanced_measurement_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListEventCreateRulesRequest( - parent="parent_value", - page_token="page_token_value", - ) + assert args[0] == analytics_admin.UpdateEnhancedMeasurementSettingsRequest() -def test_list_event_create_rules_use_cached_wrapped_rpc(): +def test_update_enhanced_measurement_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -48420,7 +48365,7 @@ def test_list_event_create_rules_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_event_create_rules + client._transport.update_enhanced_measurement_settings in client._transport._wrapped_methods ) @@ -48430,15 +48375,15 @@ def test_list_event_create_rules_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_event_create_rules + client._transport.update_enhanced_measurement_settings ] = mock_rpc request = {} - client.list_event_create_rules(request) + client.update_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_event_create_rules(request) + client.update_enhanced_measurement_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -48446,7 +48391,7 @@ def test_list_event_create_rules_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_event_create_rules_empty_call_async(): +async def test_update_enhanced_measurement_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -48456,22 +48401,32 @@ async def test_list_event_create_rules_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_event_create_rules), "__call__" + type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListEventCreateRulesResponse( - next_page_token="next_page_token_value", + resources.EnhancedMeasurementSettings( + name="name_value", + stream_enabled=True, + scrolls_enabled=True, + outbound_clicks_enabled=True, + site_search_enabled=True, + video_engagement_enabled=True, + file_downloads_enabled=True, + page_changes_enabled=True, + form_interactions_enabled=True, + search_query_parameter="search_query_parameter_value", + uri_query_parameter="uri_query_parameter_value", ) ) - response = await client.list_event_create_rules() + response = await client.update_enhanced_measurement_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListEventCreateRulesRequest() + assert args[0] == analytics_admin.UpdateEnhancedMeasurementSettingsRequest() @pytest.mark.asyncio -async def test_list_event_create_rules_async_use_cached_wrapped_rpc( +async def test_update_enhanced_measurement_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -48488,33 +48443,34 @@ async def test_list_event_create_rules_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_event_create_rules + client._client._transport.update_enhanced_measurement_settings in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = 
mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_event_create_rules - ] = mock_object + client._client._transport.update_enhanced_measurement_settings + ] = mock_rpc request = {} - await client.list_event_create_rules(request) + await client.update_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_event_create_rules(request) + await client.update_enhanced_measurement_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_event_create_rules_async( +async def test_update_enhanced_measurement_settings_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ListEventCreateRulesRequest, + request_type=analytics_admin.UpdateEnhancedMeasurementSettingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -48527,49 +48483,69 @@ async def test_list_event_create_rules_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_event_create_rules), "__call__" + type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListEventCreateRulesResponse( - next_page_token="next_page_token_value", + resources.EnhancedMeasurementSettings( + name="name_value", + stream_enabled=True, + scrolls_enabled=True, + outbound_clicks_enabled=True, + site_search_enabled=True, + video_engagement_enabled=True, + file_downloads_enabled=True, + page_changes_enabled=True, + form_interactions_enabled=True, + search_query_parameter="search_query_parameter_value", + uri_query_parameter="uri_query_parameter_value", ) ) - response = await client.list_event_create_rules(request) + response = await client.update_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListEventCreateRulesRequest() + request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListEventCreateRulesAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.EnhancedMeasurementSettings) + assert response.name == "name_value" + assert response.stream_enabled is True + assert response.scrolls_enabled is True + assert response.outbound_clicks_enabled is True + assert response.site_search_enabled is True + assert response.video_engagement_enabled is True + assert response.file_downloads_enabled is True + assert response.page_changes_enabled is True + assert response.form_interactions_enabled is True + assert response.search_query_parameter == "search_query_parameter_value" + assert response.uri_query_parameter == "uri_query_parameter_value" @pytest.mark.asyncio -async def test_list_event_create_rules_async_from_dict(): - await test_list_event_create_rules_async(request_type=dict) +async def test_update_enhanced_measurement_settings_async_from_dict(): + await test_update_enhanced_measurement_settings_async(request_type=dict) -def test_list_event_create_rules_field_headers(): +def test_update_enhanced_measurement_settings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListEventCreateRulesRequest() + request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() - request.parent = "parent_value" + request.enhanced_measurement_settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_event_create_rules), "__call__" + type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: - call.return_value = analytics_admin.ListEventCreateRulesResponse() - client.list_event_create_rules(request) + call.return_value = resources.EnhancedMeasurementSettings() + client.update_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -48580,30 +48556,30 @@ def test_list_event_create_rules_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "enhanced_measurement_settings.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_event_create_rules_field_headers_async(): +async def test_update_enhanced_measurement_settings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListEventCreateRulesRequest() + request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() - request.parent = "parent_value" + request.enhanced_measurement_settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_event_create_rules), "__call__" + type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListEventCreateRulesResponse() + resources.EnhancedMeasurementSettings() ) - await client.list_event_create_rules(request) + await client.update_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -48614,37 +48590,43 @@ async def test_list_event_create_rules_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "enhanced_measurement_settings.name=name_value", ) in kw["metadata"] -def test_list_event_create_rules_flattened(): +def test_update_enhanced_measurement_settings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_event_create_rules), "__call__" + type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListEventCreateRulesResponse() + call.return_value = resources.EnhancedMeasurementSettings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_event_create_rules( - parent="parent_value", + client.update_enhanced_measurement_settings( + enhanced_measurement_settings=resources.EnhancedMeasurementSettings( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].enhanced_measurement_settings + mock_val = resources.EnhancedMeasurementSettings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_list_event_create_rules_flattened_error(): +def test_update_enhanced_measurement_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -48652,45 +48634,54 @@ def test_list_event_create_rules_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_event_create_rules( - analytics_admin.ListEventCreateRulesRequest(), - parent="parent_value", + client.update_enhanced_measurement_settings( + analytics_admin.UpdateEnhancedMeasurementSettingsRequest(), + enhanced_measurement_settings=resources.EnhancedMeasurementSettings( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_list_event_create_rules_flattened_async(): +async def test_update_enhanced_measurement_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_event_create_rules), "__call__" + type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.ListEventCreateRulesResponse() + call.return_value = resources.EnhancedMeasurementSettings() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListEventCreateRulesResponse() + resources.EnhancedMeasurementSettings() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_event_create_rules( - parent="parent_value", + response = await client.update_enhanced_measurement_settings( + enhanced_measurement_settings=resources.EnhancedMeasurementSettings( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].enhanced_measurement_settings + mock_val = resources.EnhancedMeasurementSettings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_list_event_create_rules_flattened_error_async(): +async def test_update_enhanced_measurement_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -48698,226 +48689,23 @@ async def test_list_event_create_rules_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_event_create_rules( - analytics_admin.ListEventCreateRulesRequest(), - parent="parent_value", - ) - - -def test_list_event_create_rules_pager(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_event_create_rules), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - ], - next_page_token="abc", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[], - next_page_token="def", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - ], - next_page_token="ghi", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_event_create_rules(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, event_create_and_edit.EventCreateRule) for i in results - ) - - -def test_list_event_create_rules_pages(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock 
the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_event_create_rules), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - ], - next_page_token="abc", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[], - next_page_token="def", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - ], - next_page_token="ghi", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - ], - ), - RuntimeError, - ) - pages = list(client.list_event_create_rules(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_event_create_rules_async_pager(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_event_create_rules), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - ], - next_page_token="abc", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[], - next_page_token="def", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - ], - next_page_token="ghi", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_event_create_rules( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all( - isinstance(i, event_create_and_edit.EventCreateRule) for i in responses - ) - - -@pytest.mark.asyncio -async def test_list_event_create_rules_async_pages(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_event_create_rules), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - ], - next_page_token="abc", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[], - next_page_token="def", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - ], - next_page_token="ghi", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - ], + await client.update_enhanced_measurement_settings( + analytics_admin.UpdateEnhancedMeasurementSettingsRequest(), + enhanced_measurement_settings=resources.EnhancedMeasurementSettings( + name="name_value" ), - RuntimeError, + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_event_create_rules(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateEventCreateRuleRequest, + analytics_admin.CreateConnectedSiteTagRequest, dict, ], ) -def test_create_event_create_rule(request_type, transport: str = "grpc"): +def test_create_connected_site_tag(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -48929,30 +48717,23 @@ def test_create_event_create_rule(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_event_create_rule), "__call__" + type(client.transport.create_connected_site_tag), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = event_create_and_edit.EventCreateRule( - name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, - ) - response = client.create_event_create_rule(request) + call.return_value = analytics_admin.CreateConnectedSiteTagResponse() + response = client.create_connected_site_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateEventCreateRuleRequest() + request = analytics_admin.CreateConnectedSiteTagRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, event_create_and_edit.EventCreateRule) - assert response.name == "name_value" - assert response.destination_event == "destination_event_value" - assert response.source_copy_parameters is True + assert isinstance(response, analytics_admin.CreateConnectedSiteTagResponse) -def test_create_event_create_rule_empty_call(): +def test_create_connected_site_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -48962,18 +48743,18 @@ def test_create_event_create_rule_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_event_create_rule), "__call__" + type(client.transport.create_connected_site_tag), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_event_create_rule() + client.create_connected_site_tag() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateEventCreateRuleRequest() + assert args[0] == analytics_admin.CreateConnectedSiteTagRequest() -def test_create_event_create_rule_non_empty_request_with_auto_populated_field(): +def test_create_connected_site_tag_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -48984,26 +48765,26 @@ def test_create_event_create_rule_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.CreateEventCreateRuleRequest( - parent="parent_value", + request = analytics_admin.CreateConnectedSiteTagRequest( + property="property_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_event_create_rule), "__call__" + type(client.transport.create_connected_site_tag), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_event_create_rule(request=request) + client.create_connected_site_tag(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateEventCreateRuleRequest( - parent="parent_value", + assert args[0] == analytics_admin.CreateConnectedSiteTagRequest( + property="property_value", ) -def test_create_event_create_rule_use_cached_wrapped_rpc(): +def test_create_connected_site_tag_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -49018,7 +48799,7 @@ def test_create_event_create_rule_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_event_create_rule + client._transport.create_connected_site_tag in client._transport._wrapped_methods ) @@ -49028,15 +48809,15 @@ def test_create_event_create_rule_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_event_create_rule + client._transport.create_connected_site_tag ] = mock_rpc request = {} - client.create_event_create_rule(request) + client.create_connected_site_tag(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_event_create_rule(request) + client.create_connected_site_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -49044,7 +48825,7 @@ def test_create_event_create_rule_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_event_create_rule_empty_call_async(): +async def test_create_connected_site_tag_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -49054,24 +48835,20 @@ async def test_create_event_create_rule_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_event_create_rule), "__call__" + type(client.transport.create_connected_site_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - event_create_and_edit.EventCreateRule( - name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, - ) + analytics_admin.CreateConnectedSiteTagResponse() ) - response = await client.create_event_create_rule() + response = await client.create_connected_site_tag() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateEventCreateRuleRequest() + assert args[0] == analytics_admin.CreateConnectedSiteTagRequest() @pytest.mark.asyncio -async def test_create_event_create_rule_async_use_cached_wrapped_rpc( +async def test_create_connected_site_tag_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -49088,33 +48865,34 @@ async def test_create_event_create_rule_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_event_create_rule + client._client._transport.create_connected_site_tag in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_event_create_rule - ] = mock_object + client._client._transport.create_connected_site_tag + ] = mock_rpc request = {} - await client.create_event_create_rule(request) + await client.create_connected_site_tag(request) # Establish that the 
underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_event_create_rule(request) + await client.create_connected_site_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_event_create_rule_async( +async def test_create_connected_site_tag_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateEventCreateRuleRequest, + request_type=analytics_admin.CreateConnectedSiteTagRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -49127,205 +48905,265 @@ async def test_create_event_create_rule_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_event_create_rule), "__call__" + type(client.transport.create_connected_site_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - event_create_and_edit.EventCreateRule( - name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, - ) + analytics_admin.CreateConnectedSiteTagResponse() ) - response = await client.create_event_create_rule(request) + response = await client.create_connected_site_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateEventCreateRuleRequest() + request = analytics_admin.CreateConnectedSiteTagRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, event_create_and_edit.EventCreateRule) - assert response.name == "name_value" - assert response.destination_event == "destination_event_value" - assert response.source_copy_parameters is True + assert isinstance(response, analytics_admin.CreateConnectedSiteTagResponse) @pytest.mark.asyncio -async def test_create_event_create_rule_async_from_dict(): - await test_create_event_create_rule_async(request_type=dict) +async def test_create_connected_site_tag_async_from_dict(): + await test_create_connected_site_tag_async(request_type=dict) -def test_create_event_create_rule_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.DeleteConnectedSiteTagRequest, + dict, + ], +) +def test_delete_connected_site_tag(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analytics_admin.CreateEventCreateRuleRequest() - - request.parent = "parent_value" + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_event_create_rule), "__call__" + type(client.transport.delete_connected_site_tag), "__call__" ) as call: - call.return_value = event_create_and_edit.EventCreateRule() - client.create_event_create_rule(request) + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_connected_site_tag(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + request = analytics_admin.DeleteConnectedSiteTagRequest() assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert response is None -@pytest.mark.asyncio -async def test_create_event_create_rule_field_headers_async(): - client = AnalyticsAdminServiceAsyncClient( +def test_delete_connected_site_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analytics_admin.CreateEventCreateRuleRequest() - - request.parent = "parent_value" - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_event_create_rule), "__call__" + type(client.transport.delete_connected_site_tag), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - event_create_and_edit.EventCreateRule() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - await client.create_event_create_rule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + client.delete_connected_site_tag() + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + assert args[0] == analytics_admin.DeleteConnectedSiteTagRequest() -def test_create_event_create_rule_flattened(): +def test_delete_connected_site_tag_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.DeleteConnectedSiteTagRequest( + property="property_value", + tag_id="tag_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_event_create_rule), "__call__" + type(client.transport.delete_connected_site_tag), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = event_create_and_edit.EventCreateRule() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_event_create_rule( - parent="parent_value", - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 + client.delete_connected_site_tag(request=request) + call.assert_called() _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].event_create_rule - mock_val = event_create_and_edit.EventCreateRule(name="name_value") - assert arg == mock_val - + assert args[0] == analytics_admin.DeleteConnectedSiteTagRequest( + property="property_value", + tag_id="tag_id_value", + ) -def test_create_event_create_rule_flattened_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_event_create_rule( - analytics_admin.CreateEventCreateRuleRequest(), - parent="parent_value", - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), +def test_delete_connected_site_tag_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -@pytest.mark.asyncio -async def test_create_event_create_rule_flattened_async(): + # Ensure method has been cached + assert ( + client._transport.delete_connected_site_tag + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_connected_site_tag + ] = mock_rpc + request = {} + client.delete_connected_site_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_connected_site_tag(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_connected_site_tag_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_event_create_rule), "__call__" + type(client.transport.delete_connected_site_tag), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = event_create_and_edit.EventCreateRule() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_connected_site_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteConnectedSiteTagRequest() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - event_create_and_edit.EventCreateRule() + +@pytest.mark.asyncio +async def test_delete_connected_site_tag_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_event_create_rule( - parent="parent_value", - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_connected_site_tag + in client._client._transport._wrapped_methods ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].event_create_rule - mock_val = event_create_and_edit.EventCreateRule(name="name_value") - assert arg == mock_val + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_connected_site_tag + ] = mock_rpc + + request = {} + await client.delete_connected_site_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_connected_site_tag(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_event_create_rule_flattened_error_async(): +async def test_delete_connected_site_tag_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.DeleteConnectedSiteTagRequest, +): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_event_create_rule( - analytics_admin.CreateEventCreateRuleRequest(), - parent="parent_value", - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connected_site_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_connected_site_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.DeleteConnectedSiteTagRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_connected_site_tag_async_from_dict(): + await test_delete_connected_site_tag_async(request_type=dict) @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateEventCreateRuleRequest, + analytics_admin.ListConnectedSiteTagsRequest, dict, ], ) -def test_update_event_create_rule(request_type, transport: str = "grpc"): +def test_list_connected_site_tags(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -49337,30 +49175,23 @@ def test_update_event_create_rule(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_event_create_rule), "__call__" + type(client.transport.list_connected_site_tags), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = event_create_and_edit.EventCreateRule( - name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, - ) - response = client.update_event_create_rule(request) + call.return_value = analytics_admin.ListConnectedSiteTagsResponse() + response = client.list_connected_site_tags(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateEventCreateRuleRequest() + request = analytics_admin.ListConnectedSiteTagsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, event_create_and_edit.EventCreateRule) - assert response.name == "name_value" - assert response.destination_event == "destination_event_value" - assert response.source_copy_parameters is True + assert isinstance(response, analytics_admin.ListConnectedSiteTagsResponse) -def test_update_event_create_rule_empty_call(): +def test_list_connected_site_tags_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -49370,18 +49201,18 @@ def test_update_event_create_rule_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_event_create_rule), "__call__" + type(client.transport.list_connected_site_tags), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_event_create_rule() + client.list_connected_site_tags() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateEventCreateRuleRequest() + assert args[0] == analytics_admin.ListConnectedSiteTagsRequest() -def test_update_event_create_rule_non_empty_request_with_auto_populated_field(): +def test_list_connected_site_tags_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AnalyticsAdminServiceClient( @@ -49392,22 +49223,26 @@ def test_update_event_create_rule_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.UpdateEventCreateRuleRequest() + request = analytics_admin.ListConnectedSiteTagsRequest( + property="property_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_event_create_rule), "__call__" + type(client.transport.list_connected_site_tags), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_event_create_rule(request=request) + client.list_connected_site_tags(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateEventCreateRuleRequest() + assert args[0] == analytics_admin.ListConnectedSiteTagsRequest( + property="property_value", + ) -def test_update_event_create_rule_use_cached_wrapped_rpc(): +def test_list_connected_site_tags_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -49422,7 +49257,7 @@ def test_update_event_create_rule_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_event_create_rule + client._transport.list_connected_site_tags in client._transport._wrapped_methods ) @@ -49432,15 +49267,15 @@ def test_update_event_create_rule_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.update_event_create_rule + client._transport.list_connected_site_tags ] = mock_rpc request = {} - client.update_event_create_rule(request) + client.list_connected_site_tags(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_event_create_rule(request) + client.list_connected_site_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -49448,7 +49283,7 @@ def test_update_event_create_rule_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_event_create_rule_empty_call_async(): +async def test_list_connected_site_tags_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -49458,24 +49293,20 @@ async def test_update_event_create_rule_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_event_create_rule), "__call__" + type(client.transport.list_connected_site_tags), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - event_create_and_edit.EventCreateRule( - name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, - ) + analytics_admin.ListConnectedSiteTagsResponse() ) - response = await client.update_event_create_rule() + response = await client.list_connected_site_tags() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateEventCreateRuleRequest() + assert args[0] == analytics_admin.ListConnectedSiteTagsRequest() @pytest.mark.asyncio -async def test_update_event_create_rule_async_use_cached_wrapped_rpc( +async def test_list_connected_site_tags_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -49492,33 +49323,34 @@ async def test_update_event_create_rule_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_event_create_rule + client._client._transport.list_connected_site_tags in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_event_create_rule - ] = mock_object + client._client._transport.list_connected_site_tags + ] = mock_rpc request = {} - await client.update_event_create_rule(request) + await client.list_connected_site_tags(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_event_create_rule(request) + await client.list_connected_site_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_event_create_rule_async( +async def test_list_connected_site_tags_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateEventCreateRuleRequest, + request_type=analytics_admin.ListConnectedSiteTagsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -49531,205 +49363,37 @@ async def test_update_event_create_rule_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_event_create_rule), "__call__" + type(client.transport.list_connected_site_tags), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - event_create_and_edit.EventCreateRule( - name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, - ) + analytics_admin.ListConnectedSiteTagsResponse() ) - response = await client.update_event_create_rule(request) + response = await client.list_connected_site_tags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateEventCreateRuleRequest() + request = analytics_admin.ListConnectedSiteTagsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, event_create_and_edit.EventCreateRule) - assert response.name == "name_value" - assert response.destination_event == "destination_event_value" - assert response.source_copy_parameters is True - - -@pytest.mark.asyncio -async def test_update_event_create_rule_async_from_dict(): - await test_update_event_create_rule_async(request_type=dict) - - -def test_update_event_create_rule_field_headers(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateEventCreateRuleRequest() - - request.event_create_rule.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_event_create_rule), "__call__" - ) as call: - call.return_value = event_create_and_edit.EventCreateRule() - client.update_event_create_rule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "event_create_rule.name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_event_create_rule_field_headers_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateEventCreateRuleRequest() - - request.event_create_rule.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_event_create_rule), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - event_create_and_edit.EventCreateRule() - ) - await client.update_event_create_rule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "event_create_rule.name=name_value", - ) in kw["metadata"] - - -def test_update_event_create_rule_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_event_create_rule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = event_create_and_edit.EventCreateRule() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_event_create_rule( - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].event_create_rule - mock_val = event_create_and_edit.EventCreateRule(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -def test_update_event_create_rule_flattened_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_event_create_rule( - analytics_admin.UpdateEventCreateRuleRequest(), - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -@pytest.mark.asyncio -async def test_update_event_create_rule_flattened_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_event_create_rule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = event_create_and_edit.EventCreateRule() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - event_create_and_edit.EventCreateRule() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_event_create_rule( - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].event_create_rule - mock_val = event_create_and_edit.EventCreateRule(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + assert isinstance(response, analytics_admin.ListConnectedSiteTagsResponse) @pytest.mark.asyncio -async def test_update_event_create_rule_flattened_error_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_event_create_rule( - analytics_admin.UpdateEventCreateRuleRequest(), - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) +async def test_list_connected_site_tags_async_from_dict(): + await test_list_connected_site_tags_async(request_type=dict) @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteEventCreateRuleRequest, + analytics_admin.FetchConnectedGa4PropertyRequest, dict, ], ) -def test_delete_event_create_rule(request_type, transport: str = "grpc"): +def test_fetch_connected_ga4_property(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -49741,23 +49405,26 @@ def test_delete_event_create_rule(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_event_create_rule), "__call__" + type(client.transport.fetch_connected_ga4_property), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_event_create_rule(request) + call.return_value = analytics_admin.FetchConnectedGa4PropertyResponse( + property="property_value", + ) + response = client.fetch_connected_ga4_property(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteEventCreateRuleRequest() + request = analytics_admin.FetchConnectedGa4PropertyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, analytics_admin.FetchConnectedGa4PropertyResponse) + assert response.property == "property_value" -def test_delete_event_create_rule_empty_call(): +def test_fetch_connected_ga4_property_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -49767,18 +49434,18 @@ def test_delete_event_create_rule_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_event_create_rule), "__call__" + type(client.transport.fetch_connected_ga4_property), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_event_create_rule() + client.fetch_connected_ga4_property() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteEventCreateRuleRequest() + assert args[0] == analytics_admin.FetchConnectedGa4PropertyRequest() -def test_delete_event_create_rule_non_empty_request_with_auto_populated_field(): +def test_fetch_connected_ga4_property_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -49789,26 +49456,26 @@ def test_delete_event_create_rule_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.DeleteEventCreateRuleRequest( - name="name_value", + request = analytics_admin.FetchConnectedGa4PropertyRequest( + property="property_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_event_create_rule), "__call__" + type(client.transport.fetch_connected_ga4_property), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_event_create_rule(request=request) + client.fetch_connected_ga4_property(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteEventCreateRuleRequest( - name="name_value", + assert args[0] == analytics_admin.FetchConnectedGa4PropertyRequest( + property="property_value", ) -def test_delete_event_create_rule_use_cached_wrapped_rpc(): +def test_fetch_connected_ga4_property_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -49823,7 +49490,7 @@ def test_delete_event_create_rule_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_event_create_rule + client._transport.fetch_connected_ga4_property in client._transport._wrapped_methods ) @@ -49833,15 +49500,15 @@ def test_delete_event_create_rule_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_event_create_rule + client._transport.fetch_connected_ga4_property ] = mock_rpc request = {} - client.delete_event_create_rule(request) + client.fetch_connected_ga4_property(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_event_create_rule(request) + client.fetch_connected_ga4_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -49849,7 +49516,7 @@ def test_delete_event_create_rule_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_event_create_rule_empty_call_async(): +async def test_fetch_connected_ga4_property_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -49859,18 +49526,22 @@ async def test_delete_event_create_rule_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_event_create_rule), "__call__" + type(client.transport.fetch_connected_ga4_property), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_event_create_rule() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.FetchConnectedGa4PropertyResponse( + property="property_value", + ) + ) + response = await client.fetch_connected_ga4_property() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteEventCreateRuleRequest() + assert args[0] == analytics_admin.FetchConnectedGa4PropertyRequest() @pytest.mark.asyncio -async def test_delete_event_create_rule_async_use_cached_wrapped_rpc( +async def test_fetch_connected_ga4_property_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -49887,33 +49558,34 @@ async def test_delete_event_create_rule_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_event_create_rule + 
client._client._transport.fetch_connected_ga4_property in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_event_create_rule - ] = mock_object + client._client._transport.fetch_connected_ga4_property + ] = mock_rpc request = {} - await client.delete_event_create_rule(request) + await client.fetch_connected_ga4_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_event_create_rule(request) + await client.fetch_connected_ga4_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_event_create_rule_async( +async def test_fetch_connected_ga4_property_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteEventCreateRuleRequest, + request_type=analytics_admin.FetchConnectedGa4PropertyRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -49926,185 +49598,43 @@ async def test_delete_event_create_rule_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_event_create_rule), "__call__" + type(client.transport.fetch_connected_ga4_property), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_event_create_rule(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.FetchConnectedGa4PropertyResponse( + property="property_value", + ) + ) + response = await client.fetch_connected_ga4_property(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteEventCreateRuleRequest() + request = analytics_admin.FetchConnectedGa4PropertyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, analytics_admin.FetchConnectedGa4PropertyResponse) + assert response.property == "property_value" @pytest.mark.asyncio -async def test_delete_event_create_rule_async_from_dict(): - await test_delete_event_create_rule_async(request_type=dict) +async def test_fetch_connected_ga4_property_async_from_dict(): + await test_fetch_connected_ga4_property_async(request_type=dict) -def test_delete_event_create_rule_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetAdSenseLinkRequest, + dict, + ], +) +def test_get_ad_sense_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteEventCreateRuleRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_event_create_rule), "__call__" - ) as call: - call.return_value = None - client.delete_event_create_rule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_event_create_rule_field_headers_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteEventCreateRuleRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_event_create_rule), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_event_create_rule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_delete_event_create_rule_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_event_create_rule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_event_create_rule( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_event_create_rule_flattened_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_event_create_rule( - analytics_admin.DeleteEventCreateRuleRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_event_create_rule_flattened_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_event_create_rule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_event_create_rule( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_event_create_rule_flattened_error_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_event_create_rule( - analytics_admin.DeleteEventCreateRuleRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.UpdateDataRedactionSettingsRequest, - dict, - ], -) -def test_update_data_redaction_settings(request_type, transport: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -50113,32 +49643,28 @@ def test_update_data_redaction_settings(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_redaction_settings), "__call__" + type(client.transport.get_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataRedactionSettings( + call.return_value = resources.AdSenseLink( name="name_value", - email_redaction_enabled=True, - query_parameter_redaction_enabled=True, - query_parameter_keys=["query_parameter_keys_value"], + ad_client_code="ad_client_code_value", ) - response = client.update_data_redaction_settings(request) + response = client.get_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateDataRedactionSettingsRequest() + request = analytics_admin.GetAdSenseLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataRedactionSettings) + assert isinstance(response, resources.AdSenseLink) assert response.name == "name_value" - assert response.email_redaction_enabled is True - assert response.query_parameter_redaction_enabled is True - assert response.query_parameter_keys == ["query_parameter_keys_value"] + assert response.ad_client_code == "ad_client_code_value" -def test_update_data_redaction_settings_empty_call(): +def test_get_ad_sense_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -50148,18 +49674,18 @@ def test_update_data_redaction_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_redaction_settings), "__call__" + type(client.transport.get_ad_sense_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_data_redaction_settings() + client.get_ad_sense_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateDataRedactionSettingsRequest() + assert args[0] == analytics_admin.GetAdSenseLinkRequest() -def test_update_data_redaction_settings_non_empty_request_with_auto_populated_field(): +def test_get_ad_sense_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -50170,22 +49696,26 @@ def test_update_data_redaction_settings_non_empty_request_with_auto_populated_fi # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_admin.UpdateDataRedactionSettingsRequest() + request = analytics_admin.GetAdSenseLinkRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_redaction_settings), "__call__" + type(client.transport.get_ad_sense_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_data_redaction_settings(request=request) + client.get_ad_sense_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateDataRedactionSettingsRequest() + assert args[0] == analytics_admin.GetAdSenseLinkRequest( + name="name_value", + ) -def test_update_data_redaction_settings_use_cached_wrapped_rpc(): +def test_get_ad_sense_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -50199,10 +49729,7 @@ def test_update_data_redaction_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_data_redaction_settings - in client._transport._wrapped_methods - ) + assert client._transport.get_ad_sense_link in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -50210,15 +49737,15 @@ def test_update_data_redaction_settings_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_data_redaction_settings + client._transport.get_ad_sense_link ] = mock_rpc request = {} - client.update_data_redaction_settings(request) + client.get_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_data_redaction_settings(request) + client.get_ad_sense_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -50226,7 +49753,7 @@ def test_update_data_redaction_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_data_redaction_settings_empty_call_async(): +async def test_get_ad_sense_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -50236,25 +49763,23 @@ async def test_update_data_redaction_settings_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_redaction_settings), "__call__" + type(client.transport.get_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRedactionSettings( + resources.AdSenseLink( name="name_value", - email_redaction_enabled=True, - query_parameter_redaction_enabled=True, - query_parameter_keys=["query_parameter_keys_value"], + ad_client_code="ad_client_code_value", ) ) - response = await client.update_data_redaction_settings() + response = await client.get_ad_sense_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateDataRedactionSettingsRequest() + assert args[0] == analytics_admin.GetAdSenseLinkRequest() @pytest.mark.asyncio -async def test_update_data_redaction_settings_async_use_cached_wrapped_rpc( +async def test_get_ad_sense_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -50271,33 +49796,33 @@ async def test_update_data_redaction_settings_async_use_cached_wrapped_rpc( # Ensure method has been 
cached assert ( - client._client._transport.update_data_redaction_settings + client._client._transport.get_ad_sense_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_data_redaction_settings - ] = mock_object + client._client._transport.get_ad_sense_link + ] = mock_rpc request = {} - await client.update_data_redaction_settings(request) + await client.get_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_data_redaction_settings(request) + await client.get_ad_sense_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_data_redaction_settings_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateDataRedactionSettingsRequest, +async def test_get_ad_sense_link_async( + transport: str = "grpc_asyncio", request_type=analytics_admin.GetAdSenseLinkRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -50310,55 +49835,51 @@ async def test_update_data_redaction_settings_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_redaction_settings), "__call__" + type(client.transport.get_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRedactionSettings( + resources.AdSenseLink( name="name_value", - email_redaction_enabled=True, - query_parameter_redaction_enabled=True, - query_parameter_keys=["query_parameter_keys_value"], + ad_client_code="ad_client_code_value", ) ) - response = await client.update_data_redaction_settings(request) + response = await client.get_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateDataRedactionSettingsRequest() + request = analytics_admin.GetAdSenseLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DataRedactionSettings) + assert isinstance(response, resources.AdSenseLink) assert response.name == "name_value" - assert response.email_redaction_enabled is True - assert response.query_parameter_redaction_enabled is True - assert response.query_parameter_keys == ["query_parameter_keys_value"] + assert response.ad_client_code == "ad_client_code_value" @pytest.mark.asyncio -async def test_update_data_redaction_settings_async_from_dict(): - await test_update_data_redaction_settings_async(request_type=dict) +async def test_get_ad_sense_link_async_from_dict(): + await test_get_ad_sense_link_async(request_type=dict) -def test_update_data_redaction_settings_field_headers(): +def test_get_ad_sense_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateDataRedactionSettingsRequest() + request = analytics_admin.GetAdSenseLinkRequest() - request.data_redaction_settings.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_data_redaction_settings), "__call__" + type(client.transport.get_ad_sense_link), "__call__" ) as call: - call.return_value = resources.DataRedactionSettings() - client.update_data_redaction_settings(request) + call.return_value = resources.AdSenseLink() + client.get_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -50369,30 +49890,30 @@ def test_update_data_redaction_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "data_redaction_settings.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_data_redaction_settings_field_headers_async(): +async def test_get_ad_sense_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateDataRedactionSettingsRequest() + request = analytics_admin.GetAdSenseLinkRequest() - request.data_redaction_settings.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_redaction_settings), "__call__" + type(client.transport.get_ad_sense_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRedactionSettings() + resources.AdSenseLink() ) - await client.update_data_redaction_settings(request) + await client.get_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -50403,41 +49924,37 @@ async def test_update_data_redaction_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "data_redaction_settings.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_data_redaction_settings_flattened(): +def test_get_ad_sense_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_redaction_settings), "__call__" + type(client.transport.get_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataRedactionSettings() + call.return_value = resources.AdSenseLink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_data_redaction_settings( - data_redaction_settings=resources.DataRedactionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_ad_sense_link( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].data_redaction_settings - mock_val = resources.DataRedactionSettings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_data_redaction_settings_flattened_error(): +def test_get_ad_sense_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -50445,50 +49962,45 @@ def test_update_data_redaction_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_data_redaction_settings( - analytics_admin.UpdateDataRedactionSettingsRequest(), - data_redaction_settings=resources.DataRedactionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_ad_sense_link( + analytics_admin.GetAdSenseLinkRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_data_redaction_settings_flattened_async(): +async def test_get_ad_sense_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_data_redaction_settings), "__call__" + type(client.transport.get_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataRedactionSettings() + call.return_value = resources.AdSenseLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRedactionSettings() + resources.AdSenseLink() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_data_redaction_settings( - data_redaction_settings=resources.DataRedactionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.get_ad_sense_link( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].data_redaction_settings - mock_val = resources.DataRedactionSettings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_data_redaction_settings_flattened_error_async(): +async def test_get_ad_sense_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -50496,21 +50008,20 @@ async def test_update_data_redaction_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_data_redaction_settings( - analytics_admin.UpdateDataRedactionSettingsRequest(), - data_redaction_settings=resources.DataRedactionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.get_ad_sense_link( + analytics_admin.GetAdSenseLinkRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetDataRedactionSettingsRequest, + analytics_admin.CreateAdSenseLinkRequest, dict, ], ) -def test_get_data_redaction_settings(request_type, transport: str = "grpc"): +def test_create_ad_sense_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -50522,32 +50033,28 @@ def test_get_data_redaction_settings(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_redaction_settings), "__call__" + type(client.transport.create_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.DataRedactionSettings( + call.return_value = resources.AdSenseLink( name="name_value", - email_redaction_enabled=True, - query_parameter_redaction_enabled=True, - query_parameter_keys=["query_parameter_keys_value"], + ad_client_code="ad_client_code_value", ) - response = client.get_data_redaction_settings(request) + response = client.create_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetDataRedactionSettingsRequest() + request = analytics_admin.CreateAdSenseLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.DataRedactionSettings) + assert isinstance(response, resources.AdSenseLink) assert response.name == "name_value" - assert response.email_redaction_enabled is True - assert response.query_parameter_redaction_enabled is True - assert response.query_parameter_keys == ["query_parameter_keys_value"] + assert response.ad_client_code == "ad_client_code_value" -def test_get_data_redaction_settings_empty_call(): +def test_create_ad_sense_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -50557,18 +50064,18 @@ def test_get_data_redaction_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_redaction_settings), "__call__" + type(client.transport.create_ad_sense_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_data_redaction_settings() + client.create_ad_sense_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetDataRedactionSettingsRequest() + assert args[0] == analytics_admin.CreateAdSenseLinkRequest() -def test_get_data_redaction_settings_non_empty_request_with_auto_populated_field(): +def test_create_ad_sense_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -50579,26 +50086,26 @@ def test_get_data_redaction_settings_non_empty_request_with_auto_populated_field # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetDataRedactionSettingsRequest( - name="name_value", + request = analytics_admin.CreateAdSenseLinkRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_redaction_settings), "__call__" + type(client.transport.create_ad_sense_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_data_redaction_settings(request=request) + client.create_ad_sense_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetDataRedactionSettingsRequest( - name="name_value", + assert args[0] == analytics_admin.CreateAdSenseLinkRequest( + parent="parent_value", ) -def test_get_data_redaction_settings_use_cached_wrapped_rpc(): +def test_create_ad_sense_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -50613,8 +50120,7 @@ def test_get_data_redaction_settings_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_data_redaction_settings - in client._transport._wrapped_methods + client._transport.create_ad_sense_link in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -50623,15 +50129,15 @@ def test_get_data_redaction_settings_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_data_redaction_settings + client._transport.create_ad_sense_link ] = mock_rpc request = {} - client.get_data_redaction_settings(request) + client.create_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_data_redaction_settings(request) + client.create_ad_sense_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -50639,7 +50145,7 @@ def test_get_data_redaction_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_data_redaction_settings_empty_call_async(): +async def test_create_ad_sense_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -50649,25 +50155,23 @@ async def test_get_data_redaction_settings_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_redaction_settings), "__call__" + type(client.transport.create_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRedactionSettings( + resources.AdSenseLink( name="name_value", - email_redaction_enabled=True, - query_parameter_redaction_enabled=True, - query_parameter_keys=["query_parameter_keys_value"], + ad_client_code="ad_client_code_value", ) ) - response = await client.get_data_redaction_settings() + response = await client.create_ad_sense_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetDataRedactionSettingsRequest() + assert args[0] == analytics_admin.CreateAdSenseLinkRequest() @pytest.mark.asyncio -async def test_get_data_redaction_settings_async_use_cached_wrapped_rpc( +async def test_create_ad_sense_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -50684,33 +50188,34 @@ async def test_get_data_redaction_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_data_redaction_settings + client._client._transport.create_ad_sense_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_data_redaction_settings - ] = mock_object + client._client._transport.create_ad_sense_link + ] = mock_rpc request = {} - await 
client.get_data_redaction_settings(request) + await client.create_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_data_redaction_settings(request) + await client.create_ad_sense_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_data_redaction_settings_async( +async def test_create_ad_sense_link_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.GetDataRedactionSettingsRequest, + request_type=analytics_admin.CreateAdSenseLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -50723,55 +50228,51 @@ async def test_get_data_redaction_settings_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_redaction_settings), "__call__" + type(client.transport.create_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRedactionSettings( + resources.AdSenseLink( name="name_value", - email_redaction_enabled=True, - query_parameter_redaction_enabled=True, - query_parameter_keys=["query_parameter_keys_value"], + ad_client_code="ad_client_code_value", ) ) - response = await client.get_data_redaction_settings(request) + response = await client.create_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetDataRedactionSettingsRequest() + request = analytics_admin.CreateAdSenseLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataRedactionSettings) + assert isinstance(response, resources.AdSenseLink) assert response.name == "name_value" - assert response.email_redaction_enabled is True - assert response.query_parameter_redaction_enabled is True - assert response.query_parameter_keys == ["query_parameter_keys_value"] + assert response.ad_client_code == "ad_client_code_value" @pytest.mark.asyncio -async def test_get_data_redaction_settings_async_from_dict(): - await test_get_data_redaction_settings_async(request_type=dict) +async def test_create_ad_sense_link_async_from_dict(): + await test_create_ad_sense_link_async(request_type=dict) -def test_get_data_redaction_settings_field_headers(): +def test_create_ad_sense_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetDataRedactionSettingsRequest() + request = analytics_admin.CreateAdSenseLinkRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_redaction_settings), "__call__" + type(client.transport.create_ad_sense_link), "__call__" ) as call: - call.return_value = resources.DataRedactionSettings() - client.get_data_redaction_settings(request) + call.return_value = resources.AdSenseLink() + client.create_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -50782,30 +50283,30 @@ def test_get_data_redaction_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_data_redaction_settings_field_headers_async(): +async def test_create_ad_sense_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetDataRedactionSettingsRequest() + request = analytics_admin.CreateAdSenseLinkRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_redaction_settings), "__call__" + type(client.transport.create_ad_sense_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRedactionSettings() + resources.AdSenseLink() ) - await client.get_data_redaction_settings(request) + await client.create_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -50816,37 +50317,41 @@ async def test_get_data_redaction_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_data_redaction_settings_flattened(): +def test_create_ad_sense_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_data_redaction_settings), "__call__" + type(client.transport.create_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataRedactionSettings() + call.return_value = resources.AdSenseLink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_data_redaction_settings( - name="name_value", + client.create_ad_sense_link( + parent="parent_value", + adsense_link=resources.AdSenseLink(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].adsense_link + mock_val = resources.AdSenseLink(name="name_value") assert arg == mock_val -def test_get_data_redaction_settings_flattened_error(): +def test_create_ad_sense_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -50854,45 +50359,50 @@ def test_get_data_redaction_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_data_redaction_settings( - analytics_admin.GetDataRedactionSettingsRequest(), - name="name_value", + client.create_ad_sense_link( + analytics_admin.CreateAdSenseLinkRequest(), + parent="parent_value", + adsense_link=resources.AdSenseLink(name="name_value"), ) @pytest.mark.asyncio -async def test_get_data_redaction_settings_flattened_async(): +async def test_create_ad_sense_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_data_redaction_settings), "__call__" + type(client.transport.create_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.DataRedactionSettings() + call.return_value = resources.AdSenseLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.DataRedactionSettings() + resources.AdSenseLink() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_data_redaction_settings( - name="name_value", + response = await client.create_ad_sense_link( + parent="parent_value", + adsense_link=resources.AdSenseLink(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].adsense_link + mock_val = resources.AdSenseLink(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_get_data_redaction_settings_flattened_error_async(): +async def test_create_ad_sense_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -50900,20 +50410,21 @@ async def test_get_data_redaction_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_data_redaction_settings( - analytics_admin.GetDataRedactionSettingsRequest(), - name="name_value", + await client.create_ad_sense_link( + analytics_admin.CreateAdSenseLinkRequest(), + parent="parent_value", + adsense_link=resources.AdSenseLink(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetCalculatedMetricRequest, + analytics_admin.DeleteAdSenseLinkRequest, dict, ], ) -def test_get_calculated_metric(request_type, transport: str = "grpc"): +def test_delete_ad_sense_link(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -50925,44 +50436,23 @@ def test_get_calculated_metric(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_calculated_metric), "__call__" + type(client.transport.delete_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CalculatedMetric( - name="name_value", - description="description_value", - display_name="display_name_value", - calculated_metric_id="calculated_metric_id_value", - metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, - restricted_metric_type=[ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ], - formula="formula_value", - invalid_metric_reference=True, - ) - response = client.get_calculated_metric(request) + call.return_value = None + response = client.delete_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetCalculatedMetricRequest() + request = analytics_admin.DeleteAdSenseLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CalculatedMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.display_name == "display_name_value" - assert response.calculated_metric_id == "calculated_metric_id_value" - assert response.metric_unit == resources.CalculatedMetric.MetricUnit.STANDARD - assert response.restricted_metric_type == [ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ] - assert response.formula == "formula_value" - assert response.invalid_metric_reference is True + assert response is None -def test_get_calculated_metric_empty_call(): +def test_delete_ad_sense_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -50972,18 +50462,18 @@ def test_get_calculated_metric_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_calculated_metric), "__call__" + type(client.transport.delete_ad_sense_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_calculated_metric() + client.delete_ad_sense_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetCalculatedMetricRequest() + assert args[0] == analytics_admin.DeleteAdSenseLinkRequest() -def test_get_calculated_metric_non_empty_request_with_auto_populated_field(): +def test_delete_ad_sense_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AnalyticsAdminServiceClient( @@ -50994,26 +50484,26 @@ def test_get_calculated_metric_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetCalculatedMetricRequest( + request = analytics_admin.DeleteAdSenseLinkRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_calculated_metric), "__call__" + type(client.transport.delete_ad_sense_link), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_calculated_metric(request=request) + client.delete_ad_sense_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetCalculatedMetricRequest( + assert args[0] == analytics_admin.DeleteAdSenseLinkRequest( name="name_value", ) -def test_get_calculated_metric_use_cached_wrapped_rpc(): +def test_delete_ad_sense_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -51028,8 +50518,7 @@ def test_get_calculated_metric_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_calculated_metric - in client._transport._wrapped_methods + client._transport.delete_ad_sense_link in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -51038,15 +50527,15 @@ def test_get_calculated_metric_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.get_calculated_metric + client._transport.delete_ad_sense_link ] = mock_rpc request = {} - client.get_calculated_metric(request) + client.delete_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_calculated_metric(request) + client.delete_ad_sense_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -51054,7 +50543,7 @@ def test_get_calculated_metric_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_calculated_metric_empty_call_async(): +async def test_delete_ad_sense_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -51064,31 +50553,18 @@ async def test_get_calculated_metric_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_calculated_metric), "__call__" + type(client.transport.delete_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CalculatedMetric( - name="name_value", - description="description_value", - display_name="display_name_value", - calculated_metric_id="calculated_metric_id_value", - metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, - restricted_metric_type=[ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ], - formula="formula_value", - invalid_metric_reference=True, - ) - ) - response = await client.get_calculated_metric() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_ad_sense_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetCalculatedMetricRequest() + assert args[0] == analytics_admin.DeleteAdSenseLinkRequest() @pytest.mark.asyncio -async def test_get_calculated_metric_async_use_cached_wrapped_rpc( +async def test_delete_ad_sense_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -51105,33 +50581,34 @@ async def test_get_calculated_metric_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_calculated_metric + client._client._transport.delete_ad_sense_link in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_calculated_metric - ] = mock_object + client._client._transport.delete_ad_sense_link + ] = mock_rpc request = {} - await client.get_calculated_metric(request) + await client.delete_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_calculated_metric(request) + await client.delete_ad_sense_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_calculated_metric_async( +async def test_delete_ad_sense_link_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.GetCalculatedMetricRequest, + request_type=analytics_admin.DeleteAdSenseLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -51144,67 +50621,44 @@ async def test_get_calculated_metric_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_calculated_metric), "__call__" + type(client.transport.delete_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CalculatedMetric( - name="name_value", - description="description_value", - display_name="display_name_value", - calculated_metric_id="calculated_metric_id_value", - metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, - restricted_metric_type=[ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ], - formula="formula_value", - invalid_metric_reference=True, - ) - ) - response = await client.get_calculated_metric(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetCalculatedMetricRequest() + request = analytics_admin.DeleteAdSenseLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CalculatedMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.display_name == "display_name_value" - assert response.calculated_metric_id == "calculated_metric_id_value" - assert response.metric_unit == resources.CalculatedMetric.MetricUnit.STANDARD - assert response.restricted_metric_type == [ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ] - assert response.formula == "formula_value" - assert response.invalid_metric_reference is True + assert response is None @pytest.mark.asyncio -async def test_get_calculated_metric_async_from_dict(): - await test_get_calculated_metric_async(request_type=dict) +async def test_delete_ad_sense_link_async_from_dict(): + await test_delete_ad_sense_link_async(request_type=dict) -def test_get_calculated_metric_field_headers(): +def test_delete_ad_sense_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetCalculatedMetricRequest() + request = analytics_admin.DeleteAdSenseLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_calculated_metric), "__call__" + type(client.transport.delete_ad_sense_link), "__call__" ) as call: - call.return_value = resources.CalculatedMetric() - client.get_calculated_metric(request) + call.return_value = None + client.delete_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -51220,25 +50674,23 @@ def test_get_calculated_metric_field_headers(): @pytest.mark.asyncio -async def test_get_calculated_metric_field_headers_async(): +async def test_delete_ad_sense_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetCalculatedMetricRequest() + request = analytics_admin.DeleteAdSenseLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_calculated_metric), "__call__" + type(client.transport.delete_ad_sense_link), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CalculatedMetric() - ) - await client.get_calculated_metric(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -51253,20 +50705,20 @@ async def test_get_calculated_metric_field_headers_async(): ) in kw["metadata"] -def test_get_calculated_metric_flattened(): +def test_delete_ad_sense_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_calculated_metric), "__call__" + type(client.transport.delete_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CalculatedMetric() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_calculated_metric( + client.delete_ad_sense_link( name="name_value", ) @@ -51279,7 +50731,7 @@ def test_get_calculated_metric_flattened(): assert arg == mock_val -def test_get_calculated_metric_flattened_error(): +def test_delete_ad_sense_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -51287,31 +50739,29 @@ def test_get_calculated_metric_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_calculated_metric( - analytics_admin.GetCalculatedMetricRequest(), + client.delete_ad_sense_link( + analytics_admin.DeleteAdSenseLinkRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_calculated_metric_flattened_async(): +async def test_delete_ad_sense_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_calculated_metric), "__call__" + type(client.transport.delete_ad_sense_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CalculatedMetric() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CalculatedMetric() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_calculated_metric( + response = await client.delete_ad_sense_link( name="name_value", ) @@ -51325,7 +50775,7 @@ async def test_get_calculated_metric_flattened_async(): @pytest.mark.asyncio -async def test_get_calculated_metric_flattened_error_async(): +async def test_delete_ad_sense_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -51333,8 +50783,8 @@ async def test_get_calculated_metric_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_calculated_metric( - analytics_admin.GetCalculatedMetricRequest(), + await client.delete_ad_sense_link( + analytics_admin.DeleteAdSenseLinkRequest(), name="name_value", ) @@ -51342,11 +50792,11 @@ async def test_get_calculated_metric_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateCalculatedMetricRequest, + analytics_admin.ListAdSenseLinksRequest, dict, ], ) -def test_create_calculated_metric(request_type, transport: str = "grpc"): +def test_list_ad_sense_links(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -51358,44 +50808,26 @@ def test_create_calculated_metric(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_calculated_metric), "__call__" + type(client.transport.list_ad_sense_links), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.CalculatedMetric( - name="name_value", - description="description_value", - display_name="display_name_value", - calculated_metric_id="calculated_metric_id_value", - metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, - restricted_metric_type=[ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ], - formula="formula_value", - invalid_metric_reference=True, + call.return_value = analytics_admin.ListAdSenseLinksResponse( + next_page_token="next_page_token_value", ) - response = client.create_calculated_metric(request) + response = client.list_ad_sense_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateCalculatedMetricRequest() + request = analytics_admin.ListAdSenseLinksRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CalculatedMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.display_name == "display_name_value" - assert response.calculated_metric_id == "calculated_metric_id_value" - assert response.metric_unit == resources.CalculatedMetric.MetricUnit.STANDARD - assert response.restricted_metric_type == [ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ] - assert response.formula == "formula_value" - assert response.invalid_metric_reference is True + assert isinstance(response, pagers.ListAdSenseLinksPager) + assert response.next_page_token == "next_page_token_value" -def test_create_calculated_metric_empty_call(): +def test_list_ad_sense_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceClient( @@ -51405,18 +50837,18 @@ def test_create_calculated_metric_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_calculated_metric), "__call__" + type(client.transport.list_ad_sense_links), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_calculated_metric() + client.list_ad_sense_links() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateCalculatedMetricRequest() + assert args[0] == analytics_admin.ListAdSenseLinksRequest() -def test_create_calculated_metric_non_empty_request_with_auto_populated_field(): +def test_list_ad_sense_links_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -51427,28 +50859,28 @@ def test_create_calculated_metric_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.CreateCalculatedMetricRequest( + request = analytics_admin.ListAdSenseLinksRequest( parent="parent_value", - calculated_metric_id="calculated_metric_id_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_calculated_metric), "__call__" + type(client.transport.list_ad_sense_links), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_calculated_metric(request=request) + client.list_ad_sense_links(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateCalculatedMetricRequest( + assert args[0] == analytics_admin.ListAdSenseLinksRequest( parent="parent_value", - calculated_metric_id="calculated_metric_id_value", + page_token="page_token_value", ) -def test_create_calculated_metric_use_cached_wrapped_rpc(): +def test_list_ad_sense_links_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -51463,8 +50895,7 @@ def test_create_calculated_metric_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_calculated_metric - in client._transport._wrapped_methods + client._transport.list_ad_sense_links in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -51473,15 +50904,15 @@ def test_create_calculated_metric_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_calculated_metric + client._transport.list_ad_sense_links ] = mock_rpc request = {} - client.create_calculated_metric(request) + client.list_ad_sense_links(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_calculated_metric(request) + client.list_ad_sense_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -51489,7 +50920,7 @@ def test_create_calculated_metric_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_calculated_metric_empty_call_async(): +async def test_list_ad_sense_links_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -51499,31 +50930,22 @@ async def test_create_calculated_metric_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_calculated_metric), "__call__" + type(client.transport.list_ad_sense_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CalculatedMetric( - name="name_value", - description="description_value", - display_name="display_name_value", - calculated_metric_id="calculated_metric_id_value", - metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, - restricted_metric_type=[ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ], - formula="formula_value", - invalid_metric_reference=True, + analytics_admin.ListAdSenseLinksResponse( + next_page_token="next_page_token_value", ) ) - response = await client.create_calculated_metric() + response = await client.list_ad_sense_links() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateCalculatedMetricRequest() + assert args[0] == analytics_admin.ListAdSenseLinksRequest() @pytest.mark.asyncio -async def test_create_calculated_metric_async_use_cached_wrapped_rpc( +async def test_list_ad_sense_links_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients 
should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -51540,33 +50962,34 @@ async def test_create_calculated_metric_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_calculated_metric + client._client._transport.list_ad_sense_links in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_calculated_metric - ] = mock_object + client._client._transport.list_ad_sense_links + ] = mock_rpc request = {} - await client.create_calculated_metric(request) + await client.list_ad_sense_links(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_calculated_metric(request) + await client.list_ad_sense_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_calculated_metric_async( +async def test_list_ad_sense_links_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateCalculatedMetricRequest, + request_type=analytics_admin.ListAdSenseLinksRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -51579,67 +51002,49 @@ async def test_create_calculated_metric_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_calculated_metric), "__call__" + type(client.transport.list_ad_sense_links), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CalculatedMetric( - name="name_value", - description="description_value", - display_name="display_name_value", - calculated_metric_id="calculated_metric_id_value", - metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, - restricted_metric_type=[ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ], - formula="formula_value", - invalid_metric_reference=True, + analytics_admin.ListAdSenseLinksResponse( + next_page_token="next_page_token_value", ) ) - response = await client.create_calculated_metric(request) + response = await client.list_ad_sense_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateCalculatedMetricRequest() + request = analytics_admin.ListAdSenseLinksRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CalculatedMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.display_name == "display_name_value" - assert response.calculated_metric_id == "calculated_metric_id_value" - assert response.metric_unit == resources.CalculatedMetric.MetricUnit.STANDARD - assert response.restricted_metric_type == [ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ] - assert response.formula == "formula_value" - assert response.invalid_metric_reference is True + assert isinstance(response, pagers.ListAdSenseLinksAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_create_calculated_metric_async_from_dict(): - await test_create_calculated_metric_async(request_type=dict) +async def test_list_ad_sense_links_async_from_dict(): + await test_list_ad_sense_links_async(request_type=dict) -def test_create_calculated_metric_field_headers(): +def 
test_list_ad_sense_links_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateCalculatedMetricRequest() + request = analytics_admin.ListAdSenseLinksRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_calculated_metric), "__call__" + type(client.transport.list_ad_sense_links), "__call__" ) as call: - call.return_value = resources.CalculatedMetric() - client.create_calculated_metric(request) + call.return_value = analytics_admin.ListAdSenseLinksResponse() + client.list_ad_sense_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -51655,25 +51060,25 @@ def test_create_calculated_metric_field_headers(): @pytest.mark.asyncio -async def test_create_calculated_metric_field_headers_async(): +async def test_list_ad_sense_links_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateCalculatedMetricRequest() + request = analytics_admin.ListAdSenseLinksRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_calculated_metric), "__call__" + type(client.transport.list_ad_sense_links), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CalculatedMetric() + analytics_admin.ListAdSenseLinksResponse() ) - await client.create_calculated_metric(request) + await client.list_ad_sense_links(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -51688,23 +51093,21 @@ async def test_create_calculated_metric_field_headers_async(): ) in kw["metadata"] -def test_create_calculated_metric_flattened(): +def test_list_ad_sense_links_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_calculated_metric), "__call__" + type(client.transport.list_ad_sense_links), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CalculatedMetric() + call.return_value = analytics_admin.ListAdSenseLinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_calculated_metric( + client.list_ad_sense_links( parent="parent_value", - calculated_metric=resources.CalculatedMetric(name="name_value"), - calculated_metric_id="calculated_metric_id_value", ) # Establish that the underlying call was made with the expected @@ -51714,15 +51117,9 @@ def test_create_calculated_metric_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].calculated_metric - mock_val = resources.CalculatedMetric(name="name_value") - assert arg == mock_val - arg = args[0].calculated_metric_id - mock_val = "calculated_metric_id_value" - assert arg == mock_val -def test_create_calculated_metric_flattened_error(): +def test_list_ad_sense_links_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -51730,36 +51127,32 @@ def test_create_calculated_metric_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_calculated_metric( - analytics_admin.CreateCalculatedMetricRequest(), + client.list_ad_sense_links( + analytics_admin.ListAdSenseLinksRequest(), parent="parent_value", - calculated_metric=resources.CalculatedMetric(name="name_value"), - calculated_metric_id="calculated_metric_id_value", ) @pytest.mark.asyncio -async def test_create_calculated_metric_flattened_async(): +async def test_list_ad_sense_links_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_calculated_metric), "__call__" + type(client.transport.list_ad_sense_links), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CalculatedMetric() + call.return_value = analytics_admin.ListAdSenseLinksResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CalculatedMetric() + analytics_admin.ListAdSenseLinksResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_calculated_metric( + response = await client.list_ad_sense_links( parent="parent_value", - calculated_metric=resources.CalculatedMetric(name="name_value"), - calculated_metric_id="calculated_metric_id_value", ) # Establish that the underlying call was made with the expected @@ -51769,16 +51162,10 @@ async def test_create_calculated_metric_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].calculated_metric - mock_val = resources.CalculatedMetric(name="name_value") - assert arg == mock_val - arg = args[0].calculated_metric_id - mock_val = "calculated_metric_id_value" - assert arg == mock_val @pytest.mark.asyncio -async def test_create_calculated_metric_flattened_error_async(): +async def test_list_ad_sense_links_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -51786,74 +51173,278 @@ async def test_create_calculated_metric_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_calculated_metric( - analytics_admin.CreateCalculatedMetricRequest(), + await client.list_ad_sense_links( + analytics_admin.ListAdSenseLinksRequest(), parent="parent_value", - calculated_metric=resources.CalculatedMetric(name="name_value"), - calculated_metric_id="calculated_metric_id_value", ) -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.ListCalculatedMetricsRequest, - dict, - ], -) -def test_list_calculated_metrics(request_type, transport: str = "grpc"): +def test_list_ad_sense_links_pager(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_calculated_metrics), "__call__" + type(client.transport.list_ad_sense_links), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListCalculatedMetricsResponse( - next_page_token="next_page_token_value", + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + resources.AdSenseLink(), + resources.AdSenseLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[], + next_page_token="def", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + resources.AdSenseLink(), + ], + ), + RuntimeError, ) - response = client.list_calculated_metrics(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analytics_admin.ListCalculatedMetricsRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_ad_sense_links(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCalculatedMetricsPager) - assert response.next_page_token == "next_page_token_value" + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AdSenseLink) for i in results) -def test_list_calculated_metrics_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. + +def test_list_ad_sense_links_pages(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_calculated_metrics), "__call__" + type(client.transport.list_ad_sense_links), "__call__" ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_calculated_metrics() + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + resources.AdSenseLink(), + resources.AdSenseLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[], + next_page_token="def", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + resources.AdSenseLink(), + ], + ), + RuntimeError, + ) + pages = list(client.list_ad_sense_links(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_ad_sense_links_async_pager(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_ad_sense_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + resources.AdSenseLink(), + resources.AdSenseLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[], + next_page_token="def", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + resources.AdSenseLink(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_ad_sense_links( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.AdSenseLink) for i in responses) + + +@pytest.mark.asyncio +async def test_list_ad_sense_links_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_ad_sense_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + resources.AdSenseLink(), + resources.AdSenseLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[], + next_page_token="def", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + resources.AdSenseLink(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_ad_sense_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetEventCreateRuleRequest, + dict, + ], +) +def test_get_event_create_rule(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_event_create_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = event_create_and_edit.EventCreateRule( + name="name_value", + destination_event="destination_event_value", + source_copy_parameters=True, + ) + response = client.get_event_create_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.GetEventCreateRuleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, event_create_and_edit.EventCreateRule) + assert response.name == "name_value" + assert response.destination_event == "destination_event_value" + assert response.source_copy_parameters is True + + +def test_get_event_create_rule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_event_create_rule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_event_create_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListCalculatedMetricsRequest() + assert args[0] == analytics_admin.GetEventCreateRuleRequest() -def test_list_calculated_metrics_non_empty_request_with_auto_populated_field(): +def test_get_event_create_rule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -51864,28 +51455,26 @@ def test_list_calculated_metrics_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_admin.ListCalculatedMetricsRequest( - parent="parent_value", - page_token="page_token_value", + request = analytics_admin.GetEventCreateRuleRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_calculated_metrics), "__call__" + type(client.transport.get_event_create_rule), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_calculated_metrics(request=request) + client.get_event_create_rule(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListCalculatedMetricsRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == analytics_admin.GetEventCreateRuleRequest( + name="name_value", ) -def test_list_calculated_metrics_use_cached_wrapped_rpc(): +def test_get_event_create_rule_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -51900,7 +51489,7 @@ def test_list_calculated_metrics_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_calculated_metrics + client._transport.get_event_create_rule in client._transport._wrapped_methods ) @@ -51910,15 +51499,15 @@ def test_list_calculated_metrics_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_calculated_metrics + client._transport.get_event_create_rule ] = mock_rpc request = {} - client.list_calculated_metrics(request) + client.get_event_create_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_calculated_metrics(request) + client.get_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -51926,7 +51515,7 @@ def test_list_calculated_metrics_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_calculated_metrics_empty_call_async(): +async def test_get_event_create_rule_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -51936,22 +51525,24 @@ async def test_list_calculated_metrics_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_calculated_metrics), "__call__" + type(client.transport.get_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListCalculatedMetricsResponse( - next_page_token="next_page_token_value", + event_create_and_edit.EventCreateRule( + name="name_value", + destination_event="destination_event_value", + source_copy_parameters=True, ) ) - response = await client.list_calculated_metrics() + response = await client.get_event_create_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListCalculatedMetricsRequest() + assert args[0] == analytics_admin.GetEventCreateRuleRequest() @pytest.mark.asyncio -async def test_list_calculated_metrics_async_use_cached_wrapped_rpc( +async def test_get_event_create_rule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -51968,33 +51559,34 @@ async def test_list_calculated_metrics_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - 
client._client._transport.list_calculated_metrics + client._client._transport.get_event_create_rule in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_calculated_metrics - ] = mock_object + client._client._transport.get_event_create_rule + ] = mock_rpc request = {} - await client.list_calculated_metrics(request) + await client.get_event_create_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_calculated_metrics(request) + await client.get_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_calculated_metrics_async( +async def test_get_event_create_rule_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ListCalculatedMetricsRequest, + request_type=analytics_admin.GetEventCreateRuleRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -52007,49 +51599,53 @@ async def test_list_calculated_metrics_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_calculated_metrics), "__call__" + type(client.transport.get_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListCalculatedMetricsResponse( - next_page_token="next_page_token_value", + event_create_and_edit.EventCreateRule( + name="name_value", + destination_event="destination_event_value", + source_copy_parameters=True, ) ) - response = await client.list_calculated_metrics(request) + response = await client.get_event_create_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListCalculatedMetricsRequest() + request = analytics_admin.GetEventCreateRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCalculatedMetricsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, event_create_and_edit.EventCreateRule) + assert response.name == "name_value" + assert response.destination_event == "destination_event_value" + assert response.source_copy_parameters is True @pytest.mark.asyncio -async def test_list_calculated_metrics_async_from_dict(): - await test_list_calculated_metrics_async(request_type=dict) +async def test_get_event_create_rule_async_from_dict(): + await test_get_event_create_rule_async(request_type=dict) -def test_list_calculated_metrics_field_headers(): +def test_get_event_create_rule_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListCalculatedMetricsRequest() + request = analytics_admin.GetEventCreateRuleRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_calculated_metrics), "__call__" + type(client.transport.get_event_create_rule), "__call__" ) as call: - call.return_value = analytics_admin.ListCalculatedMetricsResponse() - client.list_calculated_metrics(request) + call.return_value = event_create_and_edit.EventCreateRule() + client.get_event_create_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -52060,30 +51656,30 @@ def test_list_calculated_metrics_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_calculated_metrics_field_headers_async(): +async def test_get_event_create_rule_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListCalculatedMetricsRequest() + request = analytics_admin.GetEventCreateRuleRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_calculated_metrics), "__call__" + type(client.transport.get_event_create_rule), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListCalculatedMetricsResponse() + event_create_and_edit.EventCreateRule() ) - await client.list_calculated_metrics(request) + await client.get_event_create_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -52094,37 +51690,37 @@ async def test_list_calculated_metrics_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_calculated_metrics_flattened(): +def test_get_event_create_rule_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_calculated_metrics), "__call__" + type(client.transport.get_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListCalculatedMetricsResponse() + call.return_value = event_create_and_edit.EventCreateRule() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_calculated_metrics( - parent="parent_value", + client.get_event_create_rule( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_calculated_metrics_flattened_error(): +def test_get_event_create_rule_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -52132,45 +51728,45 @@ def test_list_calculated_metrics_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_calculated_metrics( - analytics_admin.ListCalculatedMetricsRequest(), - parent="parent_value", + client.get_event_create_rule( + analytics_admin.GetEventCreateRuleRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_calculated_metrics_flattened_async(): +async def test_get_event_create_rule_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_calculated_metrics), "__call__" + type(client.transport.get_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListCalculatedMetricsResponse() + call.return_value = event_create_and_edit.EventCreateRule() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListCalculatedMetricsResponse() + event_create_and_edit.EventCreateRule() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_calculated_metrics( - parent="parent_value", + response = await client.get_event_create_rule( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_calculated_metrics_flattened_error_async(): +async def test_get_event_create_rule_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -52178,318 +51774,104 @@ async def test_list_calculated_metrics_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_calculated_metrics( - analytics_admin.ListCalculatedMetricsRequest(), - parent="parent_value", + await client.get_event_create_rule( + analytics_admin.GetEventCreateRuleRequest(), + name="name_value", ) -def test_list_calculated_metrics_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ListEventCreateRulesRequest, + dict, + ], +) +def test_list_event_create_rules(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_calculated_metrics), "__call__" + type(client.transport.list_event_create_rules), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[ - resources.CalculatedMetric(), - resources.CalculatedMetric(), - resources.CalculatedMetric(), - ], - next_page_token="abc", - ), - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[], - next_page_token="def", - ), - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[ - resources.CalculatedMetric(), - ], - next_page_token="ghi", - ), - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[ - resources.CalculatedMetric(), - resources.CalculatedMetric(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_admin.ListEventCreateRulesResponse( + next_page_token="next_page_token_value", ) - pager = client.list_calculated_metrics(request={}, retry=retry, timeout=timeout) + response = client.list_event_create_rules(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.ListEventCreateRulesRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.CalculatedMetric) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEventCreateRulesPager) + assert response.next_page_token == "next_page_token_value" -def test_list_calculated_metrics_pages(transport_name: str = "grpc"): +def test_list_event_create_rules_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_calculated_metrics), "__call__" + type(client.transport.list_event_create_rules), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[ - resources.CalculatedMetric(), - resources.CalculatedMetric(), - resources.CalculatedMetric(), - ], - next_page_token="abc", - ), - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[], - next_page_token="def", - ), - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[ - resources.CalculatedMetric(), - ], - next_page_token="ghi", - ), - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[ - resources.CalculatedMetric(), - resources.CalculatedMetric(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_calculated_metrics(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.list_event_create_rules() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListEventCreateRulesRequest() -@pytest.mark.asyncio -async def test_list_calculated_metrics_async_pager(): - client = AnalyticsAdminServiceAsyncClient( +def test_list_event_create_rules_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.ListEventCreateRulesRequest( + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_calculated_metrics), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_event_create_rules), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[ - resources.CalculatedMetric(), - resources.CalculatedMetric(), - resources.CalculatedMetric(), - ], - next_page_token="abc", - ), - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[], - next_page_token="def", - ), - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[ - resources.CalculatedMetric(), - ], - next_page_token="ghi", - ), - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[ - resources.CalculatedMetric(), - resources.CalculatedMetric(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - async_pager = await client.list_calculated_metrics( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.CalculatedMetric) for i in responses) - - -@pytest.mark.asyncio -async def test_list_calculated_metrics_async_pages(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_calculated_metrics), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[ - resources.CalculatedMetric(), - resources.CalculatedMetric(), - resources.CalculatedMetric(), - ], - next_page_token="abc", - ), - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[], - next_page_token="def", - ), - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[ - resources.CalculatedMetric(), - ], - next_page_token="ghi", - ), - analytics_admin.ListCalculatedMetricsResponse( - calculated_metrics=[ - resources.CalculatedMetric(), - resources.CalculatedMetric(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_calculated_metrics(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.UpdateCalculatedMetricRequest, - dict, - ], -) -def test_update_calculated_metric(request_type, transport: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_calculated_metric), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.CalculatedMetric( - name="name_value", - description="description_value", - display_name="display_name_value", - calculated_metric_id="calculated_metric_id_value", - metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, - restricted_metric_type=[ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ], - formula="formula_value", - invalid_metric_reference=True, - ) - response = client.update_calculated_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateCalculatedMetricRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.CalculatedMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.display_name == "display_name_value" - assert response.calculated_metric_id == "calculated_metric_id_value" - assert response.metric_unit == resources.CalculatedMetric.MetricUnit.STANDARD - assert response.restricted_metric_type == [ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ] - assert response.formula == "formula_value" - assert response.invalid_metric_reference is True - - -def test_update_calculated_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_calculated_metric), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.update_calculated_metric() + client.list_event_create_rules(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateCalculatedMetricRequest() - - -def test_update_calculated_metric_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analytics_admin.UpdateCalculatedMetricRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_calculated_metric), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
+ assert args[0] == analytics_admin.ListEventCreateRulesRequest( + parent="parent_value", + page_token="page_token_value", ) - client.update_calculated_metric(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateCalculatedMetricRequest() -def test_update_calculated_metric_use_cached_wrapped_rpc(): +def test_list_event_create_rules_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -52504,7 +51886,7 @@ def test_update_calculated_metric_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_calculated_metric + client._transport.list_event_create_rules in client._transport._wrapped_methods ) @@ -52514,15 +51896,15 @@ def test_update_calculated_metric_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_calculated_metric + client._transport.list_event_create_rules ] = mock_rpc request = {} - client.update_calculated_metric(request) + client.list_event_create_rules(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_calculated_metric(request) + client.list_event_create_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -52530,7 +51912,7 @@ def test_update_calculated_metric_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_calculated_metric_empty_call_async(): +async def test_list_event_create_rules_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -52540,31 +51922,22 @@ async def test_update_calculated_metric_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_calculated_metric), "__call__" + type(client.transport.list_event_create_rules), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CalculatedMetric( - name="name_value", - description="description_value", - display_name="display_name_value", - calculated_metric_id="calculated_metric_id_value", - metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, - restricted_metric_type=[ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ], - formula="formula_value", - invalid_metric_reference=True, + analytics_admin.ListEventCreateRulesResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_calculated_metric() + response = await client.list_event_create_rules() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateCalculatedMetricRequest() + assert args[0] == analytics_admin.ListEventCreateRulesRequest() @pytest.mark.asyncio -async def test_update_calculated_metric_async_use_cached_wrapped_rpc( +async def test_list_event_create_rules_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -52581,33 +51954,34 @@ async def test_update_calculated_metric_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_calculated_metric + client._client._transport.list_event_create_rules in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - 
client._client._transport.update_calculated_metric - ] = mock_object + client._client._transport.list_event_create_rules + ] = mock_rpc request = {} - await client.update_calculated_metric(request) + await client.list_event_create_rules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_calculated_metric(request) + await client.list_event_create_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_calculated_metric_async( +async def test_list_event_create_rules_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateCalculatedMetricRequest, + request_type=analytics_admin.ListEventCreateRulesRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -52620,67 +51994,49 @@ async def test_update_calculated_metric_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_calculated_metric), "__call__" + type(client.transport.list_event_create_rules), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CalculatedMetric( - name="name_value", - description="description_value", - display_name="display_name_value", - calculated_metric_id="calculated_metric_id_value", - metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, - restricted_metric_type=[ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ], - formula="formula_value", - invalid_metric_reference=True, + analytics_admin.ListEventCreateRulesResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_calculated_metric(request) + response = await client.list_event_create_rules(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateCalculatedMetricRequest() + request = analytics_admin.ListEventCreateRulesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CalculatedMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.display_name == "display_name_value" - assert response.calculated_metric_id == "calculated_metric_id_value" - assert response.metric_unit == resources.CalculatedMetric.MetricUnit.STANDARD - assert response.restricted_metric_type == [ - resources.CalculatedMetric.RestrictedMetricType.COST_DATA - ] - assert response.formula == "formula_value" - assert response.invalid_metric_reference is True + assert isinstance(response, pagers.ListEventCreateRulesAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_update_calculated_metric_async_from_dict(): - await test_update_calculated_metric_async(request_type=dict) +async def test_list_event_create_rules_async_from_dict(): + await test_list_event_create_rules_async(request_type=dict) -def test_update_calculated_metric_field_headers(): +def 
test_list_event_create_rules_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateCalculatedMetricRequest() + request = analytics_admin.ListEventCreateRulesRequest() - request.calculated_metric.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_calculated_metric), "__call__" + type(client.transport.list_event_create_rules), "__call__" ) as call: - call.return_value = resources.CalculatedMetric() - client.update_calculated_metric(request) + call.return_value = analytics_admin.ListEventCreateRulesResponse() + client.list_event_create_rules(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -52691,30 +52047,30 @@ def test_update_calculated_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "calculated_metric.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_calculated_metric_field_headers_async(): +async def test_list_event_create_rules_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateCalculatedMetricRequest() + request = analytics_admin.ListEventCreateRulesRequest() - request.calculated_metric.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_calculated_metric), "__call__" + type(client.transport.list_event_create_rules), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CalculatedMetric() + analytics_admin.ListEventCreateRulesResponse() ) - await client.update_calculated_metric(request) + await client.list_event_create_rules(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -52725,41 +52081,37 @@ async def test_update_calculated_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "calculated_metric.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_calculated_metric_flattened(): +def test_list_event_create_rules_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_calculated_metric), "__call__" + type(client.transport.list_event_create_rules), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CalculatedMetric() + call.return_value = analytics_admin.ListEventCreateRulesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_calculated_metric( - calculated_metric=resources.CalculatedMetric(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_event_create_rules( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].calculated_metric - mock_val = resources.CalculatedMetric(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_calculated_metric_flattened_error(): +def test_list_event_create_rules_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -52767,50 +52119,45 @@ def test_update_calculated_metric_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_calculated_metric( - analytics_admin.UpdateCalculatedMetricRequest(), - calculated_metric=resources.CalculatedMetric(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_event_create_rules( + analytics_admin.ListEventCreateRulesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_calculated_metric_flattened_async(): +async def test_list_event_create_rules_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_calculated_metric), "__call__" + type(client.transport.list_event_create_rules), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CalculatedMetric() + call.return_value = analytics_admin.ListEventCreateRulesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CalculatedMetric() + analytics_admin.ListEventCreateRulesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.update_calculated_metric( - calculated_metric=resources.CalculatedMetric(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_event_create_rules( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].calculated_metric - mock_val = resources.CalculatedMetric(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_calculated_metric_flattened_error_async(): +async def test_list_event_create_rules_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -52818,100 +52165,312 @@ async def test_update_calculated_metric_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_calculated_metric( - analytics_admin.UpdateCalculatedMetricRequest(), - calculated_metric=resources.CalculatedMetric(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_event_create_rules( + analytics_admin.ListEventCreateRulesRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.DeleteCalculatedMetricRequest, - dict, - ], -) -def test_delete_calculated_metric(request_type, transport: str = "grpc"): +def test_list_event_create_rules_pager(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_calculated_metric), "__call__" + type(client.transport.list_event_create_rules), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_calculated_metric(request) + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + ], + next_page_token="abc", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[], + next_page_token="def", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + ], + next_page_token="ghi", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + ], + ), + RuntimeError, + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteCalculatedMetricRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_event_create_rules(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. - assert response is None + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, event_create_and_edit.EventCreateRule) for i in results + ) -def test_delete_calculated_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_event_create_rules_pages(transport_name: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_calculated_metric), "__call__" + type(client.transport.list_event_create_rules), "__call__" ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + ], + next_page_token="abc", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[], + next_page_token="def", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + ], + next_page_token="ghi", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + ], + ), + RuntimeError, ) - client.delete_calculated_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteCalculatedMetricRequest() + pages = list(client.list_event_create_rules(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_delete_calculated_metric_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsAdminServiceClient( +@pytest.mark.asyncio +async def test_list_event_create_rules_async_pager(): + client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = analytics_admin.DeleteCalculatedMetricRequest( - name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_calculated_metric), "__call__" + type(client.transport.list_event_create_rules), + "__call__", + new_callable=mock.AsyncMock, ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + ], + next_page_token="abc", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[], + next_page_token="def", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + ], + next_page_token="ghi", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + ], + ), + RuntimeError, ) - client.delete_calculated_metric(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteCalculatedMetricRequest( - name="name_value", + async_pager = await client.list_event_create_rules( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + assert len(responses) == 6 + assert all( + isinstance(i, event_create_and_edit.EventCreateRule) for i in responses + ) -def test_delete_calculated_metric_use_cached_wrapped_rpc(): + +@pytest.mark.asyncio +async def test_list_event_create_rules_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the 
gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_event_create_rules), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + ], + next_page_token="abc", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[], + next_page_token="def", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + ], + next_page_token="ghi", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_event_create_rules(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.CreateEventCreateRuleRequest, + dict, + ], +) +def test_create_event_create_rule(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_event_create_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = event_create_and_edit.EventCreateRule( + name="name_value", + destination_event="destination_event_value", + source_copy_parameters=True, + ) + response = client.create_event_create_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.CreateEventCreateRuleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, event_create_and_edit.EventCreateRule) + assert response.name == "name_value" + assert response.destination_event == "destination_event_value" + assert response.source_copy_parameters is True + + +def test_create_event_create_rule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_event_create_rule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_event_create_rule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateEventCreateRuleRequest() + + +def test_create_event_create_rule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.CreateEventCreateRuleRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_event_create_rule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_event_create_rule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateEventCreateRuleRequest( + parent="parent_value", + ) + + +def test_create_event_create_rule_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -52926,7 +52485,7 @@ def test_delete_calculated_metric_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_calculated_metric + client._transport.create_event_create_rule in client._transport._wrapped_methods ) @@ -52936,15 +52495,15 @@ def test_delete_calculated_metric_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_calculated_metric + client._transport.create_event_create_rule ] = mock_rpc request = {} - client.delete_calculated_metric(request) + client.create_event_create_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_calculated_metric(request) + client.create_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -52952,7 +52511,7 @@ def test_delete_calculated_metric_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_calculated_metric_empty_call_async(): +async def test_create_event_create_rule_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -52962,18 +52521,24 @@ async def test_delete_calculated_metric_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_calculated_metric), "__call__" + type(client.transport.create_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_calculated_metric() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_create_and_edit.EventCreateRule( + name="name_value", + destination_event="destination_event_value", + source_copy_parameters=True, + ) + ) + response = await client.create_event_create_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteCalculatedMetricRequest() + assert args[0] == analytics_admin.CreateEventCreateRuleRequest() @pytest.mark.asyncio -async def test_delete_calculated_metric_async_use_cached_wrapped_rpc( +async def test_create_event_create_rule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -52990,33 +52555,34 @@ async def test_delete_calculated_metric_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - 
client._client._transport.delete_calculated_metric + client._client._transport.create_event_create_rule in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_calculated_metric - ] = mock_object + client._client._transport.create_event_create_rule + ] = mock_rpc request = {} - await client.delete_calculated_metric(request) + await client.create_event_create_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_calculated_metric(request) + await client.create_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_calculated_metric_async( +async def test_create_event_create_rule_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteCalculatedMetricRequest, + request_type=analytics_admin.CreateEventCreateRuleRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -53029,44 +52595,53 @@ async def test_delete_calculated_metric_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_calculated_metric), "__call__" + type(client.transport.create_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_calculated_metric(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_create_and_edit.EventCreateRule( + name="name_value", + destination_event="destination_event_value", + source_copy_parameters=True, + ) + ) + response = await client.create_event_create_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteCalculatedMetricRequest() + request = analytics_admin.CreateEventCreateRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, event_create_and_edit.EventCreateRule) + assert response.name == "name_value" + assert response.destination_event == "destination_event_value" + assert response.source_copy_parameters is True @pytest.mark.asyncio -async def test_delete_calculated_metric_async_from_dict(): - await test_delete_calculated_metric_async(request_type=dict) +async def test_create_event_create_rule_async_from_dict(): + await test_create_event_create_rule_async(request_type=dict) -def test_delete_calculated_metric_field_headers(): +def test_create_event_create_rule_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteCalculatedMetricRequest() + request = analytics_admin.CreateEventCreateRuleRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_calculated_metric), "__call__" + type(client.transport.create_event_create_rule), "__call__" ) as call: - call.return_value = None - client.delete_calculated_metric(request) + call.return_value = event_create_and_edit.EventCreateRule() + client.create_event_create_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -53077,28 +52652,30 @@ def test_delete_calculated_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_calculated_metric_field_headers_async(): +async def test_create_event_create_rule_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteCalculatedMetricRequest() + request = analytics_admin.CreateEventCreateRuleRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_calculated_metric), "__call__" + type(client.transport.create_event_create_rule), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_calculated_metric(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_create_and_edit.EventCreateRule() + ) + await client.create_event_create_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -53109,37 +52686,41 @@ async def test_delete_calculated_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_calculated_metric_flattened(): +def test_create_event_create_rule_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_calculated_metric), "__call__" + type(client.transport.create_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = event_create_and_edit.EventCreateRule() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_calculated_metric( - name="name_value", + client.create_event_create_rule( + parent="parent_value", + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].event_create_rule + mock_val = event_create_and_edit.EventCreateRule(name="name_value") assert arg == mock_val -def test_delete_calculated_metric_flattened_error(): +def test_create_event_create_rule_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -53147,43 +52728,50 @@ def test_delete_calculated_metric_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_calculated_metric( - analytics_admin.DeleteCalculatedMetricRequest(), - name="name_value", + client.create_event_create_rule( + analytics_admin.CreateEventCreateRuleRequest(), + parent="parent_value", + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), ) @pytest.mark.asyncio -async def test_delete_calculated_metric_flattened_async(): +async def test_create_event_create_rule_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_calculated_metric), "__call__" + type(client.transport.create_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = event_create_and_edit.EventCreateRule() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_create_and_edit.EventCreateRule() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_calculated_metric( - name="name_value", + response = await client.create_event_create_rule( + parent="parent_value", + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].event_create_rule + mock_val = event_create_and_edit.EventCreateRule(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_delete_calculated_metric_flattened_error_async(): +async def test_create_event_create_rule_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -53191,20 +52779,21 @@ async def test_delete_calculated_metric_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_calculated_metric( - analytics_admin.DeleteCalculatedMetricRequest(), - name="name_value", + await client.create_event_create_rule( + analytics_admin.CreateEventCreateRuleRequest(), + parent="parent_value", + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateRollupPropertyRequest, + analytics_admin.UpdateEventCreateRuleRequest, dict, ], ) -def test_create_rollup_property(request_type, transport: str = "grpc"): +def test_update_event_create_rule(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -53216,23 +52805,30 @@ def test_create_rollup_property(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_rollup_property), "__call__" + type(client.transport.update_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.CreateRollupPropertyResponse() - response = client.create_rollup_property(request) + call.return_value = event_create_and_edit.EventCreateRule( + name="name_value", + destination_event="destination_event_value", + source_copy_parameters=True, + ) + response = client.update_event_create_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateRollupPropertyRequest() + request = analytics_admin.UpdateEventCreateRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.CreateRollupPropertyResponse) + assert isinstance(response, event_create_and_edit.EventCreateRule) + assert response.name == "name_value" + assert response.destination_event == "destination_event_value" + assert response.source_copy_parameters is True -def test_create_rollup_property_empty_call(): +def test_update_event_create_rule_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -53242,18 +52838,18 @@ def test_create_rollup_property_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_rollup_property), "__call__" + type(client.transport.update_event_create_rule), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_rollup_property() + client.update_event_create_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateRollupPropertyRequest() + assert args[0] == analytics_admin.UpdateEventCreateRuleRequest() -def test_create_rollup_property_non_empty_request_with_auto_populated_field(): +def test_update_event_create_rule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -53264,22 +52860,22 @@ def test_create_rollup_property_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.CreateRollupPropertyRequest() + request = analytics_admin.UpdateEventCreateRuleRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_rollup_property), "__call__" + type(client.transport.update_event_create_rule), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_rollup_property(request=request) + client.update_event_create_rule(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateRollupPropertyRequest() + assert args[0] == analytics_admin.UpdateEventCreateRuleRequest() -def test_create_rollup_property_use_cached_wrapped_rpc(): +def test_update_event_create_rule_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -53294,7 +52890,7 @@ def test_create_rollup_property_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_rollup_property + client._transport.update_event_create_rule in client._transport._wrapped_methods ) @@ -53304,15 +52900,15 @@ def test_create_rollup_property_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_rollup_property + client._transport.update_event_create_rule ] = mock_rpc request = {} - client.create_rollup_property(request) + client.update_event_create_rule(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_rollup_property(request) + client.update_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -53320,7 +52916,7 @@ def test_create_rollup_property_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_rollup_property_empty_call_async(): +async def test_update_event_create_rule_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -53330,20 +52926,24 @@ async def test_create_rollup_property_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_rollup_property), "__call__" + type(client.transport.update_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.CreateRollupPropertyResponse() + event_create_and_edit.EventCreateRule( + name="name_value", + destination_event="destination_event_value", + source_copy_parameters=True, + ) ) - response = await client.create_rollup_property() + response = await client.update_event_create_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateRollupPropertyRequest() + assert args[0] == analytics_admin.UpdateEventCreateRuleRequest() @pytest.mark.asyncio -async def test_create_rollup_property_async_use_cached_wrapped_rpc( +async def test_update_event_create_rule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -53360,33 +52960,34 @@ async def test_create_rollup_property_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_rollup_property + client._client._transport.update_event_create_rule in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_rollup_property - ] = mock_object + client._client._transport.update_event_create_rule + ] = mock_rpc request = {} - await client.create_rollup_property(request) + await client.update_event_create_rule(request) # Establish that the underlying gRPC stub method was 
called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_rollup_property(request) + await client.update_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_rollup_property_async( +async def test_update_event_create_rule_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateRollupPropertyRequest, + request_type=analytics_admin.UpdateEventCreateRuleRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -53399,121 +53000,284 @@ async def test_create_rollup_property_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_rollup_property), "__call__" + type(client.transport.update_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.CreateRollupPropertyResponse() + event_create_and_edit.EventCreateRule( + name="name_value", + destination_event="destination_event_value", + source_copy_parameters=True, + ) ) - response = await client.create_rollup_property(request) + response = await client.update_event_create_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateRollupPropertyRequest() + request = analytics_admin.UpdateEventCreateRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.CreateRollupPropertyResponse) + assert isinstance(response, event_create_and_edit.EventCreateRule) + assert response.name == "name_value" + assert response.destination_event == "destination_event_value" + assert response.source_copy_parameters is True @pytest.mark.asyncio -async def test_create_rollup_property_async_from_dict(): - await test_create_rollup_property_async(request_type=dict) +async def test_update_event_create_rule_async_from_dict(): + await test_update_event_create_rule_async(request_type=dict) -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.GetRollupPropertySourceLinkRequest, - dict, - ], -) -def test_get_rollup_property_source_link(request_type, transport: str = "grpc"): +def test_update_event_create_rule_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.UpdateEventCreateRuleRequest() + + request.event_create_rule.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_rollup_property_source_link), "__call__" + type(client.transport.update_event_create_rule), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = resources.RollupPropertySourceLink( - name="name_value", - source_property="source_property_value", - ) - response = client.get_rollup_property_source_link(request) + call.return_value = event_create_and_edit.EventCreateRule() + client.update_event_create_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetRollupPropertySourceLinkRequest() assert args[0] == request - # Establish that the response is the type that we expect. - assert isinstance(response, resources.RollupPropertySourceLink) - assert response.name == "name_value" - assert response.source_property == "source_property_value" + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "event_create_rule.name=name_value", + ) in kw["metadata"] -def test_get_rollup_property_source_link_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AnalyticsAdminServiceClient( +@pytest.mark.asyncio +async def test_update_event_create_rule_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.UpdateEventCreateRuleRequest() + + request.event_create_rule.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_rollup_property_source_link), "__call__" + type(client.transport.update_event_create_rule), "__call__" ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_create_and_edit.EventCreateRule() ) - client.get_rollup_property_source_link() - call.assert_called() + await client.update_event_create_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetRollupPropertySourceLinkRequest() + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "event_create_rule.name=name_value", + ) in kw["metadata"] -def test_get_rollup_property_source_link_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. +def test_update_event_create_rule_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analytics_admin.GetRollupPropertySourceLinkRequest( - name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_rollup_property_source_link), "__call__" + type(client.transport.update_event_create_rule), "__call__" ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Designate an appropriate return value for the call. + call.return_value = event_create_and_edit.EventCreateRule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_event_create_rule( + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - client.get_rollup_property_source_link(request=request) - call.assert_called() + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetRollupPropertySourceLinkRequest( - name="name_value", - ) + arg = args[0].event_create_rule + mock_val = event_create_and_edit.EventCreateRule(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val -def test_get_rollup_property_source_link_use_cached_wrapped_rpc(): +def test_update_event_create_rule_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_event_create_rule( + analytics_admin.UpdateEventCreateRuleRequest(), + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_event_create_rule_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_event_create_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = event_create_and_edit.EventCreateRule() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_create_and_edit.EventCreateRule() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_event_create_rule( + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].event_create_rule + mock_val = event_create_and_edit.EventCreateRule(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_event_create_rule_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_event_create_rule( + analytics_admin.UpdateEventCreateRuleRequest(), + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.DeleteEventCreateRuleRequest, + dict, + ], +) +def test_delete_event_create_rule(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_event_create_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_event_create_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.DeleteEventCreateRuleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_event_create_rule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_event_create_rule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_event_create_rule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteEventCreateRuleRequest() + + +def test_delete_event_create_rule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.DeleteEventCreateRuleRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_event_create_rule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_event_create_rule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteEventCreateRuleRequest( + name="name_value", + ) + + +def test_delete_event_create_rule_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -53528,7 +53292,7 @@ def test_get_rollup_property_source_link_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_rollup_property_source_link + client._transport.delete_event_create_rule in client._transport._wrapped_methods ) @@ -53538,15 +53302,15 @@ def test_get_rollup_property_source_link_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_rollup_property_source_link + client._transport.delete_event_create_rule ] = mock_rpc request = {} - client.get_rollup_property_source_link(request) + client.delete_event_create_rule(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_rollup_property_source_link(request) + client.delete_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -53554,7 +53318,7 @@ def test_get_rollup_property_source_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_rollup_property_source_link_empty_call_async(): +async def test_delete_event_create_rule_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -53564,23 +53328,18 @@ async def test_get_rollup_property_source_link_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_rollup_property_source_link), "__call__" + type(client.transport.delete_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.RollupPropertySourceLink( - name="name_value", - source_property="source_property_value", - ) - ) - response = await client.get_rollup_property_source_link() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_event_create_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetRollupPropertySourceLinkRequest() + assert args[0] == analytics_admin.DeleteEventCreateRuleRequest() @pytest.mark.asyncio -async def test_get_rollup_property_source_link_async_use_cached_wrapped_rpc( +async def test_delete_event_create_rule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -53597,33 +53356,34 @@ async def test_get_rollup_property_source_link_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_rollup_property_source_link + client._client._transport.delete_event_create_rule in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_rollup_property_source_link - ] = mock_object + client._client._transport.delete_event_create_rule + ] = mock_rpc request = {} - await client.get_rollup_property_source_link(request) + await 
client.delete_event_create_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_rollup_property_source_link(request) + await client.delete_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_rollup_property_source_link_async( +async def test_delete_event_create_rule_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.GetRollupPropertySourceLinkRequest, + request_type=analytics_admin.DeleteEventCreateRuleRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -53636,51 +53396,44 @@ async def test_get_rollup_property_source_link_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_rollup_property_source_link), "__call__" + type(client.transport.delete_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.RollupPropertySourceLink( - name="name_value", - source_property="source_property_value", - ) - ) - response = await client.get_rollup_property_source_link(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_event_create_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetRollupPropertySourceLinkRequest() + request = analytics_admin.DeleteEventCreateRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.RollupPropertySourceLink) - assert response.name == "name_value" - assert response.source_property == "source_property_value" + assert response is None @pytest.mark.asyncio -async def test_get_rollup_property_source_link_async_from_dict(): - await test_get_rollup_property_source_link_async(request_type=dict) +async def test_delete_event_create_rule_async_from_dict(): + await test_delete_event_create_rule_async(request_type=dict) -def test_get_rollup_property_source_link_field_headers(): +def test_delete_event_create_rule_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetRollupPropertySourceLinkRequest() + request = analytics_admin.DeleteEventCreateRuleRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_rollup_property_source_link), "__call__" + type(client.transport.delete_event_create_rule), "__call__" ) as call: - call.return_value = resources.RollupPropertySourceLink() - client.get_rollup_property_source_link(request) + call.return_value = None + client.delete_event_create_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -53696,25 +53449,23 @@ def test_get_rollup_property_source_link_field_headers(): @pytest.mark.asyncio -async def test_get_rollup_property_source_link_field_headers_async(): +async def test_delete_event_create_rule_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_admin.GetRollupPropertySourceLinkRequest() + request = analytics_admin.DeleteEventCreateRuleRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_rollup_property_source_link), "__call__" + type(client.transport.delete_event_create_rule), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.RollupPropertySourceLink() - ) - await client.get_rollup_property_source_link(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_event_create_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -53729,20 +53480,20 @@ async def test_get_rollup_property_source_link_field_headers_async(): ) in kw["metadata"] -def test_get_rollup_property_source_link_flattened(): +def test_delete_event_create_rule_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_rollup_property_source_link), "__call__" + type(client.transport.delete_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.RollupPropertySourceLink() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_rollup_property_source_link( + client.delete_event_create_rule( name="name_value", ) @@ -53755,7 +53506,7 @@ def test_get_rollup_property_source_link_flattened(): assert arg == mock_val -def test_get_rollup_property_source_link_flattened_error(): +def test_delete_event_create_rule_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -53763,31 +53514,29 @@ def test_get_rollup_property_source_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_rollup_property_source_link( - analytics_admin.GetRollupPropertySourceLinkRequest(), + client.delete_event_create_rule( + analytics_admin.DeleteEventCreateRuleRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_rollup_property_source_link_flattened_async(): +async def test_delete_event_create_rule_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_rollup_property_source_link), "__call__" + type(client.transport.delete_event_create_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.RollupPropertySourceLink() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.RollupPropertySourceLink() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_rollup_property_source_link( + response = await client.delete_event_create_rule( name="name_value", ) @@ -53801,7 +53550,7 @@ async def test_get_rollup_property_source_link_flattened_async(): @pytest.mark.asyncio -async def test_get_rollup_property_source_link_flattened_error_async(): +async def test_delete_event_create_rule_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -53809,8 +53558,8 @@ async def test_get_rollup_property_source_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_rollup_property_source_link( - analytics_admin.GetRollupPropertySourceLinkRequest(), + await client.delete_event_create_rule( + analytics_admin.DeleteEventCreateRuleRequest(), name="name_value", ) @@ -53818,11 +53567,11 @@ async def test_get_rollup_property_source_link_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListRollupPropertySourceLinksRequest, + analytics_admin.GetEventEditRuleRequest, dict, ], ) -def test_list_rollup_property_source_links(request_type, transport: str = "grpc"): +def test_get_event_edit_rule(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -53834,26 +53583,30 @@ def test_list_rollup_property_source_links(request_type, transport: str = "grpc" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollup_property_source_links), "__call__" + type(client.transport.get_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.ListRollupPropertySourceLinksResponse( - next_page_token="next_page_token_value", + call.return_value = event_create_and_edit.EventEditRule( + name="name_value", + display_name="display_name_value", + processing_order=1720, ) - response = client.list_rollup_property_source_links(request) + response = client.get_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListRollupPropertySourceLinksRequest() + request = analytics_admin.GetEventEditRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRollupPropertySourceLinksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, event_create_and_edit.EventEditRule) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.processing_order == 1720 -def test_list_rollup_property_source_links_empty_call(): +def test_get_event_edit_rule_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -53863,18 +53616,18 @@ def test_list_rollup_property_source_links_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollup_property_source_links), "__call__" + type(client.transport.get_event_edit_rule), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_rollup_property_source_links() + client.get_event_edit_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListRollupPropertySourceLinksRequest() + assert args[0] == analytics_admin.GetEventEditRuleRequest() -def test_list_rollup_property_source_links_non_empty_request_with_auto_populated_field(): +def test_get_event_edit_rule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -53885,28 +53638,26 @@ def test_list_rollup_property_source_links_non_empty_request_with_auto_populated # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListRollupPropertySourceLinksRequest( - parent="parent_value", - page_token="page_token_value", + request = analytics_admin.GetEventEditRuleRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollup_property_source_links), "__call__" + type(client.transport.get_event_edit_rule), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_rollup_property_source_links(request=request) + client.get_event_edit_rule(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListRollupPropertySourceLinksRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == analytics_admin.GetEventEditRuleRequest( + name="name_value", ) -def test_list_rollup_property_source_links_use_cached_wrapped_rpc(): +def test_get_event_edit_rule_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -53921,8 +53672,7 @@ def test_list_rollup_property_source_links_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_rollup_property_source_links - in client._transport._wrapped_methods + client._transport.get_event_edit_rule in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -53931,15 +53681,15 @@ def test_list_rollup_property_source_links_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_rollup_property_source_links + client._transport.get_event_edit_rule ] = mock_rpc request = {} - client.list_rollup_property_source_links(request) + client.get_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_rollup_property_source_links(request) + client.get_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -53947,7 +53697,7 @@ def test_list_rollup_property_source_links_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_rollup_property_source_links_empty_call_async(): +async def test_get_event_edit_rule_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -53957,22 +53707,24 @@ async def test_list_rollup_property_source_links_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollup_property_source_links), "__call__" + type(client.transport.get_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListRollupPropertySourceLinksResponse( - next_page_token="next_page_token_value", + event_create_and_edit.EventEditRule( + name="name_value", + display_name="display_name_value", + processing_order=1720, ) ) - response = await client.list_rollup_property_source_links() + response = await client.get_event_edit_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListRollupPropertySourceLinksRequest() + assert args[0] == analytics_admin.GetEventEditRuleRequest() @pytest.mark.asyncio -async def test_list_rollup_property_source_links_async_use_cached_wrapped_rpc( +async def test_get_event_edit_rule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -53989,33 +53741,34 @@ async def test_list_rollup_property_source_links_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_rollup_property_source_links + client._client._transport.get_event_edit_rule in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_rollup_property_source_links - ] = mock_object + client._client._transport.get_event_edit_rule + ] = mock_rpc request = {} - await client.list_rollup_property_source_links(request) + await client.get_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_rollup_property_source_links(request) + await client.get_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_rollup_property_source_links_async( +async def test_get_event_edit_rule_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ListRollupPropertySourceLinksRequest, + request_type=analytics_admin.GetEventEditRuleRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -54028,49 +53781,53 @@ async def test_list_rollup_property_source_links_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollup_property_source_links), "__call__" + type(client.transport.get_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListRollupPropertySourceLinksResponse( - next_page_token="next_page_token_value", + event_create_and_edit.EventEditRule( + name="name_value", + display_name="display_name_value", + processing_order=1720, ) ) - response = await client.list_rollup_property_source_links(request) + response = await client.get_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListRollupPropertySourceLinksRequest() + request = analytics_admin.GetEventEditRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRollupPropertySourceLinksAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, event_create_and_edit.EventEditRule) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.processing_order == 1720 @pytest.mark.asyncio -async def test_list_rollup_property_source_links_async_from_dict(): - await test_list_rollup_property_source_links_async(request_type=dict) +async def test_get_event_edit_rule_async_from_dict(): + await test_get_event_edit_rule_async(request_type=dict) -def test_list_rollup_property_source_links_field_headers(): +def test_get_event_edit_rule_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListRollupPropertySourceLinksRequest() + request = analytics_admin.GetEventEditRuleRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollup_property_source_links), "__call__" + type(client.transport.get_event_edit_rule), "__call__" ) as call: - call.return_value = analytics_admin.ListRollupPropertySourceLinksResponse() - client.list_rollup_property_source_links(request) + call.return_value = event_create_and_edit.EventEditRule() + client.get_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -54081,30 +53838,30 @@ def test_list_rollup_property_source_links_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_rollup_property_source_links_field_headers_async(): +async def test_get_event_edit_rule_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListRollupPropertySourceLinksRequest() + request = analytics_admin.GetEventEditRuleRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollup_property_source_links), "__call__" + type(client.transport.get_event_edit_rule), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListRollupPropertySourceLinksResponse() + event_create_and_edit.EventEditRule() ) - await client.list_rollup_property_source_links(request) + await client.get_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -54115,37 +53872,37 @@ async def test_list_rollup_property_source_links_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_rollup_property_source_links_flattened(): +def test_get_event_edit_rule_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_rollup_property_source_links), "__call__" + type(client.transport.get_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListRollupPropertySourceLinksResponse() + call.return_value = event_create_and_edit.EventEditRule() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_rollup_property_source_links( - parent="parent_value", + client.get_event_edit_rule( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_rollup_property_source_links_flattened_error(): +def test_get_event_edit_rule_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -54153,45 +53910,45 @@ def test_list_rollup_property_source_links_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_rollup_property_source_links( - analytics_admin.ListRollupPropertySourceLinksRequest(), - parent="parent_value", + client.get_event_edit_rule( + analytics_admin.GetEventEditRuleRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_rollup_property_source_links_flattened_async(): +async def test_get_event_edit_rule_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_rollup_property_source_links), "__call__" + type(client.transport.get_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListRollupPropertySourceLinksResponse() + call.return_value = event_create_and_edit.EventEditRule() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListRollupPropertySourceLinksResponse() + event_create_and_edit.EventEditRule() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_rollup_property_source_links( - parent="parent_value", + response = await client.get_event_edit_rule( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_rollup_property_source_links_flattened_error_async(): +async def test_get_event_edit_rule_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -54199,308 +53956,104 @@ async def test_list_rollup_property_source_links_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_rollup_property_source_links( - analytics_admin.ListRollupPropertySourceLinksRequest(), - parent="parent_value", + await client.get_event_edit_rule( + analytics_admin.GetEventEditRuleRequest(), + name="name_value", ) -def test_list_rollup_property_source_links_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ListEventEditRulesRequest, + dict, + ], +) +def test_list_event_edit_rules(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollup_property_source_links), "__call__" + type(client.transport.list_event_edit_rules), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[], - next_page_token="def", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_rollup_property_source_links( - request={}, retry=retry, timeout=timeout + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.ListEventEditRulesResponse( + next_page_token="next_page_token_value", ) + response = client.list_event_edit_rules(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.ListEventEditRulesRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.RollupPropertySourceLink) for i in results) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListEventEditRulesPager) + assert response.next_page_token == "next_page_token_value" -def test_list_rollup_property_source_links_pages(transport_name: str = "grpc"): +def test_list_event_edit_rules_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollup_property_source_links), "__call__" + type(client.transport.list_event_edit_rules), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[], - next_page_token="def", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) - pages = list(client.list_rollup_property_source_links(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.list_event_edit_rules() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListEventEditRulesRequest() -@pytest.mark.asyncio -async def test_list_rollup_property_source_links_async_pager(): - client = AnalyticsAdminServiceAsyncClient( +def test_list_event_edit_rules_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.ListEventEditRulesRequest( + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollup_property_source_links), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[], - next_page_token="def", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_rollup_property_source_links( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.RollupPropertySourceLink) for i in responses) - - -@pytest.mark.asyncio -async def test_list_rollup_property_source_links_async_pages(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_rollup_property_source_links), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[], - next_page_token="def", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_rollup_property_source_links(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.CreateRollupPropertySourceLinkRequest, - dict, - ], -) -def test_create_rollup_property_source_link(request_type, transport: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_rollup_property_source_link), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.RollupPropertySourceLink( - name="name_value", - source_property="source_property_value", - ) - response = client.create_rollup_property_source_link(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateRollupPropertySourceLinkRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.RollupPropertySourceLink) - assert response.name == "name_value" - assert response.source_property == "source_property_value" - - -def test_create_rollup_property_source_link_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_rollup_property_source_link), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_rollup_property_source_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateRollupPropertySourceLinkRequest() - - -def test_create_rollup_property_source_link_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = analytics_admin.CreateRollupPropertySourceLinkRequest( - parent="parent_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_rollup_property_source_link), "__call__" + type(client.transport.list_event_edit_rules), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_rollup_property_source_link(request=request) + client.list_event_edit_rules(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateRollupPropertySourceLinkRequest( + assert args[0] == analytics_admin.ListEventEditRulesRequest( parent="parent_value", + page_token="page_token_value", ) -def test_create_rollup_property_source_link_use_cached_wrapped_rpc(): +def test_list_event_edit_rules_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -54515,7 +54068,7 @@ def test_create_rollup_property_source_link_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_rollup_property_source_link + client._transport.list_event_edit_rules in client._transport._wrapped_methods ) @@ -54525,15 +54078,15 @@ def test_create_rollup_property_source_link_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_rollup_property_source_link + client._transport.list_event_edit_rules ] = mock_rpc request = {} - client.create_rollup_property_source_link(request) + client.list_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_rollup_property_source_link(request) + client.list_event_edit_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -54541,7 +54094,7 @@ def test_create_rollup_property_source_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_rollup_property_source_link_empty_call_async(): +async def test_list_event_edit_rules_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -54551,23 +54104,22 @@ async def test_create_rollup_property_source_link_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_rollup_property_source_link), "__call__" + type(client.transport.list_event_edit_rules), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.RollupPropertySourceLink( - name="name_value", - source_property="source_property_value", + analytics_admin.ListEventEditRulesResponse( + next_page_token="next_page_token_value", ) ) - response = await client.create_rollup_property_source_link() + response = await client.list_event_edit_rules() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateRollupPropertySourceLinkRequest() + assert args[0] == analytics_admin.ListEventEditRulesRequest() @pytest.mark.asyncio -async def test_create_rollup_property_source_link_async_use_cached_wrapped_rpc( +async def test_list_event_edit_rules_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -54584,33 +54136,34 @@ async def test_create_rollup_property_source_link_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_rollup_property_source_link + client._client._transport.list_event_edit_rules in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_rollup_property_source_link - ] = mock_object + client._client._transport.list_event_edit_rules + ] = mock_rpc request = {} - await client.create_rollup_property_source_link(request) + await client.list_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_rollup_property_source_link(request) + await client.list_event_edit_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_rollup_property_source_link_async( +async def test_list_event_edit_rules_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateRollupPropertySourceLinkRequest, + request_type=analytics_admin.ListEventEditRulesRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -54623,51 +54176,49 @@ async def test_create_rollup_property_source_link_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_rollup_property_source_link), "__call__" + type(client.transport.list_event_edit_rules), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.RollupPropertySourceLink( - name="name_value", - source_property="source_property_value", + analytics_admin.ListEventEditRulesResponse( + next_page_token="next_page_token_value", ) ) - response = await client.create_rollup_property_source_link(request) + response = await client.list_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateRollupPropertySourceLinkRequest() + request = analytics_admin.ListEventEditRulesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.RollupPropertySourceLink) - assert response.name == "name_value" - assert response.source_property == "source_property_value" + assert isinstance(response, pagers.ListEventEditRulesAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_create_rollup_property_source_link_async_from_dict(): - await test_create_rollup_property_source_link_async(request_type=dict) +async def test_list_event_edit_rules_async_from_dict(): + await test_list_event_edit_rules_async(request_type=dict) -def test_create_rollup_property_source_link_field_headers(): +def test_list_event_edit_rules_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateRollupPropertySourceLinkRequest() + request = analytics_admin.ListEventEditRulesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_rollup_property_source_link), "__call__" + type(client.transport.list_event_edit_rules), "__call__" ) as call: - call.return_value = resources.RollupPropertySourceLink() - client.create_rollup_property_source_link(request) + call.return_value = analytics_admin.ListEventEditRulesResponse() + client.list_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -54683,25 +54234,25 @@ def test_create_rollup_property_source_link_field_headers(): @pytest.mark.asyncio -async def test_create_rollup_property_source_link_field_headers_async(): +async def test_list_event_edit_rules_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateRollupPropertySourceLinkRequest() + request = analytics_admin.ListEventEditRulesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_rollup_property_source_link), "__call__" + type(client.transport.list_event_edit_rules), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.RollupPropertySourceLink() + analytics_admin.ListEventEditRulesResponse() ) - await client.create_rollup_property_source_link(request) + await client.list_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -54716,24 +54267,21 @@ async def test_create_rollup_property_source_link_field_headers_async(): ) in kw["metadata"] -def test_create_rollup_property_source_link_flattened(): +def test_list_event_edit_rules_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_rollup_property_source_link), "__call__" + type(client.transport.list_event_edit_rules), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.RollupPropertySourceLink() + call.return_value = analytics_admin.ListEventEditRulesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_rollup_property_source_link( + client.list_event_edit_rules( parent="parent_value", - rollup_property_source_link=resources.RollupPropertySourceLink( - name="name_value" - ), ) # Establish that the underlying call was made with the expected @@ -54743,12 +54291,9 @@ def test_create_rollup_property_source_link_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].rollup_property_source_link - mock_val = resources.RollupPropertySourceLink(name="name_value") - assert arg == mock_val -def test_create_rollup_property_source_link_flattened_error(): +def test_list_event_edit_rules_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -54756,38 +54301,32 @@ def test_create_rollup_property_source_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_rollup_property_source_link( - analytics_admin.CreateRollupPropertySourceLinkRequest(), + client.list_event_edit_rules( + analytics_admin.ListEventEditRulesRequest(), parent="parent_value", - rollup_property_source_link=resources.RollupPropertySourceLink( - name="name_value" - ), ) @pytest.mark.asyncio -async def test_create_rollup_property_source_link_flattened_async(): +async def test_list_event_edit_rules_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_rollup_property_source_link), "__call__" + type(client.transport.list_event_edit_rules), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.RollupPropertySourceLink() + call.return_value = analytics_admin.ListEventEditRulesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.RollupPropertySourceLink() + analytics_admin.ListEventEditRulesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_rollup_property_source_link( + response = await client.list_event_edit_rules( parent="parent_value", - rollup_property_source_link=resources.RollupPropertySourceLink( - name="name_value" - ), ) # Establish that the underlying call was made with the expected @@ -54797,13 +54336,10 @@ async def test_create_rollup_property_source_link_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].rollup_property_source_link - mock_val = resources.RollupPropertySourceLink(name="name_value") - assert arg == mock_val @pytest.mark.asyncio -async def test_create_rollup_property_source_link_flattened_error_async(): +async def test_list_event_edit_rules_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -54811,23 +54347,224 @@ async def test_create_rollup_property_source_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_rollup_property_source_link( - analytics_admin.CreateRollupPropertySourceLinkRequest(), + await client.list_event_edit_rules( + analytics_admin.ListEventEditRulesRequest(), parent="parent_value", - rollup_property_source_link=resources.RollupPropertySourceLink( - name="name_value" + ) + + +def test_list_event_edit_rules_pager(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_event_edit_rules), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), + ], + next_page_token="abc", + ), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[], + next_page_token="def", + ), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + ], + next_page_token="ghi", + ), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_event_edit_rules(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, event_create_and_edit.EventEditRule) for i in results) + + +def test_list_event_edit_rules_pages(transport_name: str = 
"grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_event_edit_rules), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), + ], + next_page_token="abc", + ), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[], + next_page_token="def", + ), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + ], + next_page_token="ghi", + ), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), + ], + ), + RuntimeError, + ) + pages = list(client.list_event_edit_rules(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_event_edit_rules_async_pager(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_event_edit_rules), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), + ], + next_page_token="abc", + ), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[], + next_page_token="def", + ), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + ], + next_page_token="ghi", + ), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_event_edit_rules( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, event_create_and_edit.EventEditRule) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_event_edit_rules_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_event_edit_rules), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), + ], + next_page_token="abc", + ), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[], + next_page_token="def", + ), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + ], + next_page_token="ghi", + ), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), + ], ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_event_edit_rules(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteRollupPropertySourceLinkRequest, + analytics_admin.CreateEventEditRuleRequest, dict, ], ) -def test_delete_rollup_property_source_link(request_type, transport: str = "grpc"): +def test_create_event_edit_rule(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -54839,23 +54576,30 @@ def test_delete_rollup_property_source_link(request_type, transport: str = "grpc # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_rollup_property_source_link), "__call__" + type(client.transport.create_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_rollup_property_source_link(request) + call.return_value = event_create_and_edit.EventEditRule( + name="name_value", + display_name="display_name_value", + processing_order=1720, + ) + response = client.create_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteRollupPropertySourceLinkRequest() + request = analytics_admin.CreateEventEditRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, event_create_and_edit.EventEditRule) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.processing_order == 1720 -def test_delete_rollup_property_source_link_empty_call(): +def test_create_event_edit_rule_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -54865,18 +54609,18 @@ def test_delete_rollup_property_source_link_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_rollup_property_source_link), "__call__" + type(client.transport.create_event_edit_rule), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_rollup_property_source_link() + client.create_event_edit_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteRollupPropertySourceLinkRequest() + assert args[0] == analytics_admin.CreateEventEditRuleRequest() -def test_delete_rollup_property_source_link_non_empty_request_with_auto_populated_field(): +def test_create_event_edit_rule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -54887,26 +54631,26 @@ def test_delete_rollup_property_source_link_non_empty_request_with_auto_populate # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.DeleteRollupPropertySourceLinkRequest( - name="name_value", + request = analytics_admin.CreateEventEditRuleRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_rollup_property_source_link), "__call__" + type(client.transport.create_event_edit_rule), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_rollup_property_source_link(request=request) + client.create_event_edit_rule(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteRollupPropertySourceLinkRequest( - name="name_value", + assert args[0] == analytics_admin.CreateEventEditRuleRequest( + parent="parent_value", ) -def test_delete_rollup_property_source_link_use_cached_wrapped_rpc(): +def test_create_event_edit_rule_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -54921,7 +54665,7 @@ def test_delete_rollup_property_source_link_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_rollup_property_source_link + client._transport.create_event_edit_rule in client._transport._wrapped_methods ) @@ -54931,15 +54675,15 @@ def test_delete_rollup_property_source_link_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_rollup_property_source_link + client._transport.create_event_edit_rule ] = mock_rpc request = {} - client.delete_rollup_property_source_link(request) + client.create_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_rollup_property_source_link(request) + client.create_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -54947,7 +54691,7 @@ def test_delete_rollup_property_source_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_rollup_property_source_link_empty_call_async(): +async def test_create_event_edit_rule_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -54957,18 +54701,24 @@ async def test_delete_rollup_property_source_link_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_rollup_property_source_link), "__call__" + type(client.transport.create_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_rollup_property_source_link() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_create_and_edit.EventEditRule( + name="name_value", + display_name="display_name_value", + processing_order=1720, + ) + ) + response = await client.create_event_edit_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteRollupPropertySourceLinkRequest() + assert args[0] == analytics_admin.CreateEventEditRuleRequest() @pytest.mark.asyncio -async def test_delete_rollup_property_source_link_async_use_cached_wrapped_rpc( +async def test_create_event_edit_rule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -54985,33 +54735,34 @@ async def test_delete_rollup_property_source_link_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_rollup_property_source_link + client._client._transport.create_event_edit_rule in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_rollup_property_source_link - ] = mock_object + client._client._transport.create_event_edit_rule + ] = mock_rpc request = {} - await 
client.delete_rollup_property_source_link(request) + await client.create_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_rollup_property_source_link(request) + await client.create_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_rollup_property_source_link_async( +async def test_create_event_edit_rule_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteRollupPropertySourceLinkRequest, + request_type=analytics_admin.CreateEventEditRuleRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -55024,44 +54775,53 @@ async def test_delete_rollup_property_source_link_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_rollup_property_source_link), "__call__" + type(client.transport.create_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_rollup_property_source_link(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_create_and_edit.EventEditRule( + name="name_value", + display_name="display_name_value", + processing_order=1720, + ) + ) + response = await client.create_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteRollupPropertySourceLinkRequest() + request = analytics_admin.CreateEventEditRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, event_create_and_edit.EventEditRule) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.processing_order == 1720 @pytest.mark.asyncio -async def test_delete_rollup_property_source_link_async_from_dict(): - await test_delete_rollup_property_source_link_async(request_type=dict) +async def test_create_event_edit_rule_async_from_dict(): + await test_create_event_edit_rule_async(request_type=dict) -def test_delete_rollup_property_source_link_field_headers(): +def test_create_event_edit_rule_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteRollupPropertySourceLinkRequest() + request = analytics_admin.CreateEventEditRuleRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_rollup_property_source_link), "__call__" + type(client.transport.create_event_edit_rule), "__call__" ) as call: - call.return_value = None - client.delete_rollup_property_source_link(request) + call.return_value = event_create_and_edit.EventEditRule() + client.create_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -55072,28 +54832,30 @@ def test_delete_rollup_property_source_link_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_rollup_property_source_link_field_headers_async(): +async def test_create_event_edit_rule_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteRollupPropertySourceLinkRequest() + request = analytics_admin.CreateEventEditRuleRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_rollup_property_source_link), "__call__" + type(client.transport.create_event_edit_rule), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_rollup_property_source_link(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_create_and_edit.EventEditRule() + ) + await client.create_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -55104,37 +54866,41 @@ async def test_delete_rollup_property_source_link_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_rollup_property_source_link_flattened(): +def test_create_event_edit_rule_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_rollup_property_source_link), "__call__" + type(client.transport.create_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = event_create_and_edit.EventEditRule() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_rollup_property_source_link( - name="name_value", + client.create_event_edit_rule( + parent="parent_value", + event_edit_rule=event_create_and_edit.EventEditRule(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].event_edit_rule + mock_val = event_create_and_edit.EventEditRule(name="name_value") assert arg == mock_val -def test_delete_rollup_property_source_link_flattened_error(): +def test_create_event_edit_rule_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -55142,43 +54908,50 @@ def test_delete_rollup_property_source_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_rollup_property_source_link( - analytics_admin.DeleteRollupPropertySourceLinkRequest(), - name="name_value", + client.create_event_edit_rule( + analytics_admin.CreateEventEditRuleRequest(), + parent="parent_value", + event_edit_rule=event_create_and_edit.EventEditRule(name="name_value"), ) @pytest.mark.asyncio -async def test_delete_rollup_property_source_link_flattened_async(): +async def test_create_event_edit_rule_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_rollup_property_source_link), "__call__" + type(client.transport.create_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = event_create_and_edit.EventEditRule() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_create_and_edit.EventEditRule() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_rollup_property_source_link( - name="name_value", + response = await client.create_event_edit_rule( + parent="parent_value", + event_edit_rule=event_create_and_edit.EventEditRule(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].event_edit_rule + mock_val = event_create_and_edit.EventEditRule(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_delete_rollup_property_source_link_flattened_error_async(): +async def test_create_event_edit_rule_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -55186,20 +54959,21 @@ async def test_delete_rollup_property_source_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_rollup_property_source_link( - analytics_admin.DeleteRollupPropertySourceLinkRequest(), - name="name_value", + await client.create_event_edit_rule( + analytics_admin.CreateEventEditRuleRequest(), + parent="parent_value", + event_edit_rule=event_create_and_edit.EventEditRule(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateSubpropertyRequest, + analytics_admin.UpdateEventEditRuleRequest, dict, ], ) -def test_create_subproperty(request_type, transport: str = "grpc"): +def test_update_event_edit_rule(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -55211,23 +54985,30 @@ def test_create_subproperty(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_subproperty), "__call__" + type(client.transport.update_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.CreateSubpropertyResponse() - response = client.create_subproperty(request) + call.return_value = event_create_and_edit.EventEditRule( + name="name_value", + display_name="display_name_value", + processing_order=1720, + ) + response = client.update_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateSubpropertyRequest() + request = analytics_admin.UpdateEventEditRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.CreateSubpropertyResponse) + assert isinstance(response, event_create_and_edit.EventEditRule) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.processing_order == 1720 -def test_create_subproperty_empty_call(): +def test_update_event_edit_rule_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -55237,18 +55018,18 @@ def test_create_subproperty_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_subproperty), "__call__" + type(client.transport.update_event_edit_rule), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_subproperty() + client.update_event_edit_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateSubpropertyRequest() + assert args[0] == analytics_admin.UpdateEventEditRuleRequest() -def test_create_subproperty_non_empty_request_with_auto_populated_field(): +def test_update_event_edit_rule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -55259,26 +55040,22 @@ def test_create_subproperty_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.CreateSubpropertyRequest( - parent="parent_value", - ) + request = analytics_admin.UpdateEventEditRuleRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_subproperty), "__call__" + type(client.transport.update_event_edit_rule), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_subproperty(request=request) + client.update_event_edit_rule(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateSubpropertyRequest( - parent="parent_value", - ) + assert args[0] == analytics_admin.UpdateEventEditRuleRequest() -def test_create_subproperty_use_cached_wrapped_rpc(): +def test_update_event_edit_rule_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -55293,7 +55070,8 @@ def test_create_subproperty_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_subproperty in client._transport._wrapped_methods + client._transport.update_event_edit_rule + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -55302,15 +55080,15 @@ def test_create_subproperty_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_subproperty + client._transport.update_event_edit_rule ] = mock_rpc request = {} - client.create_subproperty(request) + client.update_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_subproperty(request) + client.update_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -55318,7 +55096,7 @@ def test_create_subproperty_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_subproperty_empty_call_async(): +async def test_update_event_edit_rule_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -55328,20 +55106,24 @@ async def test_create_subproperty_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_subproperty), "__call__" + type(client.transport.update_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.CreateSubpropertyResponse() + event_create_and_edit.EventEditRule( + name="name_value", + display_name="display_name_value", + processing_order=1720, + ) ) - response = await client.create_subproperty() + response = await client.update_event_edit_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateSubpropertyRequest() + assert args[0] == analytics_admin.UpdateEventEditRuleRequest() @pytest.mark.asyncio -async def test_create_subproperty_async_use_cached_wrapped_rpc( +async def test_update_event_edit_rule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -55358,33 +55140,34 @@ async def test_create_subproperty_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_subproperty + client._client._transport.update_event_edit_rule in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_subproperty - ] = mock_object + client._client._transport.update_event_edit_rule + ] = mock_rpc request = {} - await client.create_subproperty(request) + await client.update_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_subproperty(request) + await client.update_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_subproperty_async( +async def test_update_event_edit_rule_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateSubpropertyRequest, + request_type=analytics_admin.UpdateEventEditRuleRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -55397,288 +55180,53 @@ async def test_create_subproperty_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_subproperty), "__call__" + type(client.transport.update_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.CreateSubpropertyResponse() - ) - response = await client.create_subproperty(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateSubpropertyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.CreateSubpropertyResponse) - - -@pytest.mark.asyncio -async def test_create_subproperty_async_from_dict(): - await test_create_subproperty_async(request_type=dict) - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.CreateSubpropertyEventFilterRequest, - dict, - ], -) -def test_create_subproperty_event_filter(request_type, transport: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value", - apply_to_property="apply_to_property_value", - ) - response = client.create_subproperty_event_filter(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateSubpropertyEventFilterRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) - assert response.name == "name_value" - assert response.apply_to_property == "apply_to_property_value" - - -def test_create_subproperty_event_filter_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_subproperty_event_filter() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateSubpropertyEventFilterRequest() - - -def test_create_subproperty_event_filter_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analytics_admin.CreateSubpropertyEventFilterRequest( - parent="parent_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_subproperty_event_filter(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateSubpropertyEventFilterRequest( - parent="parent_value", - ) - - -def test_create_subproperty_event_filter_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_subproperty_event_filter - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_subproperty_event_filter - ] = mock_rpc - request = {} - client.create_subproperty_event_filter(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_subproperty_event_filter(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_subproperty_event_filter_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value", - apply_to_property="apply_to_property_value", - ) - ) - response = await client.create_subproperty_event_filter() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateSubpropertyEventFilterRequest() - - -@pytest.mark.asyncio -async def test_create_subproperty_event_filter_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_subproperty_event_filter - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[ - client._client._transport.create_subproperty_event_filter - ] = mock_object - - request = {} - await client.create_subproperty_event_filter(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 - - await client.create_subproperty_event_filter(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_subproperty_event_filter_async( - transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateSubpropertyEventFilterRequest, -): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_subproperty_event_filter.SubpropertyEventFilter( + event_create_and_edit.EventEditRule( name="name_value", - apply_to_property="apply_to_property_value", + display_name="display_name_value", + processing_order=1720, ) ) - response = await client.create_subproperty_event_filter(request) + response = await client.update_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.CreateSubpropertyEventFilterRequest() + request = analytics_admin.UpdateEventEditRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) + assert isinstance(response, event_create_and_edit.EventEditRule) assert response.name == "name_value" - assert response.apply_to_property == "apply_to_property_value" + assert response.display_name == "display_name_value" + assert response.processing_order == 1720 @pytest.mark.asyncio -async def test_create_subproperty_event_filter_async_from_dict(): - await test_create_subproperty_event_filter_async(request_type=dict) +async def test_update_event_edit_rule_async_from_dict(): + await test_update_event_edit_rule_async(request_type=dict) -def test_create_subproperty_event_filter_field_headers(): +def test_update_event_edit_rule_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateSubpropertyEventFilterRequest() + request = analytics_admin.UpdateEventEditRuleRequest() - request.parent = "parent_value" + request.event_edit_rule.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" + type(client.transport.update_event_edit_rule), "__call__" ) as call: - call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() - client.create_subproperty_event_filter(request) + call.return_value = event_create_and_edit.EventEditRule() + client.update_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -55689,30 +55237,30 @@ def test_create_subproperty_event_filter_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "event_edit_rule.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_subproperty_event_filter_field_headers_async(): +async def test_update_event_edit_rule_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateSubpropertyEventFilterRequest() + request = analytics_admin.UpdateEventEditRuleRequest() - request.parent = "parent_value" + request.event_edit_rule.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" + type(client.transport.update_event_edit_rule), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_subproperty_event_filter.SubpropertyEventFilter() + event_create_and_edit.EventEditRule() ) - await client.create_subproperty_event_filter(request) + await client.update_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -55723,45 +55271,41 @@ async def test_create_subproperty_event_filter_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "event_edit_rule.name=name_value", ) in kw["metadata"] -def test_create_subproperty_event_filter_flattened(): +def test_update_event_edit_rule_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" + type(client.transport.update_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + call.return_value = event_create_and_edit.EventEditRule() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_subproperty_event_filter( - parent="parent_value", - subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value" - ), + client.update_event_edit_rule( + event_edit_rule=event_create_and_edit.EventEditRule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].event_edit_rule + mock_val = event_create_and_edit.EventEditRule(name="name_value") assert arg == mock_val - arg = args[0].subproperty_event_filter - mock_val = gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value" - ) + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_create_subproperty_event_filter_flattened_error(): +def test_update_event_edit_rule_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -55769,56 +55313,50 @@ def test_create_subproperty_event_filter_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_subproperty_event_filter( - analytics_admin.CreateSubpropertyEventFilterRequest(), - parent="parent_value", - subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value" - ), + client.update_event_edit_rule( + analytics_admin.UpdateEventEditRuleRequest(), + event_edit_rule=event_create_and_edit.EventEditRule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_create_subproperty_event_filter_flattened_async(): +async def test_update_event_edit_rule_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" + type(client.transport.update_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + call.return_value = event_create_and_edit.EventEditRule() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_subproperty_event_filter.SubpropertyEventFilter() + event_create_and_edit.EventEditRule() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_subproperty_event_filter( - parent="parent_value", - subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value" - ), + response = await client.update_event_edit_rule( + event_edit_rule=event_create_and_edit.EventEditRule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].event_edit_rule + mock_val = event_create_and_edit.EventEditRule(name="name_value") assert arg == mock_val - arg = args[0].subproperty_event_filter - mock_val = gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value" - ) + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_create_subproperty_event_filter_flattened_error_async(): +async def test_update_event_edit_rule_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -55826,23 +55364,21 @@ async def test_create_subproperty_event_filter_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_subproperty_event_filter( - analytics_admin.CreateSubpropertyEventFilterRequest(), - parent="parent_value", - subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value" - ), + await client.update_event_edit_rule( + analytics_admin.UpdateEventEditRuleRequest(), + event_edit_rule=event_create_and_edit.EventEditRule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetSubpropertyEventFilterRequest, + analytics_admin.DeleteEventEditRuleRequest, dict, ], ) -def test_get_subproperty_event_filter(request_type, transport: str = "grpc"): +def test_delete_event_edit_rule(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -55854,28 +55390,23 @@ def test_get_subproperty_event_filter(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the 
request. with mock.patch.object( - type(client.transport.get_subproperty_event_filter), "__call__" + type(client.transport.delete_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = subproperty_event_filter.SubpropertyEventFilter( - name="name_value", - apply_to_property="apply_to_property_value", - ) - response = client.get_subproperty_event_filter(request) + call.return_value = None + response = client.delete_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.GetSubpropertyEventFilterRequest() + request = analytics_admin.DeleteEventEditRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, subproperty_event_filter.SubpropertyEventFilter) - assert response.name == "name_value" - assert response.apply_to_property == "apply_to_property_value" + assert response is None -def test_get_subproperty_event_filter_empty_call(): +def test_delete_event_edit_rule_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -55885,18 +55416,18 @@ def test_get_subproperty_event_filter_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_subproperty_event_filter), "__call__" + type(client.transport.delete_event_edit_rule), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_subproperty_event_filter() + client.delete_event_edit_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetSubpropertyEventFilterRequest() + assert args[0] == analytics_admin.DeleteEventEditRuleRequest() -def test_get_subproperty_event_filter_non_empty_request_with_auto_populated_field(): +def test_delete_event_edit_rule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -55907,26 +55438,26 @@ def test_get_subproperty_event_filter_non_empty_request_with_auto_populated_fiel # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.GetSubpropertyEventFilterRequest( + request = analytics_admin.DeleteEventEditRuleRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_subproperty_event_filter), "__call__" + type(client.transport.delete_event_edit_rule), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_subproperty_event_filter(request=request) + client.delete_event_edit_rule(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetSubpropertyEventFilterRequest( + assert args[0] == analytics_admin.DeleteEventEditRuleRequest( name="name_value", ) -def test_get_subproperty_event_filter_use_cached_wrapped_rpc(): +def test_delete_event_edit_rule_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -55941,7 +55472,7 @@ def test_get_subproperty_event_filter_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_subproperty_event_filter + client._transport.delete_event_edit_rule in client._transport._wrapped_methods ) @@ -55951,15 +55482,15 @@ def test_get_subproperty_event_filter_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_subproperty_event_filter + client._transport.delete_event_edit_rule ] = mock_rpc request = {} - client.get_subproperty_event_filter(request) + client.delete_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_subproperty_event_filter(request) + client.delete_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -55967,7 +55498,7 @@ def test_get_subproperty_event_filter_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_subproperty_event_filter_empty_call_async(): +async def test_delete_event_edit_rule_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceAsyncClient( @@ -55977,23 +55508,18 @@ async def test_get_subproperty_event_filter_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_subproperty_event_filter), "__call__" + type(client.transport.delete_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - subproperty_event_filter.SubpropertyEventFilter( - name="name_value", - apply_to_property="apply_to_property_value", - ) - ) - response = await client.get_subproperty_event_filter() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_event_edit_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.GetSubpropertyEventFilterRequest() + assert args[0] == analytics_admin.DeleteEventEditRuleRequest() @pytest.mark.asyncio -async def test_get_subproperty_event_filter_async_use_cached_wrapped_rpc( +async def test_delete_event_edit_rule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -56010,33 +55536,34 @@ async def test_get_subproperty_event_filter_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_subproperty_event_filter + client._client._transport.delete_event_edit_rule in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_subproperty_event_filter - ] = mock_object + client._client._transport.delete_event_edit_rule + ] = mock_rpc request = {} - await client.get_subproperty_event_filter(request) + await client.delete_event_edit_rule(request) # Establish that 
the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_subproperty_event_filter(request) + await client.delete_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_subproperty_event_filter_async( +async def test_delete_event_edit_rule_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.GetSubpropertyEventFilterRequest, + request_type=analytics_admin.DeleteEventEditRuleRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -56049,51 +55576,44 @@ async def test_get_subproperty_event_filter_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_subproperty_event_filter), "__call__" + type(client.transport.delete_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - subproperty_event_filter.SubpropertyEventFilter( - name="name_value", - apply_to_property="apply_to_property_value", - ) - ) - response = await client.get_subproperty_event_filter(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.GetSubpropertyEventFilterRequest() + request = analytics_admin.DeleteEventEditRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, subproperty_event_filter.SubpropertyEventFilter) - assert response.name == "name_value" - assert response.apply_to_property == "apply_to_property_value" + assert response is None @pytest.mark.asyncio -async def test_get_subproperty_event_filter_async_from_dict(): - await test_get_subproperty_event_filter_async(request_type=dict) +async def test_delete_event_edit_rule_async_from_dict(): + await test_delete_event_edit_rule_async(request_type=dict) -def test_get_subproperty_event_filter_field_headers(): +def test_delete_event_edit_rule_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.GetSubpropertyEventFilterRequest() + request = analytics_admin.DeleteEventEditRuleRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_subproperty_event_filter), "__call__" + type(client.transport.delete_event_edit_rule), "__call__" ) as call: - call.return_value = subproperty_event_filter.SubpropertyEventFilter() - client.get_subproperty_event_filter(request) + call.return_value = None + client.delete_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -56109,25 +55629,23 @@ def test_get_subproperty_event_filter_field_headers(): @pytest.mark.asyncio -async def test_get_subproperty_event_filter_field_headers_async(): +async def test_delete_event_edit_rule_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_admin.GetSubpropertyEventFilterRequest() + request = analytics_admin.DeleteEventEditRuleRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_subproperty_event_filter), "__call__" + type(client.transport.delete_event_edit_rule), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - subproperty_event_filter.SubpropertyEventFilter() - ) - await client.get_subproperty_event_filter(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -56142,20 +55660,20 @@ async def test_get_subproperty_event_filter_field_headers_async(): ) in kw["metadata"] -def test_get_subproperty_event_filter_flattened(): +def test_delete_event_edit_rule_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_subproperty_event_filter), "__call__" + type(client.transport.delete_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = subproperty_event_filter.SubpropertyEventFilter() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_subproperty_event_filter( + client.delete_event_edit_rule( name="name_value", ) @@ -56168,7 +55686,7 @@ def test_get_subproperty_event_filter_flattened(): assert arg == mock_val -def test_get_subproperty_event_filter_flattened_error(): +def test_delete_event_edit_rule_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -56176,31 +55694,29 @@ def test_get_subproperty_event_filter_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_subproperty_event_filter( - analytics_admin.GetSubpropertyEventFilterRequest(), + client.delete_event_edit_rule( + analytics_admin.DeleteEventEditRuleRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_subproperty_event_filter_flattened_async(): +async def test_delete_event_edit_rule_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_subproperty_event_filter), "__call__" + type(client.transport.delete_event_edit_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = subproperty_event_filter.SubpropertyEventFilter() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - subproperty_event_filter.SubpropertyEventFilter() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_subproperty_event_filter( + response = await client.delete_event_edit_rule( name="name_value", ) @@ -56214,7 +55730,7 @@ async def test_get_subproperty_event_filter_flattened_async(): @pytest.mark.asyncio -async def test_get_subproperty_event_filter_flattened_error_async(): +async def test_delete_event_edit_rule_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -56222,8 +55738,8 @@ async def test_get_subproperty_event_filter_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_subproperty_event_filter( - analytics_admin.GetSubpropertyEventFilterRequest(), + await client.delete_event_edit_rule( + analytics_admin.DeleteEventEditRuleRequest(), name="name_value", ) @@ -56231,11 +55747,11 @@ async def test_get_subproperty_event_filter_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListSubpropertyEventFiltersRequest, + analytics_admin.ReorderEventEditRulesRequest, dict, ], ) -def test_list_subproperty_event_filters(request_type, transport: str = "grpc"): +def test_reorder_event_edit_rules(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -56247,26 +55763,23 @@ def test_list_subproperty_event_filters(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_subproperty_event_filters), "__call__" + type(client.transport.reorder_event_edit_rules), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_admin.ListSubpropertyEventFiltersResponse( - next_page_token="next_page_token_value", - ) - response = client.list_subproperty_event_filters(request) + call.return_value = None + response = client.reorder_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.ListSubpropertyEventFiltersRequest() + request = analytics_admin.ReorderEventEditRulesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSubpropertyEventFiltersPager) - assert response.next_page_token == "next_page_token_value" + assert response is None -def test_list_subproperty_event_filters_empty_call(): +def test_reorder_event_edit_rules_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -56276,18 +55789,18 @@ def test_list_subproperty_event_filters_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_subproperty_event_filters), "__call__" + type(client.transport.reorder_event_edit_rules), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_subproperty_event_filters() + client.reorder_event_edit_rules() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListSubpropertyEventFiltersRequest() + assert args[0] == analytics_admin.ReorderEventEditRulesRequest() -def test_list_subproperty_event_filters_non_empty_request_with_auto_populated_field(): +def test_reorder_event_edit_rules_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -56298,28 +55811,26 @@ def test_list_subproperty_event_filters_non_empty_request_with_auto_populated_fi # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.ListSubpropertyEventFiltersRequest( + request = analytics_admin.ReorderEventEditRulesRequest( parent="parent_value", - page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_subproperty_event_filters), "__call__" + type(client.transport.reorder_event_edit_rules), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_subproperty_event_filters(request=request) + client.reorder_event_edit_rules(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListSubpropertyEventFiltersRequest( + assert args[0] == analytics_admin.ReorderEventEditRulesRequest( parent="parent_value", - page_token="page_token_value", ) -def test_list_subproperty_event_filters_use_cached_wrapped_rpc(): +def test_reorder_event_edit_rules_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -56334,7 +55845,7 @@ def test_list_subproperty_event_filters_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_subproperty_event_filters + client._transport.reorder_event_edit_rules in client._transport._wrapped_methods ) @@ -56344,15 +55855,15 @@ def test_list_subproperty_event_filters_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_subproperty_event_filters + client._transport.reorder_event_edit_rules ] = mock_rpc request = {} - client.list_subproperty_event_filters(request) + client.reorder_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_subproperty_event_filters(request) + client.reorder_event_edit_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -56360,7 +55871,7 @@ def test_list_subproperty_event_filters_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_subproperty_event_filters_empty_call_async(): +async def test_reorder_event_edit_rules_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -56370,22 +55881,18 @@ async def test_list_subproperty_event_filters_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_subproperty_event_filters), "__call__" + type(client.transport.reorder_event_edit_rules), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListSubpropertyEventFiltersResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_subproperty_event_filters() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.reorder_event_edit_rules() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.ListSubpropertyEventFiltersRequest() + assert args[0] == analytics_admin.ReorderEventEditRulesRequest() @pytest.mark.asyncio -async def test_list_subproperty_event_filters_async_use_cached_wrapped_rpc( +async def test_reorder_event_edit_rules_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -56402,33 +55909,34 @@ async def test_list_subproperty_event_filters_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_subproperty_event_filters + client._client._transport.reorder_event_edit_rules in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_subproperty_event_filters - ] = mock_object + client._client._transport.reorder_event_edit_rules + ] = mock_rpc request = {} - await client.list_subproperty_event_filters(request) + 
await client.reorder_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_subproperty_event_filters(request) + await client.reorder_event_edit_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_subproperty_event_filters_async( +async def test_reorder_event_edit_rules_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.ListSubpropertyEventFiltersRequest, + request_type=analytics_admin.ReorderEventEditRulesRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -56441,49 +55949,44 @@ async def test_list_subproperty_event_filters_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_subproperty_event_filters), "__call__" + type(client.transport.reorder_event_edit_rules), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListSubpropertyEventFiltersResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_subproperty_event_filters(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.reorder_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.ListSubpropertyEventFiltersRequest() + request = analytics_admin.ReorderEventEditRulesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSubpropertyEventFiltersAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert response is None @pytest.mark.asyncio -async def test_list_subproperty_event_filters_async_from_dict(): - await test_list_subproperty_event_filters_async(request_type=dict) +async def test_reorder_event_edit_rules_async_from_dict(): + await test_reorder_event_edit_rules_async(request_type=dict) -def test_list_subproperty_event_filters_field_headers(): +def test_reorder_event_edit_rules_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.ListSubpropertyEventFiltersRequest() + request = analytics_admin.ReorderEventEditRulesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_subproperty_event_filters), "__call__" + type(client.transport.reorder_event_edit_rules), "__call__" ) as call: - call.return_value = analytics_admin.ListSubpropertyEventFiltersResponse() - client.list_subproperty_event_filters(request) + call.return_value = None + client.reorder_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -56499,25 +56002,23 @@ def test_list_subproperty_event_filters_field_headers(): @pytest.mark.asyncio -async def test_list_subproperty_event_filters_field_headers_async(): +async def test_reorder_event_edit_rules_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_admin.ListSubpropertyEventFiltersRequest() + request = analytics_admin.ReorderEventEditRulesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_subproperty_event_filters), "__call__" + type(client.transport.reorder_event_edit_rules), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListSubpropertyEventFiltersResponse() - ) - await client.list_subproperty_event_filters(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.reorder_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -56532,343 +56033,51 @@ async def test_list_subproperty_event_filters_field_headers_async(): ) in kw["metadata"] -def test_list_subproperty_event_filters_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.UpdateDataRedactionSettingsRequest, + dict, + ], +) +def test_update_data_redaction_settings(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_subproperty_event_filters), "__call__" + type(client.transport.update_data_redaction_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListSubpropertyEventFiltersResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_subproperty_event_filters( - parent="parent_value", + call.return_value = resources.DataRedactionSettings( + name="name_value", + email_redaction_enabled=True, + query_parameter_redaction_enabled=True, + query_parameter_keys=["query_parameter_keys_value"], ) + response = client.update_data_redaction_settings(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_subproperty_event_filters_flattened_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_subproperty_event_filters( - analytics_admin.ListSubpropertyEventFiltersRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_subproperty_event_filters_flattened_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subproperty_event_filters), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = analytics_admin.ListSubpropertyEventFiltersResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_admin.ListSubpropertyEventFiltersResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_subproperty_event_filters( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_subproperty_event_filters_flattened_error_async(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_subproperty_event_filters( - analytics_admin.ListSubpropertyEventFiltersRequest(), - parent="parent_value", - ) - - -def test_list_subproperty_event_filters_pager(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subproperty_event_filters), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[ - subproperty_event_filter.SubpropertyEventFilter(), - subproperty_event_filter.SubpropertyEventFilter(), - subproperty_event_filter.SubpropertyEventFilter(), - ], - next_page_token="abc", - ), - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[], - next_page_token="def", - ), - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[ - subproperty_event_filter.SubpropertyEventFilter(), - ], - next_page_token="ghi", - ), - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[ - subproperty_event_filter.SubpropertyEventFilter(), - subproperty_event_filter.SubpropertyEventFilter(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_subproperty_event_filters( - request={}, retry=retry, timeout=timeout - ) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, subproperty_event_filter.SubpropertyEventFilter) - for i in results - ) - - -def test_list_subproperty_event_filters_pages(transport_name: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subproperty_event_filters), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[ - subproperty_event_filter.SubpropertyEventFilter(), - subproperty_event_filter.SubpropertyEventFilter(), - subproperty_event_filter.SubpropertyEventFilter(), - ], - next_page_token="abc", - ), - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[], - next_page_token="def", - ), - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[ - subproperty_event_filter.SubpropertyEventFilter(), - ], - next_page_token="ghi", - ), - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[ - subproperty_event_filter.SubpropertyEventFilter(), - subproperty_event_filter.SubpropertyEventFilter(), - ], - ), - RuntimeError, - ) - pages = list(client.list_subproperty_event_filters(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_subproperty_event_filters_async_pager(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subproperty_event_filters), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[ - subproperty_event_filter.SubpropertyEventFilter(), - subproperty_event_filter.SubpropertyEventFilter(), - subproperty_event_filter.SubpropertyEventFilter(), - ], - next_page_token="abc", - ), - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[], - next_page_token="def", - ), - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[ - subproperty_event_filter.SubpropertyEventFilter(), - ], - next_page_token="ghi", - ), - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[ - subproperty_event_filter.SubpropertyEventFilter(), - subproperty_event_filter.SubpropertyEventFilter(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_subproperty_event_filters( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all( - isinstance(i, subproperty_event_filter.SubpropertyEventFilter) - for i in responses - ) - - -@pytest.mark.asyncio -async def test_list_subproperty_event_filters_async_pages(): - client = AnalyticsAdminServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subproperty_event_filters), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[ - subproperty_event_filter.SubpropertyEventFilter(), - subproperty_event_filter.SubpropertyEventFilter(), - subproperty_event_filter.SubpropertyEventFilter(), - ], - next_page_token="abc", - ), - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[], - next_page_token="def", - ), - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[ - subproperty_event_filter.SubpropertyEventFilter(), - ], - next_page_token="ghi", - ), - analytics_admin.ListSubpropertyEventFiltersResponse( - subproperty_event_filters=[ - subproperty_event_filter.SubpropertyEventFilter(), - subproperty_event_filter.SubpropertyEventFilter(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_subproperty_event_filters(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.UpdateSubpropertyEventFilterRequest, - dict, - ], -) -def test_update_subproperty_event_filter(request_type, transport: str = "grpc"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_subproperty_event_filter), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value", - apply_to_property="apply_to_property_value", - ) - response = client.update_subproperty_event_filter(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateSubpropertyEventFilterRequest() - assert args[0] == request + request = analytics_admin.UpdateDataRedactionSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) + assert isinstance(response, resources.DataRedactionSettings) assert response.name == "name_value" - assert response.apply_to_property == "apply_to_property_value" + assert response.email_redaction_enabled is True + assert response.query_parameter_redaction_enabled is True + assert response.query_parameter_keys == ["query_parameter_keys_value"] -def test_update_subproperty_event_filter_empty_call(): +def test_update_data_redaction_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -56878,18 +56087,18 @@ def test_update_subproperty_event_filter_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_subproperty_event_filter), "__call__" + type(client.transport.update_data_redaction_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_subproperty_event_filter() + client.update_data_redaction_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateSubpropertyEventFilterRequest() + assert args[0] == analytics_admin.UpdateDataRedactionSettingsRequest() -def test_update_subproperty_event_filter_non_empty_request_with_auto_populated_field(): +def test_update_data_redaction_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AnalyticsAdminServiceClient( @@ -56900,22 +56109,22 @@ def test_update_subproperty_event_filter_non_empty_request_with_auto_populated_f # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.UpdateSubpropertyEventFilterRequest() + request = analytics_admin.UpdateDataRedactionSettingsRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_subproperty_event_filter), "__call__" + type(client.transport.update_data_redaction_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_subproperty_event_filter(request=request) + client.update_data_redaction_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateSubpropertyEventFilterRequest() + assert args[0] == analytics_admin.UpdateDataRedactionSettingsRequest() -def test_update_subproperty_event_filter_use_cached_wrapped_rpc(): +def test_update_data_redaction_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -56930,7 +56139,7 @@ def test_update_subproperty_event_filter_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_subproperty_event_filter + client._transport.update_data_redaction_settings in client._transport._wrapped_methods ) @@ -56940,15 +56149,15 @@ def test_update_subproperty_event_filter_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_subproperty_event_filter + client._transport.update_data_redaction_settings ] = mock_rpc request = {} - client.update_subproperty_event_filter(request) + client.update_data_redaction_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_subproperty_event_filter(request) + client.update_data_redaction_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -56956,7 +56165,7 @@ def test_update_subproperty_event_filter_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_subproperty_event_filter_empty_call_async(): +async def test_update_data_redaction_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -56966,23 +56175,25 @@ async def test_update_subproperty_event_filter_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_subproperty_event_filter), "__call__" + type(client.transport.update_data_redaction_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_subproperty_event_filter.SubpropertyEventFilter( + resources.DataRedactionSettings( name="name_value", - apply_to_property="apply_to_property_value", + email_redaction_enabled=True, + query_parameter_redaction_enabled=True, + query_parameter_keys=["query_parameter_keys_value"], ) ) - response = await client.update_subproperty_event_filter() + response = await client.update_data_redaction_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.UpdateSubpropertyEventFilterRequest() + assert args[0] == analytics_admin.UpdateDataRedactionSettingsRequest() @pytest.mark.asyncio -async def test_update_subproperty_event_filter_async_use_cached_wrapped_rpc( +async def test_update_data_redaction_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -56999,33 +56210,34 @@ async def test_update_subproperty_event_filter_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_subproperty_event_filter + client._client._transport.update_data_redaction_settings in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_subproperty_event_filter - 
] = mock_object + client._client._transport.update_data_redaction_settings + ] = mock_rpc request = {} - await client.update_subproperty_event_filter(request) + await client.update_data_redaction_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_subproperty_event_filter(request) + await client.update_data_redaction_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_subproperty_event_filter_async( +async def test_update_data_redaction_settings_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.UpdateSubpropertyEventFilterRequest, + request_type=analytics_admin.UpdateDataRedactionSettingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -57038,51 +56250,55 @@ async def test_update_subproperty_event_filter_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_subproperty_event_filter), "__call__" + type(client.transport.update_data_redaction_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_subproperty_event_filter.SubpropertyEventFilter( + resources.DataRedactionSettings( name="name_value", - apply_to_property="apply_to_property_value", + email_redaction_enabled=True, + query_parameter_redaction_enabled=True, + query_parameter_keys=["query_parameter_keys_value"], ) ) - response = await client.update_subproperty_event_filter(request) + response = await client.update_data_redaction_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.UpdateSubpropertyEventFilterRequest() + request = analytics_admin.UpdateDataRedactionSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) + assert isinstance(response, resources.DataRedactionSettings) assert response.name == "name_value" - assert response.apply_to_property == "apply_to_property_value" + assert response.email_redaction_enabled is True + assert response.query_parameter_redaction_enabled is True + assert response.query_parameter_keys == ["query_parameter_keys_value"] @pytest.mark.asyncio -async def test_update_subproperty_event_filter_async_from_dict(): - await test_update_subproperty_event_filter_async(request_type=dict) +async def test_update_data_redaction_settings_async_from_dict(): + await test_update_data_redaction_settings_async(request_type=dict) -def test_update_subproperty_event_filter_field_headers(): +def test_update_data_redaction_settings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateSubpropertyEventFilterRequest() + request = analytics_admin.UpdateDataRedactionSettingsRequest() - request.subproperty_event_filter.name = "name_value" + request.data_redaction_settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_subproperty_event_filter), "__call__" + type(client.transport.update_data_redaction_settings), "__call__" ) as call: - call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() - client.update_subproperty_event_filter(request) + call.return_value = resources.DataRedactionSettings() + client.update_data_redaction_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -57093,30 +56309,30 @@ def test_update_subproperty_event_filter_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subproperty_event_filter.name=name_value", + "data_redaction_settings.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_subproperty_event_filter_field_headers_async(): +async def test_update_data_redaction_settings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.UpdateSubpropertyEventFilterRequest() + request = analytics_admin.UpdateDataRedactionSettingsRequest() - request.subproperty_event_filter.name = "name_value" + request.data_redaction_settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_subproperty_event_filter), "__call__" + type(client.transport.update_data_redaction_settings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_subproperty_event_filter.SubpropertyEventFilter() + resources.DataRedactionSettings() ) - await client.update_subproperty_event_filter(request) + await client.update_data_redaction_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -57127,27 +56343,25 @@ async def test_update_subproperty_event_filter_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subproperty_event_filter.name=name_value", + "data_redaction_settings.name=name_value", ) in kw["metadata"] -def test_update_subproperty_event_filter_flattened(): +def test_update_data_redaction_settings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_subproperty_event_filter), "__call__" + type(client.transport.update_data_redaction_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + call.return_value = resources.DataRedactionSettings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_subproperty_event_filter( - subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value" - ), + client.update_data_redaction_settings( + data_redaction_settings=resources.DataRedactionSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -57155,17 +56369,15 @@ def test_update_subproperty_event_filter_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].subproperty_event_filter - mock_val = gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value" - ) + arg = args[0].data_redaction_settings + mock_val = resources.DataRedactionSettings(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_update_subproperty_event_filter_flattened_error(): +def test_update_data_redaction_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -57173,37 +56385,33 @@ def test_update_subproperty_event_filter_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_subproperty_event_filter( - analytics_admin.UpdateSubpropertyEventFilterRequest(), - subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value" - ), + client.update_data_redaction_settings( + analytics_admin.UpdateDataRedactionSettingsRequest(), + data_redaction_settings=resources.DataRedactionSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_update_subproperty_event_filter_flattened_async(): +async def test_update_data_redaction_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_subproperty_event_filter), "__call__" + type(client.transport.update_data_redaction_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + call.return_value = resources.DataRedactionSettings() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gaa_subproperty_event_filter.SubpropertyEventFilter() + resources.DataRedactionSettings() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_subproperty_event_filter( - subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value" - ), + response = await client.update_data_redaction_settings( + data_redaction_settings=resources.DataRedactionSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -57211,10 +56419,8 @@ async def test_update_subproperty_event_filter_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].subproperty_event_filter - mock_val = gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value" - ) + arg = args[0].data_redaction_settings + mock_val = resources.DataRedactionSettings(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -57222,7 +56428,7 @@ async def test_update_subproperty_event_filter_flattened_async(): @pytest.mark.asyncio -async def test_update_subproperty_event_filter_flattened_error_async(): +async def test_update_data_redaction_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -57230,11 +56436,9 @@ async def test_update_subproperty_event_filter_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_subproperty_event_filter( - analytics_admin.UpdateSubpropertyEventFilterRequest(), - subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( - name="name_value" - ), + await client.update_data_redaction_settings( + analytics_admin.UpdateDataRedactionSettingsRequest(), + data_redaction_settings=resources.DataRedactionSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -57242,11 +56446,11 @@ async def test_update_subproperty_event_filter_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteSubpropertyEventFilterRequest, + analytics_admin.GetDataRedactionSettingsRequest, dict, ], ) -def test_delete_subproperty_event_filter(request_type, transport: str = "grpc"): +def test_get_data_redaction_settings(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -57258,23 +56462,32 @@ def test_delete_subproperty_event_filter(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.get_data_redaction_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_subproperty_event_filter(request) + call.return_value = resources.DataRedactionSettings( + name="name_value", + email_redaction_enabled=True, + query_parameter_redaction_enabled=True, + query_parameter_keys=["query_parameter_keys_value"], + ) + response = client.get_data_redaction_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteSubpropertyEventFilterRequest() + request = analytics_admin.GetDataRedactionSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.DataRedactionSettings) + assert response.name == "name_value" + assert response.email_redaction_enabled is True + assert response.query_parameter_redaction_enabled is True + assert response.query_parameter_keys == ["query_parameter_keys_value"] -def test_delete_subproperty_event_filter_empty_call(): +def test_get_data_redaction_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -57284,18 +56497,18 @@ def test_delete_subproperty_event_filter_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.get_data_redaction_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_subproperty_event_filter() + client.get_data_redaction_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteSubpropertyEventFilterRequest() + assert args[0] == analytics_admin.GetDataRedactionSettingsRequest() -def test_delete_subproperty_event_filter_non_empty_request_with_auto_populated_field(): +def test_get_data_redaction_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AnalyticsAdminServiceClient( @@ -57306,26 +56519,26 @@ def test_delete_subproperty_event_filter_non_empty_request_with_auto_populated_f # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_admin.DeleteSubpropertyEventFilterRequest( + request = analytics_admin.GetDataRedactionSettingsRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.get_data_redaction_settings), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_subproperty_event_filter(request=request) + client.get_data_redaction_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteSubpropertyEventFilterRequest( + assert args[0] == analytics_admin.GetDataRedactionSettingsRequest( name="name_value", ) -def test_delete_subproperty_event_filter_use_cached_wrapped_rpc(): +def test_get_data_redaction_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -57340,7 +56553,7 @@ def test_delete_subproperty_event_filter_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_subproperty_event_filter + client._transport.get_data_redaction_settings in client._transport._wrapped_methods ) @@ -57350,15 +56563,15 @@ def test_delete_subproperty_event_filter_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.delete_subproperty_event_filter + client._transport.get_data_redaction_settings ] = mock_rpc request = {} - client.delete_subproperty_event_filter(request) + client.get_data_redaction_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_subproperty_event_filter(request) + client.get_data_redaction_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -57366,7 +56579,7 @@ def test_delete_subproperty_event_filter_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_subproperty_event_filter_empty_call_async(): +async def test_get_data_redaction_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceAsyncClient( @@ -57376,18 +56589,25 @@ async def test_delete_subproperty_event_filter_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.get_data_redaction_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_subproperty_event_filter() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.DataRedactionSettings( + name="name_value", + email_redaction_enabled=True, + query_parameter_redaction_enabled=True, + query_parameter_keys=["query_parameter_keys_value"], + ) + ) + response = await client.get_data_redaction_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteSubpropertyEventFilterRequest() + assert args[0] == analytics_admin.GetDataRedactionSettingsRequest() @pytest.mark.asyncio -async def test_delete_subproperty_event_filter_async_use_cached_wrapped_rpc( +async def test_get_data_redaction_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -57404,33 +56624,34 @@ async def test_delete_subproperty_event_filter_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_subproperty_event_filter + client._client._transport.get_data_redaction_settings in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_subproperty_event_filter - ] = mock_object + client._client._transport.get_data_redaction_settings + ] = mock_rpc request = {} - await client.delete_subproperty_event_filter(request) + await client.get_data_redaction_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_subproperty_event_filter(request) + await client.get_data_redaction_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_subproperty_event_filter_async( +async def test_get_data_redaction_settings_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteSubpropertyEventFilterRequest, + request_type=analytics_admin.GetDataRedactionSettingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -57443,44 +56664,55 @@ async def test_delete_subproperty_event_filter_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.get_data_redaction_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_subproperty_event_filter(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.DataRedactionSettings( + name="name_value", + email_redaction_enabled=True, + query_parameter_redaction_enabled=True, + query_parameter_keys=["query_parameter_keys_value"], + ) + ) + response = await client.get_data_redaction_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_admin.DeleteSubpropertyEventFilterRequest() + request = analytics_admin.GetDataRedactionSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.DataRedactionSettings) + assert response.name == "name_value" + assert response.email_redaction_enabled is True + assert response.query_parameter_redaction_enabled is True + assert response.query_parameter_keys == ["query_parameter_keys_value"] @pytest.mark.asyncio -async def test_delete_subproperty_event_filter_async_from_dict(): - await test_delete_subproperty_event_filter_async(request_type=dict) +async def test_get_data_redaction_settings_async_from_dict(): + await test_get_data_redaction_settings_async(request_type=dict) -def test_delete_subproperty_event_filter_field_headers(): +def test_get_data_redaction_settings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteSubpropertyEventFilterRequest() + request = analytics_admin.GetDataRedactionSettingsRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.get_data_redaction_settings), "__call__" ) as call: - call.return_value = None - client.delete_subproperty_event_filter(request) + call.return_value = resources.DataRedactionSettings() + client.get_data_redaction_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -57496,23 +56728,25 @@ def test_delete_subproperty_event_filter_field_headers(): @pytest.mark.asyncio -async def test_delete_subproperty_event_filter_field_headers_async(): +async def test_get_data_redaction_settings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteSubpropertyEventFilterRequest() + request = analytics_admin.GetDataRedactionSettingsRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.get_data_redaction_settings), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_subproperty_event_filter(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.DataRedactionSettings() + ) + await client.get_data_redaction_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -57527,20 +56761,20 @@ async def test_delete_subproperty_event_filter_field_headers_async(): ) in kw["metadata"] -def test_delete_subproperty_event_filter_flattened(): +def test_get_data_redaction_settings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.get_data_redaction_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = None + call.return_value = resources.DataRedactionSettings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_subproperty_event_filter( + client.get_data_redaction_settings( name="name_value", ) @@ -57553,7 +56787,7 @@ def test_delete_subproperty_event_filter_flattened(): assert arg == mock_val -def test_delete_subproperty_event_filter_flattened_error(): +def test_get_data_redaction_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -57561,29 +56795,31 @@ def test_delete_subproperty_event_filter_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_subproperty_event_filter( - analytics_admin.DeleteSubpropertyEventFilterRequest(), + client.get_data_redaction_settings( + analytics_admin.GetDataRedactionSettingsRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_subproperty_event_filter_flattened_async(): +async def test_get_data_redaction_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.get_data_redaction_settings), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.DataRedactionSettings() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.DataRedactionSettings() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.delete_subproperty_event_filter( + response = await client.get_data_redaction_settings( name="name_value", ) @@ -57597,7 +56833,7 @@ async def test_delete_subproperty_event_filter_flattened_async(): @pytest.mark.asyncio -async def test_delete_subproperty_event_filter_flattened_error_async(): +async def test_get_data_redaction_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -57605,8 +56841,8 @@ async def test_delete_subproperty_event_filter_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_subproperty_event_filter( - analytics_admin.DeleteSubpropertyEventFilterRequest(), + await client.get_data_redaction_settings( + analytics_admin.GetDataRedactionSettingsRequest(), name="name_value", ) @@ -57614,56 +56850,117 @@ async def test_delete_subproperty_event_filter_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetAccountRequest, + analytics_admin.GetCalculatedMetricRequest, dict, ], ) -def test_get_account_rest(request_type): +def test_get_calculated_metric(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"name": "accounts/sample1"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = resources.Account( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.CalculatedMetric( name="name_value", + description="description_value", display_name="display_name_value", - region_code="region_code_value", - deleted=True, + calculated_metric_id="calculated_metric_id_value", + metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, + restricted_metric_type=[ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ], + formula="formula_value", + invalid_metric_reference=True, ) + response = client.get_calculated_metric(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Account.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_account(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.GetCalculatedMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Account) + assert isinstance(response, resources.CalculatedMetric) assert response.name == "name_value" + assert response.description == "description_value" assert response.display_name == "display_name_value" - assert response.region_code == "region_code_value" - assert response.deleted is True + assert response.calculated_metric_id == "calculated_metric_id_value" + assert response.metric_unit == resources.CalculatedMetric.MetricUnit.STANDARD + assert response.restricted_metric_type == [ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ] + assert response.formula == "formula_value" + assert response.invalid_metric_reference is True -def test_get_account_rest_use_cached_wrapped_rpc(): +def test_get_calculated_metric_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_calculated_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_calculated_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetCalculatedMetricRequest() + + +def test_get_calculated_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.GetCalculatedMetricRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_calculated_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_calculated_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetCalculatedMetricRequest( + name="name_value", + ) + + +def test_get_calculated_metric_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -57671,191 +56968,10899 @@ def test_get_account_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_account in client._transport._wrapped_methods + assert ( + client._transport.get_calculated_metric + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_account] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.get_calculated_metric + ] = mock_rpc request = {} - client.get_account(request) + client.get_calculated_metric(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_account(request) + client.get_calculated_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_account_rest_required_fields( - request_type=analytics_admin.GetAccountRequest, -): - transport_class = transports.AnalyticsAdminServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_get_calculated_metric_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_account._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CalculatedMetric( + name="name_value", + description="description_value", + display_name="display_name_value", + calculated_metric_id="calculated_metric_id_value", + metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, + restricted_metric_type=[ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ], + formula="formula_value", + invalid_metric_reference=True, + ) + ) + response = await client.get_calculated_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetCalculatedMetricRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_account._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.asyncio +async def test_get_calculated_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Designate an appropriate value for the returned response. - return_value = resources.Account() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Ensure method has been cached + assert ( + client._client._transport.get_calculated_metric + in client._client._transport._wrapped_methods + ) - response_value = Response() - response_value.status_code = 200 + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_calculated_metric + ] = mock_rpc - # Convert return value to protobuf type - return_value = resources.Account.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request = {} + await client.get_calculated_metric(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - response = client.get_account(request) + await client.get_calculated_metric(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_get_account_rest_unset_required_fields(): - transport = transports.AnalyticsAdminServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_get_calculated_metric_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.GetCalculatedMetricRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - unset_fields = transport.get_account._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_account_rest_interceptors(null_interceptor): - transport = transports.AnalyticsAdminServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.AnalyticsAdminServiceRestInterceptor(), - ) - client = AnalyticsAdminServiceClient(transport=transport) + # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_account" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_account" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.GetAccountRequest.pb( - analytics_admin.GetAccountRequest() + type(client.transport.get_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CalculatedMetric( + name="name_value", + description="description_value", + display_name="display_name_value", + calculated_metric_id="calculated_metric_id_value", + metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, + restricted_metric_type=[ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ], + formula="formula_value", + invalid_metric_reference=True, + ) ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + response = await client.get_calculated_metric(request) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = resources.Account.to_json(resources.Account()) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.GetCalculatedMetricRequest() + assert args[0] == request - request = analytics_admin.GetAccountRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = resources.Account() + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.CalculatedMetric) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.display_name == "display_name_value" + assert response.calculated_metric_id == "calculated_metric_id_value" + assert response.metric_unit == resources.CalculatedMetric.MetricUnit.STANDARD + assert response.restricted_metric_type == [ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ] + assert response.formula == "formula_value" + assert response.invalid_metric_reference is True - client.get_account( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - pre.assert_called_once() - post.assert_called_once() +@pytest.mark.asyncio +async def test_get_calculated_metric_async_from_dict(): + await test_get_calculated_metric_async(request_type=dict) -def test_get_account_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetAccountRequest -): +def test_get_calculated_metric_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"name": "accounts/sample1"} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.GetCalculatedMetricRequest() - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_account(request) + request.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_calculated_metric), "__call__" + ) as call: + call.return_value = resources.CalculatedMetric() + client.get_calculated_metric(request) -def test_get_account_rest_flattened(): + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_calculated_metric_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.GetCalculatedMetricRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_calculated_metric), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CalculatedMetric() + ) + await client.get_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_calculated_metric_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.CalculatedMetric() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_calculated_metric( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_calculated_metric_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_calculated_metric( + analytics_admin.GetCalculatedMetricRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_calculated_metric_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.CalculatedMetric() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CalculatedMetric() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_calculated_metric( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_calculated_metric_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_calculated_metric( + analytics_admin.GetCalculatedMetricRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.CreateCalculatedMetricRequest, + dict, + ], +) +def test_create_calculated_metric(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.CalculatedMetric( + name="name_value", + description="description_value", + display_name="display_name_value", + calculated_metric_id="calculated_metric_id_value", + metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, + restricted_metric_type=[ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ], + formula="formula_value", + invalid_metric_reference=True, + ) + response = client.create_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.CreateCalculatedMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.CalculatedMetric) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.display_name == "display_name_value" + assert response.calculated_metric_id == "calculated_metric_id_value" + assert response.metric_unit == resources.CalculatedMetric.MetricUnit.STANDARD + assert response.restricted_metric_type == [ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ] + assert response.formula == "formula_value" + assert response.invalid_metric_reference is True + + +def test_create_calculated_metric_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_calculated_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_calculated_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateCalculatedMetricRequest() + + +def test_create_calculated_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.CreateCalculatedMetricRequest( + parent="parent_value", + calculated_metric_id="calculated_metric_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_calculated_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_calculated_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateCalculatedMetricRequest( + parent="parent_value", + calculated_metric_id="calculated_metric_id_value", + ) + + +def test_create_calculated_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_calculated_metric + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_calculated_metric + ] = mock_rpc + request = {} + client.create_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_calculated_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_calculated_metric_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CalculatedMetric( + name="name_value", + description="description_value", + display_name="display_name_value", + calculated_metric_id="calculated_metric_id_value", + metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, + restricted_metric_type=[ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ], + formula="formula_value", + invalid_metric_reference=True, + ) + ) + response = await client.create_calculated_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateCalculatedMetricRequest() + + +@pytest.mark.asyncio +async def test_create_calculated_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_calculated_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_calculated_metric + ] = mock_rpc + + request = {} + await client.create_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.create_calculated_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_calculated_metric_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.CreateCalculatedMetricRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CalculatedMetric( + name="name_value", + description="description_value", + display_name="display_name_value", + calculated_metric_id="calculated_metric_id_value", + metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, + restricted_metric_type=[ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ], + formula="formula_value", + invalid_metric_reference=True, + ) + ) + response = await client.create_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.CreateCalculatedMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.CalculatedMetric) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.display_name == "display_name_value" + assert response.calculated_metric_id == "calculated_metric_id_value" + assert response.metric_unit == resources.CalculatedMetric.MetricUnit.STANDARD + assert response.restricted_metric_type == [ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ] + assert response.formula == "formula_value" + assert response.invalid_metric_reference is True + + +@pytest.mark.asyncio +async def test_create_calculated_metric_async_from_dict(): + await test_create_calculated_metric_async(request_type=dict) + + +def test_create_calculated_metric_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.CreateCalculatedMetricRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_calculated_metric), "__call__" + ) as call: + call.return_value = resources.CalculatedMetric() + client.create_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_calculated_metric_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_admin.CreateCalculatedMetricRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_calculated_metric), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CalculatedMetric() + ) + await client.create_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_calculated_metric_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.CalculatedMetric() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_calculated_metric( + parent="parent_value", + calculated_metric=resources.CalculatedMetric(name="name_value"), + calculated_metric_id="calculated_metric_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].calculated_metric + mock_val = resources.CalculatedMetric(name="name_value") + assert arg == mock_val + arg = args[0].calculated_metric_id + mock_val = "calculated_metric_id_value" + assert arg == mock_val + + +def test_create_calculated_metric_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_calculated_metric( + analytics_admin.CreateCalculatedMetricRequest(), + parent="parent_value", + calculated_metric=resources.CalculatedMetric(name="name_value"), + calculated_metric_id="calculated_metric_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_calculated_metric_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.CalculatedMetric() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CalculatedMetric() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_calculated_metric( + parent="parent_value", + calculated_metric=resources.CalculatedMetric(name="name_value"), + calculated_metric_id="calculated_metric_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].calculated_metric + mock_val = resources.CalculatedMetric(name="name_value") + assert arg == mock_val + arg = args[0].calculated_metric_id + mock_val = "calculated_metric_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_calculated_metric_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_calculated_metric( + analytics_admin.CreateCalculatedMetricRequest(), + parent="parent_value", + calculated_metric=resources.CalculatedMetric(name="name_value"), + calculated_metric_id="calculated_metric_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ListCalculatedMetricsRequest, + dict, + ], +) +def test_list_calculated_metrics(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_calculated_metrics), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.ListCalculatedMetricsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_calculated_metrics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.ListCalculatedMetricsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCalculatedMetricsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_calculated_metrics_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_calculated_metrics), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_calculated_metrics() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListCalculatedMetricsRequest() + + +def test_list_calculated_metrics_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.ListCalculatedMetricsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_calculated_metrics), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_calculated_metrics(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListCalculatedMetricsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_calculated_metrics_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_calculated_metrics + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_calculated_metrics + ] = mock_rpc + request = {} + client.list_calculated_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_calculated_metrics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_calculated_metrics_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_calculated_metrics), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListCalculatedMetricsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_calculated_metrics() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListCalculatedMetricsRequest() + + +@pytest.mark.asyncio +async def test_list_calculated_metrics_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_calculated_metrics + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_calculated_metrics + ] = mock_rpc + + request = {} + await client.list_calculated_metrics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_calculated_metrics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_calculated_metrics_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.ListCalculatedMetricsRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_calculated_metrics), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListCalculatedMetricsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_calculated_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.ListCalculatedMetricsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCalculatedMetricsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_calculated_metrics_async_from_dict(): + await test_list_calculated_metrics_async(request_type=dict) + + +def test_list_calculated_metrics_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_admin.ListCalculatedMetricsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_calculated_metrics), "__call__" + ) as call: + call.return_value = analytics_admin.ListCalculatedMetricsResponse() + client.list_calculated_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_calculated_metrics_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.ListCalculatedMetricsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_calculated_metrics), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListCalculatedMetricsResponse() + ) + await client.list_calculated_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_calculated_metrics_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_calculated_metrics), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.ListCalculatedMetricsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_calculated_metrics( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_calculated_metrics_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_calculated_metrics( + analytics_admin.ListCalculatedMetricsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_calculated_metrics_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_calculated_metrics), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.ListCalculatedMetricsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListCalculatedMetricsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_calculated_metrics( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_calculated_metrics_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_calculated_metrics( + analytics_admin.ListCalculatedMetricsRequest(), + parent="parent_value", + ) + + +def test_list_calculated_metrics_pager(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_calculated_metrics), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[ + resources.CalculatedMetric(), + resources.CalculatedMetric(), + resources.CalculatedMetric(), + ], + next_page_token="abc", + ), + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[], + next_page_token="def", + ), + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[ + resources.CalculatedMetric(), + ], + next_page_token="ghi", + ), + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[ + resources.CalculatedMetric(), + resources.CalculatedMetric(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_calculated_metrics(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert 
pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.CalculatedMetric) for i in results) + + +def test_list_calculated_metrics_pages(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_calculated_metrics), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[ + resources.CalculatedMetric(), + resources.CalculatedMetric(), + resources.CalculatedMetric(), + ], + next_page_token="abc", + ), + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[], + next_page_token="def", + ), + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[ + resources.CalculatedMetric(), + ], + next_page_token="ghi", + ), + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[ + resources.CalculatedMetric(), + resources.CalculatedMetric(), + ], + ), + RuntimeError, + ) + pages = list(client.list_calculated_metrics(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_calculated_metrics_async_pager(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_calculated_metrics), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[ + resources.CalculatedMetric(), + resources.CalculatedMetric(), + resources.CalculatedMetric(), + ], + next_page_token="abc", + ), + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[], + next_page_token="def", + ), + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[ + resources.CalculatedMetric(), + ], + next_page_token="ghi", + ), + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[ + resources.CalculatedMetric(), + resources.CalculatedMetric(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_calculated_metrics( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.CalculatedMetric) for i in responses) + + +@pytest.mark.asyncio +async def test_list_calculated_metrics_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_calculated_metrics), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[ + resources.CalculatedMetric(), + resources.CalculatedMetric(), + resources.CalculatedMetric(), + ], + next_page_token="abc", + ), + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[], + next_page_token="def", + ), + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[ + resources.CalculatedMetric(), + ], + next_page_token="ghi", + ), + analytics_admin.ListCalculatedMetricsResponse( + calculated_metrics=[ + resources.CalculatedMetric(), + resources.CalculatedMetric(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_calculated_metrics(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.UpdateCalculatedMetricRequest, + dict, + ], +) +def test_update_calculated_metric(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.CalculatedMetric( + name="name_value", + description="description_value", + display_name="display_name_value", + calculated_metric_id="calculated_metric_id_value", + metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, + restricted_metric_type=[ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ], + formula="formula_value", + invalid_metric_reference=True, + ) + response = client.update_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.UpdateCalculatedMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.CalculatedMetric) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.display_name == "display_name_value" + assert response.calculated_metric_id == "calculated_metric_id_value" + assert response.metric_unit == resources.CalculatedMetric.MetricUnit.STANDARD + assert response.restricted_metric_type == [ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ] + assert response.formula == "formula_value" + assert response.invalid_metric_reference is True + + +def test_update_calculated_metric_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_calculated_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_calculated_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.UpdateCalculatedMetricRequest() + + +def test_update_calculated_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.UpdateCalculatedMetricRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_calculated_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_calculated_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.UpdateCalculatedMetricRequest() + + +def test_update_calculated_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_calculated_metric + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_calculated_metric + ] = mock_rpc + request = {} + client.update_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_calculated_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_calculated_metric_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CalculatedMetric( + name="name_value", + description="description_value", + display_name="display_name_value", + calculated_metric_id="calculated_metric_id_value", + metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, + restricted_metric_type=[ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ], + formula="formula_value", + invalid_metric_reference=True, + ) + ) + response = await client.update_calculated_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.UpdateCalculatedMetricRequest() + + +@pytest.mark.asyncio +async def test_update_calculated_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_calculated_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_calculated_metric + ] = mock_rpc + + request = {} + await client.update_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_calculated_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_calculated_metric_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.UpdateCalculatedMetricRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CalculatedMetric( + name="name_value", + description="description_value", + display_name="display_name_value", + calculated_metric_id="calculated_metric_id_value", + metric_unit=resources.CalculatedMetric.MetricUnit.STANDARD, + restricted_metric_type=[ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ], + formula="formula_value", + invalid_metric_reference=True, + ) + ) + response = await client.update_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.UpdateCalculatedMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.CalculatedMetric) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.display_name == "display_name_value" + assert response.calculated_metric_id == "calculated_metric_id_value" + assert response.metric_unit == resources.CalculatedMetric.MetricUnit.STANDARD + assert response.restricted_metric_type == [ + resources.CalculatedMetric.RestrictedMetricType.COST_DATA + ] + assert response.formula == "formula_value" + assert response.invalid_metric_reference is True + + +@pytest.mark.asyncio +async def test_update_calculated_metric_async_from_dict(): + await test_update_calculated_metric_async(request_type=dict) + + +def test_update_calculated_metric_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.UpdateCalculatedMetricRequest() + + request.calculated_metric.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_calculated_metric), "__call__" + ) as call: + call.return_value = resources.CalculatedMetric() + client.update_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "calculated_metric.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_calculated_metric_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = analytics_admin.UpdateCalculatedMetricRequest() + + request.calculated_metric.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_calculated_metric), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CalculatedMetric() + ) + await client.update_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "calculated_metric.name=name_value", + ) in kw["metadata"] + + +def test_update_calculated_metric_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.CalculatedMetric() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_calculated_metric( + calculated_metric=resources.CalculatedMetric(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].calculated_metric + mock_val = resources.CalculatedMetric(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_calculated_metric_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_calculated_metric( + analytics_admin.UpdateCalculatedMetricRequest(), + calculated_metric=resources.CalculatedMetric(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_calculated_metric_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.CalculatedMetric() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CalculatedMetric() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_calculated_metric( + calculated_metric=resources.CalculatedMetric(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].calculated_metric + mock_val = resources.CalculatedMetric(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_calculated_metric_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_calculated_metric( + analytics_admin.UpdateCalculatedMetricRequest(), + calculated_metric=resources.CalculatedMetric(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.DeleteCalculatedMetricRequest, + dict, + ], +) +def test_delete_calculated_metric(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.DeleteCalculatedMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_calculated_metric_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_calculated_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_calculated_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteCalculatedMetricRequest() + + +def test_delete_calculated_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.DeleteCalculatedMetricRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_calculated_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_calculated_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteCalculatedMetricRequest( + name="name_value", + ) + + +def test_delete_calculated_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_calculated_metric + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_calculated_metric + ] = mock_rpc + request = {} + client.delete_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_calculated_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_calculated_metric_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_calculated_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteCalculatedMetricRequest() + + +@pytest.mark.asyncio +async def test_delete_calculated_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_calculated_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_calculated_metric + ] = mock_rpc + + request = {} + await client.delete_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_calculated_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_calculated_metric_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.DeleteCalculatedMetricRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.DeleteCalculatedMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_calculated_metric_async_from_dict(): + await test_delete_calculated_metric_async(request_type=dict) + + +def test_delete_calculated_metric_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.DeleteCalculatedMetricRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_calculated_metric), "__call__" + ) as call: + call.return_value = None + client.delete_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_calculated_metric_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.DeleteCalculatedMetricRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_calculated_metric), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_calculated_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_calculated_metric_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_calculated_metric( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_calculated_metric_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_calculated_metric( + analytics_admin.DeleteCalculatedMetricRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_calculated_metric_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_calculated_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_calculated_metric( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_calculated_metric_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_calculated_metric( + analytics_admin.DeleteCalculatedMetricRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.CreateRollupPropertyRequest, + dict, + ], +) +def test_create_rollup_property(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_rollup_property), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.CreateRollupPropertyResponse() + response = client.create_rollup_property(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.CreateRollupPropertyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_admin.CreateRollupPropertyResponse) + + +def test_create_rollup_property_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_rollup_property), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_rollup_property() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateRollupPropertyRequest() + + +def test_create_rollup_property_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.CreateRollupPropertyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_rollup_property), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_rollup_property(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateRollupPropertyRequest() + + +def test_create_rollup_property_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_rollup_property + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_rollup_property + ] = mock_rpc + request = {} + client.create_rollup_property(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_rollup_property(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_rollup_property_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_rollup_property), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.CreateRollupPropertyResponse() + ) + response = await client.create_rollup_property() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateRollupPropertyRequest() + + +@pytest.mark.asyncio +async def test_create_rollup_property_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_rollup_property + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_rollup_property + ] = mock_rpc + + request = {} + await client.create_rollup_property(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.create_rollup_property(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_rollup_property_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.CreateRollupPropertyRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_rollup_property), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.CreateRollupPropertyResponse() + ) + response = await client.create_rollup_property(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.CreateRollupPropertyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, analytics_admin.CreateRollupPropertyResponse) + + +@pytest.mark.asyncio +async def test_create_rollup_property_async_from_dict(): + await test_create_rollup_property_async(request_type=dict) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetRollupPropertySourceLinkRequest, + dict, + ], +) +def test_get_rollup_property_source_link(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.RollupPropertySourceLink( + name="name_value", + source_property="source_property_value", + ) + response = client.get_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.GetRollupPropertySourceLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.RollupPropertySourceLink) + assert response.name == "name_value" + assert response.source_property == "source_property_value" + + +def test_get_rollup_property_source_link_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_rollup_property_source_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_rollup_property_source_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetRollupPropertySourceLinkRequest() + + +def test_get_rollup_property_source_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.GetRollupPropertySourceLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_rollup_property_source_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_rollup_property_source_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetRollupPropertySourceLinkRequest( + name="name_value", + ) + + +def test_get_rollup_property_source_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_rollup_property_source_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_rollup_property_source_link + ] = mock_rpc + request = {} + client.get_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_rollup_property_source_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_rollup_property_source_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.RollupPropertySourceLink( + name="name_value", + source_property="source_property_value", + ) + ) + response = await client.get_rollup_property_source_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetRollupPropertySourceLinkRequest() + + +@pytest.mark.asyncio +async def test_get_rollup_property_source_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_rollup_property_source_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_rollup_property_source_link + ] = mock_rpc + + request = {} + await client.get_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_rollup_property_source_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_rollup_property_source_link_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.GetRollupPropertySourceLinkRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.RollupPropertySourceLink( + name="name_value", + source_property="source_property_value", + ) + ) + response = await client.get_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.GetRollupPropertySourceLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.RollupPropertySourceLink) + assert response.name == "name_value" + assert response.source_property == "source_property_value" + + +@pytest.mark.asyncio +async def test_get_rollup_property_source_link_async_from_dict(): + await test_get_rollup_property_source_link_async(request_type=dict) + + +def test_get_rollup_property_source_link_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.GetRollupPropertySourceLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_rollup_property_source_link), "__call__" + ) as call: + call.return_value = resources.RollupPropertySourceLink() + client.get_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_rollup_property_source_link_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.GetRollupPropertySourceLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_rollup_property_source_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.RollupPropertySourceLink() + ) + await client.get_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_rollup_property_source_link_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.RollupPropertySourceLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_rollup_property_source_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_rollup_property_source_link_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_rollup_property_source_link( + analytics_admin.GetRollupPropertySourceLinkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_rollup_property_source_link_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.RollupPropertySourceLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.RollupPropertySourceLink() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_rollup_property_source_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_rollup_property_source_link_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_rollup_property_source_link( + analytics_admin.GetRollupPropertySourceLinkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ListRollupPropertySourceLinksRequest, + dict, + ], +) +def test_list_rollup_property_source_links(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_rollup_property_source_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.ListRollupPropertySourceLinksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_rollup_property_source_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.ListRollupPropertySourceLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRollupPropertySourceLinksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_rollup_property_source_links_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_rollup_property_source_links), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_rollup_property_source_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListRollupPropertySourceLinksRequest() + + +def test_list_rollup_property_source_links_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.ListRollupPropertySourceLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_rollup_property_source_links), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_rollup_property_source_links(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListRollupPropertySourceLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_rollup_property_source_links_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_rollup_property_source_links + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_rollup_property_source_links + ] = mock_rpc + request = {} + client.list_rollup_property_source_links(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_rollup_property_source_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_rollup_property_source_links_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_rollup_property_source_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListRollupPropertySourceLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_rollup_property_source_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListRollupPropertySourceLinksRequest() + + +@pytest.mark.asyncio +async def test_list_rollup_property_source_links_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_rollup_property_source_links + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_rollup_property_source_links + ] = mock_rpc + + request = {} + await client.list_rollup_property_source_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_rollup_property_source_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_rollup_property_source_links_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.ListRollupPropertySourceLinksRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_rollup_property_source_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListRollupPropertySourceLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_rollup_property_source_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.ListRollupPropertySourceLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRollupPropertySourceLinksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_rollup_property_source_links_async_from_dict(): + await test_list_rollup_property_source_links_async(request_type=dict) + + +def test_list_rollup_property_source_links_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.ListRollupPropertySourceLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_rollup_property_source_links), "__call__" + ) as call: + call.return_value = analytics_admin.ListRollupPropertySourceLinksResponse() + client.list_rollup_property_source_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_rollup_property_source_links_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.ListRollupPropertySourceLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_rollup_property_source_links), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListRollupPropertySourceLinksResponse() + ) + await client.list_rollup_property_source_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_rollup_property_source_links_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_rollup_property_source_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.ListRollupPropertySourceLinksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_rollup_property_source_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_rollup_property_source_links_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_rollup_property_source_links( + analytics_admin.ListRollupPropertySourceLinksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_rollup_property_source_links_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_rollup_property_source_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.ListRollupPropertySourceLinksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListRollupPropertySourceLinksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_rollup_property_source_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_rollup_property_source_links_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_rollup_property_source_links( + analytics_admin.ListRollupPropertySourceLinksRequest(), + parent="parent_value", + ) + + +def test_list_rollup_property_source_links_pager(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_rollup_property_source_links), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[], + next_page_token="def", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_rollup_property_source_links( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.RollupPropertySourceLink) for i in results) + + +def test_list_rollup_property_source_links_pages(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_rollup_property_source_links), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[], + next_page_token="def", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + ], + ), + RuntimeError, + ) + pages = list(client.list_rollup_property_source_links(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_rollup_property_source_links_async_pager(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_rollup_property_source_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[], + next_page_token="def", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_rollup_property_source_links( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.RollupPropertySourceLink) for i in responses) + + +@pytest.mark.asyncio +async def test_list_rollup_property_source_links_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_rollup_property_source_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[], + next_page_token="def", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_rollup_property_source_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.CreateRollupPropertySourceLinkRequest, + dict, + ], +) +def test_create_rollup_property_source_link(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.RollupPropertySourceLink( + name="name_value", + source_property="source_property_value", + ) + response = client.create_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.CreateRollupPropertySourceLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.RollupPropertySourceLink) + assert response.name == "name_value" + assert response.source_property == "source_property_value" + + +def test_create_rollup_property_source_link_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_rollup_property_source_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_rollup_property_source_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateRollupPropertySourceLinkRequest() + + +def test_create_rollup_property_source_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = analytics_admin.CreateRollupPropertySourceLinkRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_rollup_property_source_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_rollup_property_source_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateRollupPropertySourceLinkRequest( + parent="parent_value", + ) + + +def test_create_rollup_property_source_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_rollup_property_source_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_rollup_property_source_link + ] = mock_rpc + request = {} + client.create_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_rollup_property_source_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_rollup_property_source_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.RollupPropertySourceLink( + name="name_value", + source_property="source_property_value", + ) + ) + response = await client.create_rollup_property_source_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateRollupPropertySourceLinkRequest() + + +@pytest.mark.asyncio +async def test_create_rollup_property_source_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_rollup_property_source_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + 
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[
+            client._client._transport.create_rollup_property_source_link
+        ] = mock_rpc
+
+        request = {}
+        await client.create_rollup_property_source_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.create_rollup_property_source_link(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_create_rollup_property_source_link_async(
+    transport: str = "grpc_asyncio",
+    request_type=analytics_admin.CreateRollupPropertySourceLinkRequest,
+):
+    client = AnalyticsAdminServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_rollup_property_source_link), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            resources.RollupPropertySourceLink(
+                name="name_value",
+                source_property="source_property_value",
+            )
+        )
+        response = await client.create_rollup_property_source_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = analytics_admin.CreateRollupPropertySourceLinkRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+ assert isinstance(response, resources.RollupPropertySourceLink) + assert response.name == "name_value" + assert response.source_property == "source_property_value" + + +@pytest.mark.asyncio +async def test_create_rollup_property_source_link_async_from_dict(): + await test_create_rollup_property_source_link_async(request_type=dict) + + +def test_create_rollup_property_source_link_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.CreateRollupPropertySourceLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_rollup_property_source_link), "__call__" + ) as call: + call.return_value = resources.RollupPropertySourceLink() + client.create_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_rollup_property_source_link_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.CreateRollupPropertySourceLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_rollup_property_source_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.RollupPropertySourceLink() + ) + await client.create_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_rollup_property_source_link_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.RollupPropertySourceLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_rollup_property_source_link( + parent="parent_value", + rollup_property_source_link=resources.RollupPropertySourceLink( + name="name_value" + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].rollup_property_source_link + mock_val = resources.RollupPropertySourceLink(name="name_value") + assert arg == mock_val + + +def test_create_rollup_property_source_link_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_rollup_property_source_link( + analytics_admin.CreateRollupPropertySourceLinkRequest(), + parent="parent_value", + rollup_property_source_link=resources.RollupPropertySourceLink( + name="name_value" + ), + ) + + +@pytest.mark.asyncio +async def test_create_rollup_property_source_link_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.RollupPropertySourceLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.RollupPropertySourceLink() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_rollup_property_source_link( + parent="parent_value", + rollup_property_source_link=resources.RollupPropertySourceLink( + name="name_value" + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].rollup_property_source_link + mock_val = resources.RollupPropertySourceLink(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_rollup_property_source_link_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_rollup_property_source_link( + analytics_admin.CreateRollupPropertySourceLinkRequest(), + parent="parent_value", + rollup_property_source_link=resources.RollupPropertySourceLink( + name="name_value" + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.DeleteRollupPropertySourceLinkRequest, + dict, + ], +) +def test_delete_rollup_property_source_link(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.DeleteRollupPropertySourceLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_rollup_property_source_link_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_rollup_property_source_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_rollup_property_source_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteRollupPropertySourceLinkRequest() + + +def test_delete_rollup_property_source_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.DeleteRollupPropertySourceLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_rollup_property_source_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_rollup_property_source_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteRollupPropertySourceLinkRequest( + name="name_value", + ) + + +def test_delete_rollup_property_source_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_rollup_property_source_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_rollup_property_source_link + ] = mock_rpc + request = {} + client.delete_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_rollup_property_source_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_rollup_property_source_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_rollup_property_source_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteRollupPropertySourceLinkRequest() + + +@pytest.mark.asyncio +async def test_delete_rollup_property_source_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_rollup_property_source_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_rollup_property_source_link + ] = mock_rpc + + request = {} + await client.delete_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_rollup_property_source_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_rollup_property_source_link_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.DeleteRollupPropertySourceLinkRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.DeleteRollupPropertySourceLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_rollup_property_source_link_async_from_dict(): + await test_delete_rollup_property_source_link_async(request_type=dict) + + +def test_delete_rollup_property_source_link_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_admin.DeleteRollupPropertySourceLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_rollup_property_source_link), "__call__" + ) as call: + call.return_value = None + client.delete_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_rollup_property_source_link_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.DeleteRollupPropertySourceLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_rollup_property_source_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_rollup_property_source_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_rollup_property_source_link_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_rollup_property_source_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_rollup_property_source_link_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_rollup_property_source_link( + analytics_admin.DeleteRollupPropertySourceLinkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_rollup_property_source_link_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_rollup_property_source_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_rollup_property_source_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_rollup_property_source_link_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_rollup_property_source_link( + analytics_admin.DeleteRollupPropertySourceLinkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ProvisionSubpropertyRequest, + dict, + ], +) +def test_provision_subproperty(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_subproperty), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.ProvisionSubpropertyResponse() + response = client.provision_subproperty(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.ProvisionSubpropertyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_admin.ProvisionSubpropertyResponse) + + +def test_provision_subproperty_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_subproperty), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.provision_subproperty() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ProvisionSubpropertyRequest() + + +def test_provision_subproperty_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.ProvisionSubpropertyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_subproperty), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.provision_subproperty(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ProvisionSubpropertyRequest() + + +def test_provision_subproperty_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.provision_subproperty + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.provision_subproperty + ] = mock_rpc + request = {} + client.provision_subproperty(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.provision_subproperty(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_provision_subproperty_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_subproperty), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ProvisionSubpropertyResponse() + ) + response = await client.provision_subproperty() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ProvisionSubpropertyRequest() + + +@pytest.mark.asyncio +async def test_provision_subproperty_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.provision_subproperty + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.provision_subproperty + ] = mock_rpc + + request = {} + await client.provision_subproperty(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.provision_subproperty(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_provision_subproperty_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.ProvisionSubpropertyRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_subproperty), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ProvisionSubpropertyResponse() + ) + response = await client.provision_subproperty(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.ProvisionSubpropertyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, analytics_admin.ProvisionSubpropertyResponse) + + +@pytest.mark.asyncio +async def test_provision_subproperty_async_from_dict(): + await test_provision_subproperty_async(request_type=dict) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.CreateSubpropertyEventFilterRequest, + dict, + ], +) +def test_create_subproperty_event_filter(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) + response = client.create_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.CreateSubpropertyEventFilterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) + assert response.name == "name_value" + assert response.apply_to_property == "apply_to_property_value" + + +def test_create_subproperty_event_filter_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subproperty_event_filter), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_subproperty_event_filter() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateSubpropertyEventFilterRequest() + + +def test_create_subproperty_event_filter_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.CreateSubpropertyEventFilterRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subproperty_event_filter), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_subproperty_event_filter(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateSubpropertyEventFilterRequest( + parent="parent_value", + ) + + +def test_create_subproperty_event_filter_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_subproperty_event_filter + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_subproperty_event_filter + ] = mock_rpc + request = {} + client.create_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_subproperty_event_filter(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_subproperty_event_filter_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) + ) + response = await client.create_subproperty_event_filter() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.CreateSubpropertyEventFilterRequest() + + +@pytest.mark.asyncio +async def test_create_subproperty_event_filter_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_subproperty_event_filter + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_subproperty_event_filter + ] = mock_rpc + + request = {} + await client.create_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.create_subproperty_event_filter(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_subproperty_event_filter_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.CreateSubpropertyEventFilterRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) + ) + response = await client.create_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.CreateSubpropertyEventFilterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) + assert response.name == "name_value" + assert response.apply_to_property == "apply_to_property_value" + + +@pytest.mark.asyncio +async def test_create_subproperty_event_filter_async_from_dict(): + await test_create_subproperty_event_filter_async(request_type=dict) + + +def test_create_subproperty_event_filter_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.CreateSubpropertyEventFilterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subproperty_event_filter), "__call__" + ) as call: + call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + client.create_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_subproperty_event_filter_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.CreateSubpropertyEventFilterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_subproperty_event_filter), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_subproperty_event_filter.SubpropertyEventFilter() + ) + await client.create_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_subproperty_event_filter_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_subproperty_event_filter( + parent="parent_value", + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].subproperty_event_filter + mock_val = gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ) + assert arg == mock_val + + +def test_create_subproperty_event_filter_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_subproperty_event_filter( + analytics_admin.CreateSubpropertyEventFilterRequest(), + parent="parent_value", + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ), + ) + + +@pytest.mark.asyncio +async def test_create_subproperty_event_filter_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_subproperty_event_filter.SubpropertyEventFilter() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_subproperty_event_filter( + parent="parent_value", + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].subproperty_event_filter + mock_val = gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_subproperty_event_filter_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_subproperty_event_filter( + analytics_admin.CreateSubpropertyEventFilterRequest(), + parent="parent_value", + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetSubpropertyEventFilterRequest, + dict, + ], +) +def test_get_subproperty_event_filter(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) + response = client.get_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.GetSubpropertyEventFilterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, subproperty_event_filter.SubpropertyEventFilter) + assert response.name == "name_value" + assert response.apply_to_property == "apply_to_property_value" + + +def test_get_subproperty_event_filter_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subproperty_event_filter), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_subproperty_event_filter() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetSubpropertyEventFilterRequest() + + +def test_get_subproperty_event_filter_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.GetSubpropertyEventFilterRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_subproperty_event_filter), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_subproperty_event_filter(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetSubpropertyEventFilterRequest( + name="name_value", + ) + + +def test_get_subproperty_event_filter_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_subproperty_event_filter + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_subproperty_event_filter + ] = mock_rpc + request = {} + client.get_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_subproperty_event_filter(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_subproperty_event_filter_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) + ) + response = await client.get_subproperty_event_filter() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetSubpropertyEventFilterRequest() + + +@pytest.mark.asyncio +async def test_get_subproperty_event_filter_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_subproperty_event_filter + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_subproperty_event_filter + ] = mock_rpc + + request = {} + await client.get_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_subproperty_event_filter(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_subproperty_event_filter_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.GetSubpropertyEventFilterRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) + ) + response = await client.get_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.GetSubpropertyEventFilterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, subproperty_event_filter.SubpropertyEventFilter) + assert response.name == "name_value" + assert response.apply_to_property == "apply_to_property_value" + + +@pytest.mark.asyncio +async def test_get_subproperty_event_filter_async_from_dict(): + await test_get_subproperty_event_filter_async(request_type=dict) + + +def test_get_subproperty_event_filter_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.GetSubpropertyEventFilterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subproperty_event_filter), "__call__" + ) as call: + call.return_value = subproperty_event_filter.SubpropertyEventFilter() + client.get_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_subproperty_event_filter_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.GetSubpropertyEventFilterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_subproperty_event_filter), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + subproperty_event_filter.SubpropertyEventFilter() + ) + await client.get_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_subproperty_event_filter_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = subproperty_event_filter.SubpropertyEventFilter() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_subproperty_event_filter( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_subproperty_event_filter_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_subproperty_event_filter( + analytics_admin.GetSubpropertyEventFilterRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_subproperty_event_filter_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = subproperty_event_filter.SubpropertyEventFilter() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + subproperty_event_filter.SubpropertyEventFilter() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_subproperty_event_filter( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_subproperty_event_filter_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_subproperty_event_filter( + analytics_admin.GetSubpropertyEventFilterRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ListSubpropertyEventFiltersRequest, + dict, + ], +) +def test_list_subproperty_event_filters(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.ListSubpropertyEventFiltersResponse( + next_page_token="next_page_token_value", + ) + response = client.list_subproperty_event_filters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.ListSubpropertyEventFiltersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubpropertyEventFiltersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_subproperty_event_filters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_subproperty_event_filters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListSubpropertyEventFiltersRequest() + + +def test_list_subproperty_event_filters_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.ListSubpropertyEventFiltersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_subproperty_event_filters(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListSubpropertyEventFiltersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_subproperty_event_filters_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_subproperty_event_filters + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_subproperty_event_filters + ] = mock_rpc + request = {} + client.list_subproperty_event_filters(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_subproperty_event_filters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListSubpropertyEventFiltersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_subproperty_event_filters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListSubpropertyEventFiltersRequest() + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_subproperty_event_filters + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_subproperty_event_filters + ] = mock_rpc + + request = {} + await client.list_subproperty_event_filters(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_subproperty_event_filters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.ListSubpropertyEventFiltersRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListSubpropertyEventFiltersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_subproperty_event_filters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.ListSubpropertyEventFiltersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSubpropertyEventFiltersAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_async_from_dict(): + await test_list_subproperty_event_filters_async(request_type=dict) + + +def test_list_subproperty_event_filters_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.ListSubpropertyEventFiltersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + call.return_value = analytics_admin.ListSubpropertyEventFiltersResponse() + client.list_subproperty_event_filters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.ListSubpropertyEventFiltersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListSubpropertyEventFiltersResponse() + ) + await client.list_subproperty_event_filters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_subproperty_event_filters_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.ListSubpropertyEventFiltersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_subproperty_event_filters( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_subproperty_event_filters_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_subproperty_event_filters( + analytics_admin.ListSubpropertyEventFiltersRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.ListSubpropertyEventFiltersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListSubpropertyEventFiltersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_subproperty_event_filters( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_subproperty_event_filters( + analytics_admin.ListSubpropertyEventFiltersRequest(), + parent="parent_value", + ) + + +def test_list_subproperty_event_filters_pager(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="abc", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[], + next_page_token="def", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_subproperty_event_filters( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, subproperty_event_filter.SubpropertyEventFilter) + for i in results + ) + + +def test_list_subproperty_event_filters_pages(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="abc", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[], + next_page_token="def", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + ), + RuntimeError, + ) + pages = list(client.list_subproperty_event_filters(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_async_pager(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="abc", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[], + next_page_token="def", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_subproperty_event_filters( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, subproperty_event_filter.SubpropertyEventFilter) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="abc", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[], + next_page_token="def", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_subproperty_event_filters(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.UpdateSubpropertyEventFilterRequest, + dict, + ], +) +def test_update_subproperty_event_filter(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) + response = client.update_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.UpdateSubpropertyEventFilterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) + assert response.name == "name_value" + assert response.apply_to_property == "apply_to_property_value" + + +def test_update_subproperty_event_filter_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subproperty_event_filter), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_subproperty_event_filter() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.UpdateSubpropertyEventFilterRequest() + + +def test_update_subproperty_event_filter_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.UpdateSubpropertyEventFilterRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subproperty_event_filter), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_subproperty_event_filter(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.UpdateSubpropertyEventFilterRequest() + + +def test_update_subproperty_event_filter_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_subproperty_event_filter + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_subproperty_event_filter + ] = mock_rpc + request = {} + client.update_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_subproperty_event_filter(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_subproperty_event_filter_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) + ) + response = await client.update_subproperty_event_filter() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.UpdateSubpropertyEventFilterRequest() + + +@pytest.mark.asyncio +async def test_update_subproperty_event_filter_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_subproperty_event_filter + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + 
mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_subproperty_event_filter + ] = mock_rpc + + request = {} + await client.update_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_subproperty_event_filter(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_subproperty_event_filter_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.UpdateSubpropertyEventFilterRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) + ) + response = await client.update_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.UpdateSubpropertyEventFilterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) + assert response.name == "name_value" + assert response.apply_to_property == "apply_to_property_value" + + +@pytest.mark.asyncio +async def test_update_subproperty_event_filter_async_from_dict(): + await test_update_subproperty_event_filter_async(request_type=dict) + + +def test_update_subproperty_event_filter_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.UpdateSubpropertyEventFilterRequest() + + request.subproperty_event_filter.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subproperty_event_filter), "__call__" + ) as call: + call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + client.update_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subproperty_event_filter.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_subproperty_event_filter_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.UpdateSubpropertyEventFilterRequest() + + request.subproperty_event_filter.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_subproperty_event_filter), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_subproperty_event_filter.SubpropertyEventFilter() + ) + await client.update_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subproperty_event_filter.name=name_value", + ) in kw["metadata"] + + +def test_update_subproperty_event_filter_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_subproperty_event_filter( + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].subproperty_event_filter + mock_val = gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_subproperty_event_filter_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_subproperty_event_filter( + analytics_admin.UpdateSubpropertyEventFilterRequest(), + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_subproperty_event_filter_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_subproperty_event_filter.SubpropertyEventFilter() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_subproperty_event_filter( + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].subproperty_event_filter + mock_val = gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_subproperty_event_filter_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_subproperty_event_filter( + analytics_admin.UpdateSubpropertyEventFilterRequest(), + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.DeleteSubpropertyEventFilterRequest, + dict, + ], +) +def test_delete_subproperty_event_filter(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.DeleteSubpropertyEventFilterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_subproperty_event_filter_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_subproperty_event_filter() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteSubpropertyEventFilterRequest() + + +def test_delete_subproperty_event_filter_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.DeleteSubpropertyEventFilterRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_subproperty_event_filter(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteSubpropertyEventFilterRequest( + name="name_value", + ) + + +def test_delete_subproperty_event_filter_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_subproperty_event_filter + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_subproperty_event_filter + ] = mock_rpc + request = {} + client.delete_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_subproperty_event_filter(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_subproperty_event_filter_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_subproperty_event_filter() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteSubpropertyEventFilterRequest() + + +@pytest.mark.asyncio +async def test_delete_subproperty_event_filter_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_subproperty_event_filter + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_subproperty_event_filter + ] = mock_rpc + + request = {} + await client.delete_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_subproperty_event_filter(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_subproperty_event_filter_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.DeleteSubpropertyEventFilterRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.DeleteSubpropertyEventFilterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_subproperty_event_filter_async_from_dict(): + await test_delete_subproperty_event_filter_async(request_type=dict) + + +def test_delete_subproperty_event_filter_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_admin.DeleteSubpropertyEventFilterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + call.return_value = None + client.delete_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_subproperty_event_filter_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.DeleteSubpropertyEventFilterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_subproperty_event_filter_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_subproperty_event_filter( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_subproperty_event_filter_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_subproperty_event_filter( + analytics_admin.DeleteSubpropertyEventFilterRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_subproperty_event_filter_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_subproperty_event_filter( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_subproperty_event_filter_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_subproperty_event_filter( + analytics_admin.DeleteSubpropertyEventFilterRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetAccountRequest, + dict, + ], +) +def test_get_account_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Account( + name="name_value", + display_name="display_name_value", + region_code="region_code_value", + deleted=True, + gmp_organization="gmp_organization_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_account(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Account) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.region_code == "region_code_value" + assert response.deleted is True + assert response.gmp_organization == "gmp_organization_value" + + +def test_get_account_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_account in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_account] = mock_rpc + + request = {} + client.get_account(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_account_rest_required_fields( + request_type=analytics_admin.GetAccountRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Account() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_account(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_account_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_account._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_account_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_get_account" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_account" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = analytics_admin.GetAccountRequest.pb( + analytics_admin.GetAccountRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Account.to_json(resources.Account()) + + request = analytics_admin.GetAccountRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Account() + + client.get_account( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_account_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetAccountRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_account(request) + + +def test_get_account_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.Account() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_account(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=accounts/*}" % client.transport._host, args[1] + ) + + +def test_get_account_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_account( + analytics_admin.GetAccountRequest(), + name="name_value", + ) + + +def test_get_account_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ListAccountsRequest, + dict, + ], +) +def test_list_accounts_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_admin.ListAccountsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_admin.ListAccountsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_accounts(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAccountsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_accounts_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_accounts in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_accounts] = mock_rpc + + request = {} + client.list_accounts(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_accounts(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_accounts_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_list_accounts" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_accounts" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_admin.ListAccountsRequest.pb( + analytics_admin.ListAccountsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = analytics_admin.ListAccountsResponse.to_json( + analytics_admin.ListAccountsResponse() + ) + + request = analytics_admin.ListAccountsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_admin.ListAccountsResponse() + + client.list_accounts( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_accounts_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListAccountsRequest +): + client = 
AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_accounts(request) + + +def test_list_accounts_rest_pager(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListAccountsResponse( + accounts=[ + resources.Account(), + resources.Account(), + resources.Account(), + ], + next_page_token="abc", + ), + analytics_admin.ListAccountsResponse( + accounts=[], + next_page_token="def", + ), + analytics_admin.ListAccountsResponse( + accounts=[ + resources.Account(), + ], + next_page_token="ghi", + ), + analytics_admin.ListAccountsResponse( + accounts=[ + resources.Account(), + resources.Account(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListAccountsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_accounts(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Account) for i in results) + + pages = list(client.list_accounts(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.DeleteAccountRequest, + dict, + ], +) +def test_delete_account_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_account(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_account_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_account in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_account] = mock_rpc + + request = {} + client.delete_account(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_account_rest_required_fields( + request_type=analytics_admin.DeleteAccountRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_account(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_account_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_account._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_account_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_account" + ) as pre: + pre.assert_not_called() + pb_message = analytics_admin.DeleteAccountRequest.pb( + analytics_admin.DeleteAccountRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = analytics_admin.DeleteAccountRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_account( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_account_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteAccountRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_account(request) + + +def test_delete_account_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_account(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=accounts/*}" % client.transport._host, args[1] + ) + + +def test_delete_account_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_account( + analytics_admin.DeleteAccountRequest(), + name="name_value", + ) + + +def test_delete_account_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.UpdateAccountRequest, + dict, + ], +) +def test_update_account_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"account": {"name": "accounts/sample1"}} + request_init["account"] = { + "name": "accounts/sample1", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "display_name": "display_name_value", + "region_code": "region_code_value", + "deleted": True, + "gmp_organization": "gmp_organization_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateAccountRequest.meta.fields["account"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["account"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["account"][field])): + del request_init["account"][field][i][subfield] + else: + del 
request_init["account"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Account( + name="name_value", + display_name="display_name_value", + region_code="region_code_value", + deleted=True, + gmp_organization="gmp_organization_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_account(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Account) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.region_code == "region_code_value" + assert response.deleted is True + assert response.gmp_organization == "gmp_organization_value" + + +def test_update_account_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_account in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # 
operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_account] = mock_rpc + + request = {} + client.update_account(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_account_rest_required_fields( + request_type=analytics_admin.UpdateAccountRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_account._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Account() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_account(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_account_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_account._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "account", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_account_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + 
) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_update_account" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_account" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_admin.UpdateAccountRequest.pb( + analytics_admin.UpdateAccountRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Account.to_json(resources.Account()) + + request = analytics_admin.UpdateAccountRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Account() + + client.update_account( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_account_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateAccountRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"account": {"name": "accounts/sample1"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_account(request) + + +def test_update_account_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Account() + + # get arguments that satisfy an http rule for this method + sample_request = {"account": {"name": "accounts/sample1"}} + + # get truthy value for each flattened field + mock_args = dict( + account=resources.Account(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Account.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_account(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{account.name=accounts/*}" % client.transport._host, args[1] + ) + + +def test_update_account_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_account( + analytics_admin.UpdateAccountRequest(), + account=resources.Account(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_account_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ProvisionAccountTicketRequest, + dict, + ], +) +def test_provision_account_ticket_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_admin.ProvisionAccountTicketResponse( + account_ticket_id="account_ticket_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_admin.ProvisionAccountTicketResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.provision_account_ticket(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_admin.ProvisionAccountTicketResponse) + assert response.account_ticket_id == "account_ticket_id_value" + + +def test_provision_account_ticket_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.provision_account_ticket + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.provision_account_ticket + ] = mock_rpc + + request = {} + client.provision_account_ticket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.provision_account_ticket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_provision_account_ticket_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_provision_account_ticket" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_provision_account_ticket" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_admin.ProvisionAccountTicketRequest.pb( + analytics_admin.ProvisionAccountTicketRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + analytics_admin.ProvisionAccountTicketResponse.to_json( + analytics_admin.ProvisionAccountTicketResponse() + ) + ) + + request = analytics_admin.ProvisionAccountTicketRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_admin.ProvisionAccountTicketResponse() + + client.provision_account_ticket( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def 
test_provision_account_ticket_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ProvisionAccountTicketRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.provision_account_ticket(request) + + +def test_provision_account_ticket_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ListAccountSummariesRequest, + dict, + ], +) +def test_list_account_summaries_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_admin.ListAccountSummariesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_admin.ListAccountSummariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_account_summaries(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAccountSummariesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_account_summaries_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_account_summaries + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_account_summaries + ] = mock_rpc + + request = {} + client.list_account_summaries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_account_summaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_account_summaries_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_list_account_summaries" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_account_summaries" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_admin.ListAccountSummariesRequest.pb( + analytics_admin.ListAccountSummariesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + analytics_admin.ListAccountSummariesResponse.to_json( + analytics_admin.ListAccountSummariesResponse() + ) + ) + + request = analytics_admin.ListAccountSummariesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_admin.ListAccountSummariesResponse() + + client.list_account_summaries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def 
test_list_account_summaries_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListAccountSummariesRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_account_summaries(request) + + +def test_list_account_summaries_rest_pager(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListAccountSummariesResponse( + account_summaries=[ + resources.AccountSummary(), + resources.AccountSummary(), + resources.AccountSummary(), + ], + next_page_token="abc", + ), + analytics_admin.ListAccountSummariesResponse( + account_summaries=[], + next_page_token="def", + ), + analytics_admin.ListAccountSummariesResponse( + account_summaries=[ + resources.AccountSummary(), + ], + next_page_token="ghi", + ), + analytics_admin.ListAccountSummariesResponse( + account_summaries=[ + resources.AccountSummary(), + resources.AccountSummary(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListAccountSummariesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_account_summaries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AccountSummary) for i in results) + + pages = list(client.list_account_summaries(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetPropertyRequest, + dict, + ], +) +def test_get_property_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within 
the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Property( + name="name_value", + property_type=resources.PropertyType.PROPERTY_TYPE_ORDINARY, + parent="parent_value", + display_name="display_name_value", + industry_category=resources.IndustryCategory.AUTOMOTIVE, + time_zone="time_zone_value", + currency_code="currency_code_value", + service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD, + account="account_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Property.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_property(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Property) + assert response.name == "name_value" + assert response.property_type == resources.PropertyType.PROPERTY_TYPE_ORDINARY + assert response.parent == "parent_value" + assert response.display_name == "display_name_value" + assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE + assert response.time_zone == "time_zone_value" + assert response.currency_code == "currency_code_value" + assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD + assert response.account == "account_value" + + +def test_get_property_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_property in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_property] = mock_rpc + + request = {} + client.get_property(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_property(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_property_rest_required_fields( + request_type=analytics_admin.GetPropertyRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_property._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_property._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Property() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Property.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_property(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_property_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_property._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_property_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_get_property" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_property" + ) as pre: + pre.assert_not_called() + post.assert_not_called() 
+ pb_message = analytics_admin.GetPropertyRequest.pb( + analytics_admin.GetPropertyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Property.to_json(resources.Property()) + + request = analytics_admin.GetPropertyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Property() + + client.get_property( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_property_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetPropertyRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_property(request) + + +def test_get_property_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.Property() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Property.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_property(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*}" % client.transport._host, args[1] + ) + + +def test_get_property_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_property( + analytics_admin.GetPropertyRequest(), + name="name_value", + ) + + +def test_get_property_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ListPropertiesRequest, + dict, + ], +) +def test_list_properties_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_admin.ListPropertiesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_admin.ListPropertiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_properties(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPropertiesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_properties_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_properties in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_properties] = mock_rpc + + request = {} + client.list_properties(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_properties(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_properties_rest_required_fields( + request_type=analytics_admin.ListPropertiesRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["filter"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "filter" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_properties._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "filter" in jsonified_request + assert jsonified_request["filter"] == request_init["filter"] + + jsonified_request["filter"] = "filter_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_properties._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + "show_deleted", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = analytics_admin.ListPropertiesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_admin.ListPropertiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_properties(request) + + expected_params = [ + ( + "filter", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_properties_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_properties._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + "showDeleted", + ) + ) + & set(("filter",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_properties_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else 
transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_list_properties" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_properties" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_admin.ListPropertiesRequest.pb( + analytics_admin.ListPropertiesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = analytics_admin.ListPropertiesResponse.to_json( + analytics_admin.ListPropertiesResponse() + ) + + request = analytics_admin.ListPropertiesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_admin.ListPropertiesResponse() + + client.list_properties( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_properties_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListPropertiesRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_properties(request) + + +def test_list_properties_rest_pager(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListPropertiesResponse( + properties=[ + resources.Property(), + resources.Property(), + resources.Property(), + ], + next_page_token="abc", + ), + analytics_admin.ListPropertiesResponse( + properties=[], + next_page_token="def", + ), + analytics_admin.ListPropertiesResponse( + properties=[ + resources.Property(), + ], + next_page_token="ghi", + ), + analytics_admin.ListPropertiesResponse( + properties=[ + resources.Property(), + resources.Property(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListPropertiesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_properties(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Property) for i in results) + + pages = 
list(client.list_properties(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.CreatePropertyRequest, + dict, + ], +) +def test_create_property_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["property"] = { + "name": "name_value", + "property_type": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "parent": "parent_value", + "display_name": "display_name_value", + "industry_category": 1, + "time_zone": "time_zone_value", + "currency_code": "currency_code_value", + "service_level": 1, + "delete_time": {}, + "expire_time": {}, + "account": "account_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreatePropertyRequest.meta.fields["property"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["property"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["property"][field])): + del request_init["property"][field][i][subfield] + else: + del 
request_init["property"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Property( + name="name_value", + property_type=resources.PropertyType.PROPERTY_TYPE_ORDINARY, + parent="parent_value", + display_name="display_name_value", + industry_category=resources.IndustryCategory.AUTOMOTIVE, + time_zone="time_zone_value", + currency_code="currency_code_value", + service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD, + account="account_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Property.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_property(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Property) + assert response.name == "name_value" + assert response.property_type == resources.PropertyType.PROPERTY_TYPE_ORDINARY + assert response.parent == "parent_value" + assert response.display_name == "display_name_value" + assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE + assert response.time_zone == "time_zone_value" + assert response.currency_code == "currency_code_value" + assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD + assert response.account == "account_value" + + +def test_create_property_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_property in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_property] = mock_rpc + + request = {} + client.create_property(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_property(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_property_rest_required_fields( + request_type=analytics_admin.CreatePropertyRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_property._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_property._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Property() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Property.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_property(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_property_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_property._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("property",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_property_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_create_property" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_property" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_admin.CreatePropertyRequest.pb( + analytics_admin.CreatePropertyRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Property.to_json(resources.Property()) + + request = analytics_admin.CreatePropertyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Property() + + client.create_property( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_property_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreatePropertyRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_property(request) + + +def test_create_property_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.Property() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + property=resources.Property(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Property.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_property(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/properties" % client.transport._host, args[1] + ) + + +def test_create_property_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_property( + analytics_admin.CreatePropertyRequest(), + property=resources.Property(name="name_value"), + ) + + +def test_create_property_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.DeletePropertyRequest, + dict, + ], +) +def test_delete_property_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Property( + name="name_value", + property_type=resources.PropertyType.PROPERTY_TYPE_ORDINARY, + parent="parent_value", + display_name="display_name_value", + industry_category=resources.IndustryCategory.AUTOMOTIVE, + time_zone="time_zone_value", + currency_code="currency_code_value", + service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD, + account="account_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Property.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_property(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Property) + assert response.name == "name_value" + assert response.property_type == resources.PropertyType.PROPERTY_TYPE_ORDINARY + assert response.parent == "parent_value" + assert response.display_name == "display_name_value" + assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE + assert response.time_zone == "time_zone_value" + assert response.currency_code == "currency_code_value" + assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD + assert response.account == "account_value" + + +def test_delete_property_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_property in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_property] = mock_rpc + + request = {} + client.delete_property(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_property(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_property_rest_required_fields( + request_type=analytics_admin.DeletePropertyRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_property._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_property._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Property() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Property.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_property(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_property_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_property._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_property_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_delete_property" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_property" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = analytics_admin.DeletePropertyRequest.pb( + analytics_admin.DeletePropertyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Property.to_json(resources.Property()) + + request = analytics_admin.DeletePropertyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Property() + + client.delete_property( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_property_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeletePropertyRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_property(request) + + +def test_delete_property_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.Property() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Property.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_property(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*}" % client.transport._host, args[1] + ) + + +def test_delete_property_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_property( + analytics_admin.DeletePropertyRequest(), + name="name_value", + ) + + +def test_delete_property_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.UpdatePropertyRequest, + dict, + ], +) +def test_update_property_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"property": {"name": "properties/sample1"}} + request_init["property"] = { + "name": "properties/sample1", + "property_type": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "parent": "parent_value", + "display_name": "display_name_value", + "industry_category": 1, + "time_zone": "time_zone_value", + "currency_code": "currency_code_value", + "service_level": 1, + "delete_time": {}, + "expire_time": {}, + "account": "account_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdatePropertyRequest.meta.fields["property"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["property"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["property"][field])): + del request_init["property"][field][i][subfield] + else: + del 
request_init["property"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Property( + name="name_value", + property_type=resources.PropertyType.PROPERTY_TYPE_ORDINARY, + parent="parent_value", + display_name="display_name_value", + industry_category=resources.IndustryCategory.AUTOMOTIVE, + time_zone="time_zone_value", + currency_code="currency_code_value", + service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD, + account="account_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Property.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_property(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Property) + assert response.name == "name_value" + assert response.property_type == resources.PropertyType.PROPERTY_TYPE_ORDINARY + assert response.parent == "parent_value" + assert response.display_name == "display_name_value" + assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE + assert response.time_zone == "time_zone_value" + assert response.currency_code == "currency_code_value" + assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD + assert response.account == "account_value" + + +def test_update_property_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_property in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_property] = mock_rpc + + request = {} + client.update_property(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_property(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_property_rest_required_fields( + request_type=analytics_admin.UpdatePropertyRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_property._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_property._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Property() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Property.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_property(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_property_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_property._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "property", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_property_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_update_property" + ) as post, mock.patch.object( + 
transports.AnalyticsAdminServiceRestInterceptor, "pre_update_property" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_admin.UpdatePropertyRequest.pb( + analytics_admin.UpdatePropertyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Property.to_json(resources.Property()) + + request = analytics_admin.UpdatePropertyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Property() + + client.update_property( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_property_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdatePropertyRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"property": {"name": "properties/sample1"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_property(request) + + +def test_update_property_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Property() + + # get arguments that satisfy an http rule for this method + sample_request = {"property": {"name": "properties/sample1"}} + + # get truthy value for each flattened field + mock_args = dict( + property=resources.Property(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Property.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_property(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{property.name=properties/*}" % client.transport._host, args[1] + ) + + +def test_update_property_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_property( + analytics_admin.UpdatePropertyRequest(), + property=resources.Property(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_property_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.CreateFirebaseLinkRequest, + dict, + ], +) +def test_create_firebase_link_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request_init["firebase_link"] = { + "name": "name_value", + "project": "project_value", + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreateFirebaseLinkRequest.meta.fields["firebase_link"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["firebase_link"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firebase_link"][field])): + del request_init["firebase_link"][field][i][subfield] + else: + del 
request_init["firebase_link"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.FirebaseLink( + name="name_value", + project="project_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.FirebaseLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_firebase_link(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.FirebaseLink) + assert response.name == "name_value" + assert response.project == "project_value" + + +def test_create_firebase_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_firebase_link in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_firebase_link + ] = mock_rpc + + request = {} + client.create_firebase_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_firebase_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_firebase_link_rest_required_fields( + request_type=analytics_admin.CreateFirebaseLinkRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_firebase_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_firebase_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.FirebaseLink() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.FirebaseLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_firebase_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_firebase_link_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_firebase_link._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "firebaseLink", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_firebase_link_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_create_firebase_link" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_firebase_link" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_admin.CreateFirebaseLinkRequest.pb( + analytics_admin.CreateFirebaseLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.FirebaseLink.to_json( + resources.FirebaseLink() + ) + + request = analytics_admin.CreateFirebaseLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.FirebaseLink() + + client.create_firebase_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_firebase_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateFirebaseLinkRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_firebase_link(request) + + +def test_create_firebase_link_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.FirebaseLink() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + firebase_link=resources.FirebaseLink(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.FirebaseLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_firebase_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=properties/*}/firebaseLinks" % client.transport._host, + args[1], + ) + + +def test_create_firebase_link_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_firebase_link( + analytics_admin.CreateFirebaseLinkRequest(), + parent="parent_value", + firebase_link=resources.FirebaseLink(name="name_value"), + ) + + +def test_create_firebase_link_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.DeleteFirebaseLinkRequest, + dict, + ], +) +def test_delete_firebase_link_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/firebaseLinks/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_firebase_link(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_firebase_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_firebase_link in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_firebase_link + ] = mock_rpc + + request = {} + client.delete_firebase_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_firebase_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_firebase_link_rest_required_fields( + request_type=analytics_admin.DeleteFirebaseLinkRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_firebase_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_firebase_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_firebase_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_firebase_link_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_firebase_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_firebase_link_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_firebase_link" + ) as pre: + pre.assert_not_called() + pb_message = analytics_admin.DeleteFirebaseLinkRequest.pb( + analytics_admin.DeleteFirebaseLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = analytics_admin.DeleteFirebaseLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_firebase_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_firebase_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteFirebaseLinkRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/firebaseLinks/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_firebase_link(request) + + +def test_delete_firebase_link_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1/firebaseLinks/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_firebase_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/firebaseLinks/*}" % client.transport._host, + args[1], + ) + + +def test_delete_firebase_link_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_firebase_link( + analytics_admin.DeleteFirebaseLinkRequest(), + name="name_value", + ) + + +def test_delete_firebase_link_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ListFirebaseLinksRequest, + dict, + ], +) +def test_list_firebase_links_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_admin.ListFirebaseLinksResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_admin.ListFirebaseLinksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_firebase_links(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFirebaseLinksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_firebase_links_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_firebase_links in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_firebase_links + ] = mock_rpc + + request = {} + client.list_firebase_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_firebase_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_firebase_links_rest_required_fields( + request_type=analytics_admin.ListFirebaseLinksRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_firebase_links._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_firebase_links._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_admin.ListFirebaseLinksResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_admin.ListFirebaseLinksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_firebase_links(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_firebase_links_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_firebase_links._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_firebase_links_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) 
as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_list_firebase_links" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_firebase_links" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_admin.ListFirebaseLinksRequest.pb( + analytics_admin.ListFirebaseLinksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = analytics_admin.ListFirebaseLinksResponse.to_json( + analytics_admin.ListFirebaseLinksResponse() + ) + + request = analytics_admin.ListFirebaseLinksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_admin.ListFirebaseLinksResponse() + + client.list_firebase_links( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_firebase_links_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListFirebaseLinksRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_firebase_links(request) + + +def test_list_firebase_links_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -57864,14 +67869,14 @@ def test_get_account_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Account() + return_value = analytics_admin.ListFirebaseLinksResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "accounts/sample1"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -57879,23 +67884,24 @@ def test_get_account_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Account.pb(return_value) + return_value = analytics_admin.ListFirebaseLinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_account(**mock_args) + client.list_firebase_links(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=accounts/*}" % client.transport._host, args[1] + "%s/v1alpha/{parent=properties/*}/firebaseLinks" % client.transport._host, + args[1], ) -def test_get_account_rest_flattened_error(transport: str = "rest"): +def test_list_firebase_links_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -57904,59 +67910,118 @@ def test_get_account_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_account( - analytics_admin.GetAccountRequest(), - name="name_value", + client.list_firebase_links( + analytics_admin.ListFirebaseLinksRequest(), + parent="parent_value", ) -def test_get_account_rest_error(): +def test_list_firebase_links_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListFirebaseLinksResponse( + firebase_links=[ + resources.FirebaseLink(), + resources.FirebaseLink(), + resources.FirebaseLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListFirebaseLinksResponse( + firebase_links=[], + next_page_token="def", + ), + analytics_admin.ListFirebaseLinksResponse( + firebase_links=[ + resources.FirebaseLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListFirebaseLinksResponse( + firebase_links=[ + resources.FirebaseLink(), + resources.FirebaseLink(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListFirebaseLinksResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_firebase_links(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.FirebaseLink) for i in results) + + pages = list(client.list_firebase_links(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListAccountsRequest, + analytics_admin.GetGlobalSiteTagRequest, dict, ], ) -def test_list_accounts_rest(request_type): +def test_get_global_site_tag_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"name": 
"properties/sample1/dataStreams/sample2/globalSiteTag"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListAccountsResponse( - next_page_token="next_page_token_value", + return_value = resources.GlobalSiteTag( + name="name_value", + snippet="snippet_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListAccountsResponse.pb(return_value) + return_value = resources.GlobalSiteTag.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_accounts(request) + response = client.get_global_site_tag(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAccountsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.GlobalSiteTag) + assert response.name == "name_value" + assert response.snippet == "snippet_value" -def test_list_accounts_rest_use_cached_wrapped_rpc(): +def test_get_global_site_tag_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -57970,30 +68035,117 @@ def test_list_accounts_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_accounts in client._transport._wrapped_methods + assert ( + client._transport.get_global_site_tag in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_accounts] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_global_site_tag + ] = mock_rpc request = {} - client.list_accounts(request) + client.get_global_site_tag(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_accounts(request) + client.get_global_site_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +def test_get_global_site_tag_rest_required_fields( + request_type=analytics_admin.GetGlobalSiteTagRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_global_site_tag._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_global_site_tag._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.GlobalSiteTag() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.GlobalSiteTag.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_global_site_tag(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_global_site_tag_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_global_site_tag._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_accounts_rest_interceptors(null_interceptor): +def test_get_global_site_tag_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -58006,14 +68158,14 @@ def test_list_accounts_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_accounts" + transports.AnalyticsAdminServiceRestInterceptor, "post_get_global_site_tag" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_accounts" + 
transports.AnalyticsAdminServiceRestInterceptor, "pre_get_global_site_tag" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListAccountsRequest.pb( - analytics_admin.ListAccountsRequest() + pb_message = analytics_admin.GetGlobalSiteTagRequest.pb( + analytics_admin.GetGlobalSiteTagRequest() ) transcode.return_value = { "method": "post", @@ -58025,19 +68177,19 @@ def test_list_accounts_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.ListAccountsResponse.to_json( - analytics_admin.ListAccountsResponse() + req.return_value._content = resources.GlobalSiteTag.to_json( + resources.GlobalSiteTag() ) - request = analytics_admin.ListAccountsRequest() + request = analytics_admin.GetGlobalSiteTagRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListAccountsResponse() + post.return_value = resources.GlobalSiteTag() - client.list_accounts( + client.get_global_site_tag( request, metadata=[ ("key", "val"), @@ -58049,8 +68201,8 @@ def test_list_accounts_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_accounts_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListAccountsRequest +def test_get_global_site_tag_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetGlobalSiteTagRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -58058,7 +68210,7 @@ def test_list_accounts_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"name": "properties/sample1/dataStreams/sample2/globalSiteTag"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -58070,108 +68222,199 @@ def test_list_accounts_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_accounts(request) + client.get_global_site_tag(request) -def test_list_accounts_rest_pager(transport: str = "rest"): +def test_get_global_site_tag_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListAccountsResponse( - accounts=[ - resources.Account(), - resources.Account(), - resources.Account(), - ], - next_page_token="abc", - ), - analytics_admin.ListAccountsResponse( - accounts=[], - next_page_token="def", - ), - analytics_admin.ListAccountsResponse( - accounts=[ - resources.Account(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAccountsResponse( - accounts=[ - resources.Account(), - resources.Account(), - ], - ), + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.GlobalSiteTag() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "properties/sample1/dataStreams/sample2/globalSiteTag" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - # Two responses for two calls - response = response + response + mock_args.update(sample_request) - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListAccountsResponse.to_json(x) for x in response + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.GlobalSiteTag.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_global_site_tag(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/dataStreams/*/globalSiteTag}" + % client.transport._host, + args[1], ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {} - pager = client.list_accounts(request=sample_request) +def test_get_global_site_tag_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Account) for i in results) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_global_site_tag( + analytics_admin.GetGlobalSiteTagRequest(), + name="name_value", + ) - pages = list(client.list_accounts(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + +def test_get_global_site_tag_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteAccountRequest, + analytics_admin.CreateGoogleAdsLinkRequest, dict, ], ) -def test_delete_account_rest(request_type): +def test_create_google_ads_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "accounts/sample1"} + request_init = {"parent": "properties/sample1"} + request_init["google_ads_link"] = { + "name": "name_value", + "customer_id": "customer_id_value", + "can_manage_clients": True, + "ads_personalization_enabled": {"value": True}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "creator_email_address": "creator_email_address_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreateGoogleAdsLinkRequest.meta.fields[ + "google_ads_link" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["google_ads_link"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["google_ads_link"][field])): + del request_init["google_ads_link"][field][i][subfield] + else: 
+ del request_init["google_ads_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.GoogleAdsLink( + name="name_value", + customer_id="customer_id_value", + can_manage_clients=True, + creator_email_address="creator_email_address_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.GoogleAdsLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_account(request) + response = client.create_google_ads_link(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.GoogleAdsLink) + assert response.name == "name_value" + assert response.customer_id == "customer_id_value" + assert response.can_manage_clients is True + assert response.creator_email_address == "creator_email_address_value" -def test_delete_account_rest_use_cached_wrapped_rpc(): +def test_create_google_ads_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -58185,35 +68428,40 @@ def test_delete_account_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_account in client._transport._wrapped_methods + assert ( + client._transport.create_google_ads_link + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_account] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_google_ads_link + ] = mock_rpc request = {} - client.delete_account(request) + client.create_google_ads_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_account(request) + client.create_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_account_rest_required_fields( - request_type=analytics_admin.DeleteAccountRequest, +def test_create_google_ads_link_rest_required_fields( + request_type=analytics_admin.CreateGoogleAdsLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -58224,21 +68472,21 @@ def test_delete_account_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_account._get_unset_required_fields(jsonified_request) + ).create_google_ads_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_account._get_unset_required_fields(jsonified_request) + ).create_google_ads_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -58247,7 +68495,7 @@ def test_delete_account_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.GoogleAdsLink() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -58259,36 +68507,48 @@ def test_delete_account_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.GoogleAdsLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_account(request) + response = client.create_google_ads_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_account_rest_unset_required_fields(): +def test_create_google_ads_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_account._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_google_ads_link._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "googleAdsLink", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_account_rest_interceptors(null_interceptor): +def test_create_google_ads_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ 
-58301,11 +68561,14 @@ def test_delete_account_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_account" + transports.AnalyticsAdminServiceRestInterceptor, "post_create_google_ads_link" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_google_ads_link" ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteAccountRequest.pb( - analytics_admin.DeleteAccountRequest() + post.assert_not_called() + pb_message = analytics_admin.CreateGoogleAdsLinkRequest.pb( + analytics_admin.CreateGoogleAdsLinkRequest() ) transcode.return_value = { "method": "post", @@ -58317,15 +68580,19 @@ def test_delete_account_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.GoogleAdsLink.to_json( + resources.GoogleAdsLink() + ) - request = analytics_admin.DeleteAccountRequest() + request = analytics_admin.CreateGoogleAdsLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.GoogleAdsLink() - client.delete_account( + client.create_google_ads_link( request, metadata=[ ("key", "val"), @@ -58334,10 +68601,11 @@ def test_delete_account_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_account_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeleteAccountRequest +def test_create_google_ads_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateGoogleAdsLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -58345,7 +68613,7 @@ def test_delete_account_rest_bad_request( ) # send a request that will satisfy transcoding - 
request_init = {"name": "accounts/sample1"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -58357,10 +68625,10 @@ def test_delete_account_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_account(request) + client.create_google_ads_link(request) -def test_delete_account_rest_flattened(): +def test_create_google_ads_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -58369,36 +68637,40 @@ def test_delete_account_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.GoogleAdsLink() # get arguments that satisfy an http rule for this method - sample_request = {"name": "accounts/sample1"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + google_ads_link=resources.GoogleAdsLink(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.GoogleAdsLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_account(**mock_args) + client.create_google_ads_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=accounts/*}" % client.transport._host, args[1] + "%s/v1alpha/{parent=properties/*}/googleAdsLinks" % client.transport._host, + args[1], ) -def test_delete_account_rest_flattened_error(transport: str = "rest"): +def test_create_google_ads_link_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -58407,13 +68679,14 @@ def test_delete_account_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_account( - analytics_admin.DeleteAccountRequest(), - name="name_value", + client.create_google_ads_link( + analytics_admin.CreateGoogleAdsLinkRequest(), + parent="parent_value", + google_ads_link=resources.GoogleAdsLink(name="name_value"), ) -def test_delete_account_rest_error(): +def test_create_google_ads_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -58422,32 +68695,37 @@ def test_delete_account_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateAccountRequest, + analytics_admin.UpdateGoogleAdsLinkRequest, dict, ], ) -def test_update_account_rest(request_type): +def test_update_google_ads_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"account": {"name": "accounts/sample1"}} - request_init["account"] = { - "name": "accounts/sample1", + request_init = { + "google_ads_link": {"name": "properties/sample1/googleAdsLinks/sample2"} + } + request_init["google_ads_link"] = { + "name": "properties/sample1/googleAdsLinks/sample2", + "customer_id": "customer_id_value", + 
"can_manage_clients": True, + "ads_personalization_enabled": {"value": True}, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "display_name": "display_name_value", - "region_code": "region_code_value", - "deleted": True, + "creator_email_address": "creator_email_address_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateAccountRequest.meta.fields["account"] + test_field = analytics_admin.UpdateGoogleAdsLinkRequest.meta.fields[ + "google_ads_link" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -58475,7 +68753,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["account"].items(): # pragma: NO COVER + for field, value in request_init["google_ads_link"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -58505,42 +68783,42 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["account"][field])): - del request_init["account"][field][i][subfield] + for i in range(0, len(request_init["google_ads_link"][field])): + del request_init["google_ads_link"][field][i][subfield] else: - del request_init["account"][field][subfield] + del request_init["google_ads_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Account( + return_value = resources.GoogleAdsLink( name="name_value", - display_name="display_name_value", - region_code="region_code_value", - deleted=True, + customer_id="customer_id_value", + can_manage_clients=True, + creator_email_address="creator_email_address_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Account.pb(return_value) + return_value = resources.GoogleAdsLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_account(request) + response = client.update_google_ads_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Account) + assert isinstance(response, resources.GoogleAdsLink) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.region_code == "region_code_value" - assert response.deleted is True + assert response.customer_id == "customer_id_value" + assert response.can_manage_clients is True + assert response.creator_email_address == "creator_email_address_value" -def test_update_account_rest_use_cached_wrapped_rpc(): +def test_update_google_ads_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -58554,30 +68832,35 @@ def test_update_account_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_account in client._transport._wrapped_methods + assert ( + client._transport.update_google_ads_link + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_account] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_google_ads_link + ] = mock_rpc request = {} - client.update_account(request) + client.update_google_ads_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_account(request) + client.update_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_account_rest_required_fields( - request_type=analytics_admin.UpdateAccountRequest, +def test_update_google_ads_link_rest_required_fields( + request_type=analytics_admin.UpdateGoogleAdsLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -58592,14 +68875,14 @@ def test_update_account_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_account._get_unset_required_fields(jsonified_request) + ).update_google_ads_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_account._get_unset_required_fields(jsonified_request) + ).update_google_ads_link._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) @@ -58613,7 +68896,7 @@ def test_update_account_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Account() + return_value = resources.GoogleAdsLink() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -58635,38 +68918,30 @@ def test_update_account_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Account.pb(return_value) + return_value = resources.GoogleAdsLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_account(request) + response = client.update_google_ads_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_account_rest_unset_required_fields(): +def test_update_google_ads_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_account._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "account", - "updateMask", - ) - ) - ) + unset_fields = transport.update_google_ads_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_account_rest_interceptors(null_interceptor): +def test_update_google_ads_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -58679,14 +68954,14 @@ def test_update_account_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_update_account" + transports.AnalyticsAdminServiceRestInterceptor, "post_update_google_ads_link" ) as post, 
mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_account" + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_google_ads_link" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateAccountRequest.pb( - analytics_admin.UpdateAccountRequest() + pb_message = analytics_admin.UpdateGoogleAdsLinkRequest.pb( + analytics_admin.UpdateGoogleAdsLinkRequest() ) transcode.return_value = { "method": "post", @@ -58698,17 +68973,19 @@ def test_update_account_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Account.to_json(resources.Account()) + req.return_value._content = resources.GoogleAdsLink.to_json( + resources.GoogleAdsLink() + ) - request = analytics_admin.UpdateAccountRequest() + request = analytics_admin.UpdateGoogleAdsLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Account() + post.return_value = resources.GoogleAdsLink() - client.update_account( + client.update_google_ads_link( request, metadata=[ ("key", "val"), @@ -58720,8 +68997,8 @@ def test_update_account_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_account_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.UpdateAccountRequest +def test_update_google_ads_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateGoogleAdsLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -58729,7 +69006,9 @@ def test_update_account_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"account": {"name": "accounts/sample1"}} + request_init = { + "google_ads_link": {"name": "properties/sample1/googleAdsLinks/sample2"} + } request = 
request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -58741,10 +69020,10 @@ def test_update_account_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_account(request) + client.update_google_ads_link(request) -def test_update_account_rest_flattened(): +def test_update_google_ads_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -58753,14 +69032,16 @@ def test_update_account_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Account() + return_value = resources.GoogleAdsLink() # get arguments that satisfy an http rule for this method - sample_request = {"account": {"name": "accounts/sample1"}} + sample_request = { + "google_ads_link": {"name": "properties/sample1/googleAdsLinks/sample2"} + } # get truthy value for each flattened field mock_args = dict( - account=resources.Account(name="name_value"), + google_ads_link=resources.GoogleAdsLink(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -58769,23 +69050,25 @@ def test_update_account_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Account.pb(return_value) + return_value = resources.GoogleAdsLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_account(**mock_args) + client.update_google_ads_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{account.name=accounts/*}" % client.transport._host, args[1] + "%s/v1alpha/{google_ads_link.name=properties/*/googleAdsLinks/*}" + % client.transport._host, + args[1], ) -def test_update_account_rest_flattened_error(transport: str = "rest"): +def test_update_google_ads_link_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -58794,14 +69077,14 @@ def test_update_account_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_account( - analytics_admin.UpdateAccountRequest(), - account=resources.Account(name="name_value"), + client.update_google_ads_link( + analytics_admin.UpdateGoogleAdsLinkRequest(), + google_ads_link=resources.GoogleAdsLink(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_update_account_rest_error(): +def test_update_google_ads_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -58810,44 +69093,39 @@ def test_update_account_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ProvisionAccountTicketRequest, + analytics_admin.DeleteGoogleAdsLinkRequest, dict, ], ) -def test_provision_account_ticket_rest(request_type): +def test_delete_google_ads_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"name": "properties/sample1/googleAdsLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ProvisionAccountTicketResponse( - account_ticket_id="account_ticket_id_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ProvisionAccountTicketResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.provision_account_ticket(request) + response = client.delete_google_ads_link(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.ProvisionAccountTicketResponse) - assert response.account_ticket_id == "account_ticket_id_value" + assert response is None -def test_provision_account_ticket_rest_use_cached_wrapped_rpc(): +def test_delete_google_ads_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -58862,7 +69140,7 @@ def test_provision_account_ticket_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.provision_account_ticket + client._transport.delete_google_ads_link in client._transport._wrapped_methods ) @@ -58872,194 +69150,104 @@ def test_provision_account_ticket_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.provision_account_ticket + client._transport.delete_google_ads_link ] = mock_rpc request = {} - client.provision_account_ticket(request) + client.delete_google_ads_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.provision_account_ticket(request) + client.delete_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_provision_account_ticket_rest_interceptors(null_interceptor): - transport = transports.AnalyticsAdminServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.AnalyticsAdminServiceRestInterceptor(), - ) - client = AnalyticsAdminServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_provision_account_ticket" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_provision_account_ticket" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.ProvisionAccountTicketRequest.pb( - analytics_admin.ProvisionAccountTicketRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.ProvisionAccountTicketResponse.to_json( - analytics_admin.ProvisionAccountTicketResponse() - ) - ) - - request = analytics_admin.ProvisionAccountTicketRequest() - metadata = [ - ("key", "val"), - 
("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = analytics_admin.ProvisionAccountTicketResponse() - - client.provision_account_ticket( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_provision_account_ticket_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ProvisionAccountTicketRequest +def test_delete_google_ads_link_rest_required_fields( + request_type=analytics_admin.DeleteGoogleAdsLinkRequest, ): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.AnalyticsAdminServiceRestTransport - # send a request that will satisfy transcoding request_init = {} + request_init["name"] = "" request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.provision_account_ticket(request) + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_google_ads_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -def test_provision_account_ticket_rest_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_google_ads_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.ListAccountSummariesRequest, - dict, - ], -) -def test_list_account_summaries_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - - # send a request that will satisfy transcoding - request_init = {} request = request_type(**request_init) + # Designate an appropriate value for the returned response. + return_value = None # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListAccountSummariesResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListAccountSummariesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_account_summaries(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAccountSummariesPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_account_summaries_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Ensure method has been cached - assert ( - client._transport.list_account_summaries - in client._transport._wrapped_methods - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_account_summaries - ] = mock_rpc + response = client.delete_google_ads_link(request) - request = {} - client.list_account_summaries(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - client.list_account_summaries(request) +def test_delete_google_ads_link_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport.delete_google_ads_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_account_summaries_rest_interceptors(null_interceptor): +def test_delete_google_ads_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -59072,14 +69260,11 @@ def test_list_account_summaries_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_account_summaries" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_account_summaries" + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_google_ads_link" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.ListAccountSummariesRequest.pb( - analytics_admin.ListAccountSummariesRequest() + pb_message = analytics_admin.DeleteGoogleAdsLinkRequest.pb( + analytics_admin.DeleteGoogleAdsLinkRequest() ) transcode.return_value = { "method": "post", @@ -59088,24 +69273,18 @@ def test_list_account_summaries_rest_interceptors(null_interceptor): "query_params": pb_message, } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.ListAccountSummariesResponse.to_json( - 
analytics_admin.ListAccountSummariesResponse() - ) - ) - - request = analytics_admin.ListAccountSummariesRequest() + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = analytics_admin.DeleteGoogleAdsLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListAccountSummariesResponse() - client.list_account_summaries( + client.delete_google_ads_link( request, metadata=[ ("key", "val"), @@ -59114,11 +69293,10 @@ def test_list_account_summaries_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_list_account_summaries_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListAccountSummariesRequest +def test_delete_google_ads_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteGoogleAdsLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -59126,7 +69304,7 @@ def test_list_account_summaries_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"name": "properties/sample1/googleAdsLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -59138,129 +69316,110 @@ def test_list_account_summaries_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_account_summaries(request) + client.delete_google_ads_link(request) -def test_list_account_summaries_rest_pager(transport: str = "rest"): +def test_delete_google_ads_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListAccountSummariesResponse( - account_summaries=[ - resources.AccountSummary(), - resources.AccountSummary(), - resources.AccountSummary(), - ], - next_page_token="abc", - ), - analytics_admin.ListAccountSummariesResponse( - account_summaries=[], - next_page_token="def", - ), - analytics_admin.ListAccountSummariesResponse( - account_summaries=[ - resources.AccountSummary(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAccountSummariesResponse( - account_summaries=[ - resources.AccountSummary(), - resources.AccountSummary(), - ], - ), + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1/googleAdsLinks/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - # Two responses for two calls - response = response + response + mock_args.update(sample_request) - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListAccountSummariesResponse.to_json(x) for x in response + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_google_ads_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/googleAdsLinks/*}" % client.transport._host, + args[1], ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {} - pager = client.list_account_summaries(request=sample_request) +def test_delete_google_ads_link_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.AccountSummary) for i in results) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_google_ads_link( + analytics_admin.DeleteGoogleAdsLinkRequest(), + name="name_value", + ) - pages = list(client.list_account_summaries(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + +def test_delete_google_ads_link_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetPropertyRequest, + analytics_admin.ListGoogleAdsLinksRequest, dict, ], ) -def test_get_property_rest(request_type): +def test_list_google_ads_links_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and 
fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Property( - name="name_value", - property_type=resources.PropertyType.PROPERTY_TYPE_ORDINARY, - parent="parent_value", - display_name="display_name_value", - industry_category=resources.IndustryCategory.AUTOMOTIVE, - time_zone="time_zone_value", - currency_code="currency_code_value", - service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD, - account="account_value", + return_value = analytics_admin.ListGoogleAdsLinksResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Property.pb(return_value) + return_value = analytics_admin.ListGoogleAdsLinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_property(request) + response = client.list_google_ads_links(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Property) - assert response.name == "name_value" - assert response.property_type == resources.PropertyType.PROPERTY_TYPE_ORDINARY - assert response.parent == "parent_value" - assert response.display_name == "display_name_value" - assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE - assert response.time_zone == "time_zone_value" - assert response.currency_code == "currency_code_value" - assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD - assert response.account == "account_value" + assert isinstance(response, pagers.ListGoogleAdsLinksPager) + assert response.next_page_token == "next_page_token_value" -def test_get_property_rest_use_cached_wrapped_rpc(): +def test_list_google_ads_links_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -59274,35 +69433,40 @@ def test_get_property_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_property in client._transport._wrapped_methods + assert ( + client._transport.list_google_ads_links + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_property] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_google_ads_links + ] = mock_rpc request = {} - client.get_property(request) + client.list_google_ads_links(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_property(request) + client.list_google_ads_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_property_rest_required_fields( - request_type=analytics_admin.GetPropertyRequest, +def test_list_google_ads_links_rest_required_fields( + request_type=analytics_admin.ListGoogleAdsLinksRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -59313,21 +69477,28 @@ def test_get_property_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_property._get_unset_required_fields(jsonified_request) + ).list_google_ads_links._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_property._get_unset_required_fields(jsonified_request) + ).list_google_ads_links._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -59336,7 +69507,7 @@ def test_get_property_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Property() + return_value = analytics_admin.ListGoogleAdsLinksResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -59357,30 +69528,38 @@ def test_get_property_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Property.pb(return_value) + return_value = analytics_admin.ListGoogleAdsLinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_property(request) + response = client.list_google_ads_links(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_property_rest_unset_required_fields(): +def test_list_google_ads_links_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_property._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_google_ads_links._get_unset_required_fields({}) + 
assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_property_rest_interceptors(null_interceptor): +def test_list_google_ads_links_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -59393,14 +69572,14 @@ def test_get_property_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_property" + transports.AnalyticsAdminServiceRestInterceptor, "post_list_google_ads_links" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_property" + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_google_ads_links" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetPropertyRequest.pb( - analytics_admin.GetPropertyRequest() + pb_message = analytics_admin.ListGoogleAdsLinksRequest.pb( + analytics_admin.ListGoogleAdsLinksRequest() ) transcode.return_value = { "method": "post", @@ -59412,17 +69591,19 @@ def test_get_property_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Property.to_json(resources.Property()) + req.return_value._content = analytics_admin.ListGoogleAdsLinksResponse.to_json( + analytics_admin.ListGoogleAdsLinksResponse() + ) - request = analytics_admin.GetPropertyRequest() + request = analytics_admin.ListGoogleAdsLinksRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Property() + post.return_value = analytics_admin.ListGoogleAdsLinksResponse() - client.get_property( + 
client.list_google_ads_links( request, metadata=[ ("key", "val"), @@ -59434,8 +69615,8 @@ def test_get_property_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_property_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetPropertyRequest +def test_list_google_ads_links_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListGoogleAdsLinksRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -59443,7 +69624,7 @@ def test_get_property_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -59455,10 +69636,10 @@ def test_get_property_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_property(request) + client.list_google_ads_links(request) -def test_get_property_rest_flattened(): +def test_list_google_ads_links_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -59467,14 +69648,14 @@ def test_get_property_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Property() + return_value = analytics_admin.ListGoogleAdsLinksResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -59482,23 +69663,24 @@ def test_get_property_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Property.pb(return_value) + return_value = analytics_admin.ListGoogleAdsLinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_property(**mock_args) + client.list_google_ads_links(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*}" % client.transport._host, args[1] + "%s/v1alpha/{parent=properties/*}/googleAdsLinks" % client.transport._host, + args[1], ) -def test_get_property_rest_flattened_error(transport: str = "rest"): +def test_list_google_ads_links_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -59507,59 +69689,126 @@ def test_get_property_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_property( - analytics_admin.GetPropertyRequest(), - name="name_value", + client.list_google_ads_links( + analytics_admin.ListGoogleAdsLinksRequest(), + parent="parent_value", ) -def test_get_property_rest_error(): +def test_list_google_ads_links_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListGoogleAdsLinksResponse( + google_ads_links=[ + resources.GoogleAdsLink(), + resources.GoogleAdsLink(), + resources.GoogleAdsLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListGoogleAdsLinksResponse( + google_ads_links=[], + next_page_token="def", + ), + analytics_admin.ListGoogleAdsLinksResponse( + google_ads_links=[ + resources.GoogleAdsLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListGoogleAdsLinksResponse( + google_ads_links=[ + resources.GoogleAdsLink(), + resources.GoogleAdsLink(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListGoogleAdsLinksResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_google_ads_links(request=sample_request) + + results = list(pager) + assert 
len(results) == 6 + assert all(isinstance(i, resources.GoogleAdsLink) for i in results) + + pages = list(client.list_google_ads_links(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListPropertiesRequest, + analytics_admin.GetDataSharingSettingsRequest, dict, ], ) -def test_list_properties_rest(request_type): +def test_get_data_sharing_settings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"name": "accounts/sample1/dataSharingSettings"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListPropertiesResponse( - next_page_token="next_page_token_value", + return_value = resources.DataSharingSettings( + name="name_value", + sharing_with_google_support_enabled=True, + sharing_with_google_assigned_sales_enabled=True, + sharing_with_google_any_sales_enabled=True, + sharing_with_google_products_enabled=True, + sharing_with_others_enabled=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListPropertiesResponse.pb(return_value) + return_value = resources.DataSharingSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_properties(request) + response = client.get_data_sharing_settings(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPropertiesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.DataSharingSettings) + assert response.name == "name_value" + assert response.sharing_with_google_support_enabled is True + assert response.sharing_with_google_assigned_sales_enabled is True + assert response.sharing_with_google_any_sales_enabled is True + assert response.sharing_with_google_products_enabled is True + assert response.sharing_with_others_enabled is True -def test_list_properties_rest_use_cached_wrapped_rpc(): +def test_get_data_sharing_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -59573,35 +69822,40 @@ def test_list_properties_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_properties in client._transport._wrapped_methods + assert ( + client._transport.get_data_sharing_settings + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_properties] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_data_sharing_settings + ] = mock_rpc request = {} - client.list_properties(request) + client.get_data_sharing_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_properties(request) + client.get_data_sharing_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_properties_rest_required_fields( - request_type=analytics_admin.ListPropertiesRequest, +def test_get_data_sharing_settings_rest_required_fields( + request_type=analytics_admin.GetDataSharingSettingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["filter"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -59609,36 +69863,24 @@ def test_list_properties_rest_required_fields( ) # verify fields with default values are dropped - assert "filter" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_properties._get_unset_required_fields(jsonified_request) + ).get_data_sharing_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "filter" in jsonified_request - assert jsonified_request["filter"] == request_init["filter"] - jsonified_request["filter"] = "filter_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_properties._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - "show_deleted", - ) - ) + ).get_data_sharing_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -59647,7 +69889,7 @@ def test_list_properties_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListPropertiesResponse() + return_value = resources.DataSharingSettings() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -59668,46 +69910,30 @@ def test_list_properties_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListPropertiesResponse.pb(return_value) + return_value = resources.DataSharingSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_properties(request) + response = client.get_data_sharing_settings(request) - expected_params = [ - ( - "filter", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_properties_rest_unset_required_fields(): +def test_get_data_sharing_settings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_properties._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - "showDeleted", - ) - ) - & set(("filter",)) - ) + unset_fields = transport.get_data_sharing_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_properties_rest_interceptors(null_interceptor): +def test_get_data_sharing_settings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -59720,14 +69946,15 @@ def test_list_properties_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_properties" + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_data_sharing_settings", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_properties" + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_data_sharing_settings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListPropertiesRequest.pb( - analytics_admin.ListPropertiesRequest() + pb_message = analytics_admin.GetDataSharingSettingsRequest.pb( + analytics_admin.GetDataSharingSettingsRequest() ) transcode.return_value = { "method": "post", @@ -59739,19 +69966,19 @@ def test_list_properties_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.ListPropertiesResponse.to_json( - analytics_admin.ListPropertiesResponse() + req.return_value._content = resources.DataSharingSettings.to_json( + resources.DataSharingSettings() ) - 
request = analytics_admin.ListPropertiesRequest() + request = analytics_admin.GetDataSharingSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListPropertiesResponse() + post.return_value = resources.DataSharingSettings() - client.list_properties( + client.get_data_sharing_settings( request, metadata=[ ("key", "val"), @@ -59763,8 +69990,8 @@ def test_list_properties_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_properties_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListPropertiesRequest +def test_get_data_sharing_settings_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetDataSharingSettingsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -59772,7 +69999,7 @@ def test_list_properties_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"name": "accounts/sample1/dataSharingSettings"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -59784,211 +70011,118 @@ def test_list_properties_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_properties(request) + client.get_data_sharing_settings(request) -def test_list_properties_rest_pager(transport: str = "rest"): +def test_get_data_sharing_settings_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListPropertiesResponse( - properties=[ - resources.Property(), - resources.Property(), - resources.Property(), - ], - next_page_token="abc", - ), - analytics_admin.ListPropertiesResponse( - properties=[], - next_page_token="def", - ), - analytics_admin.ListPropertiesResponse( - properties=[ - resources.Property(), - ], - next_page_token="ghi", - ), - analytics_admin.ListPropertiesResponse( - properties=[ - resources.Property(), - resources.Property(), - ], - ), + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.DataSharingSettings() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "accounts/sample1/dataSharingSettings"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - # Two responses for two calls - response = response + response + mock_args.update(sample_request) - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListPropertiesResponse.to_json(x) for x in response + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.DataSharingSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_data_sharing_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=accounts/*/dataSharingSettings}" % client.transport._host, + args[1], ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {} - pager = client.list_properties(request=sample_request) +def test_get_data_sharing_settings_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Property) for i in results) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_sharing_settings( + analytics_admin.GetDataSharingSettingsRequest(), + name="name_value", + ) - pages = list(client.list_properties(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + +def test_get_data_sharing_settings_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreatePropertyRequest, + analytics_admin.GetMeasurementProtocolSecretRequest, dict, ], ) -def test_create_property_rest(request_type): +def test_get_measurement_protocol_secret_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {} - request_init["property"] = { - "name": "name_value", - "property_type": 1, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - 
"parent": "parent_value", - "display_name": "display_name_value", - "industry_category": 1, - "time_zone": "time_zone_value", - "currency_code": "currency_code_value", - "service_level": 1, - "delete_time": {}, - "expire_time": {}, - "account": "account_value", + request_init = { + "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreatePropertyRequest.meta.fields["property"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["property"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated 
= True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["property"][field])): - del request_init["property"][field][i][subfield] - else: - del request_init["property"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Property( + return_value = resources.MeasurementProtocolSecret( name="name_value", - property_type=resources.PropertyType.PROPERTY_TYPE_ORDINARY, - parent="parent_value", display_name="display_name_value", - industry_category=resources.IndustryCategory.AUTOMOTIVE, - time_zone="time_zone_value", - currency_code="currency_code_value", - service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD, - account="account_value", + secret_value="secret_value_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Property.pb(return_value) + return_value = resources.MeasurementProtocolSecret.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_property(request) + response = client.get_measurement_protocol_secret(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Property) + assert isinstance(response, resources.MeasurementProtocolSecret) assert response.name == "name_value" - assert response.property_type == resources.PropertyType.PROPERTY_TYPE_ORDINARY - assert response.parent == "parent_value" assert response.display_name == "display_name_value" - assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE - assert response.time_zone == "time_zone_value" - assert response.currency_code == "currency_code_value" - assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD - assert response.account == "account_value" + assert response.secret_value == "secret_value_value" -def test_create_property_rest_use_cached_wrapped_rpc(): +def test_get_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -60002,34 +70136,40 @@ def test_create_property_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_property in client._transport._wrapped_methods + assert ( + client._transport.get_measurement_protocol_secret + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_property] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_measurement_protocol_secret + ] = mock_rpc request = {} - client.create_property(request) + client.get_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_property(request) + client.get_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_property_rest_required_fields( - request_type=analytics_admin.CreatePropertyRequest, +def test_get_measurement_protocol_secret_rest_required_fields( + request_type=analytics_admin.GetMeasurementProtocolSecretRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -60040,17 +70180,21 @@ def test_create_property_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_property._get_unset_required_fields(jsonified_request) + ).get_measurement_protocol_secret._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_property._get_unset_required_fields(jsonified_request) + ).get_measurement_protocol_secret._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -60059,7 +70203,7 @@ def test_create_property_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Property() + return_value = resources.MeasurementProtocolSecret() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -60071,40 +70215,41 @@ def test_create_property_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Property.pb(return_value) + return_value = resources.MeasurementProtocolSecret.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_property(request) + response = client.get_measurement_protocol_secret(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_property_rest_unset_required_fields(): +def test_get_measurement_protocol_secret_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_property._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("property",))) + unset_fields = transport.get_measurement_protocol_secret._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_property_rest_interceptors(null_interceptor): +def test_get_measurement_protocol_secret_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -60117,14 +70262,16 @@ def test_create_property_rest_interceptors(null_interceptor): ) as 
req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_property" + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_measurement_protocol_secret", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_property" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_get_measurement_protocol_secret", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreatePropertyRequest.pb( - analytics_admin.CreatePropertyRequest() + pb_message = analytics_admin.GetMeasurementProtocolSecretRequest.pb( + analytics_admin.GetMeasurementProtocolSecretRequest() ) transcode.return_value = { "method": "post", @@ -60136,17 +70283,19 @@ def test_create_property_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Property.to_json(resources.Property()) + req.return_value._content = resources.MeasurementProtocolSecret.to_json( + resources.MeasurementProtocolSecret() + ) - request = analytics_admin.CreatePropertyRequest() + request = analytics_admin.GetMeasurementProtocolSecretRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Property() + post.return_value = resources.MeasurementProtocolSecret() - client.create_property( + client.get_measurement_protocol_secret( request, metadata=[ ("key", "val"), @@ -60158,8 +70307,9 @@ def test_create_property_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_property_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreatePropertyRequest +def test_get_measurement_protocol_secret_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.GetMeasurementProtocolSecretRequest, ): client = 
AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -60167,7 +70317,9 @@ def test_create_property_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {} + request_init = { + "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -60179,10 +70331,10 @@ def test_create_property_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_property(request) + client.get_measurement_protocol_secret(request) -def test_create_property_rest_flattened(): +def test_get_measurement_protocol_secret_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -60191,14 +70343,16 @@ def test_create_property_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Property() + return_value = resources.MeasurementProtocolSecret() # get arguments that satisfy an http rule for this method - sample_request = {} + sample_request = { + "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" + } # get truthy value for each flattened field mock_args = dict( - property=resources.Property(name="name_value"), + name="name_value", ) mock_args.update(sample_request) @@ -60206,23 +70360,25 @@ def test_create_property_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Property.pb(return_value) + return_value = resources.MeasurementProtocolSecret.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_property(**mock_args) + client.get_measurement_protocol_secret(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/properties" % client.transport._host, args[1] + "%s/v1alpha/{name=properties/*/dataStreams/*/measurementProtocolSecrets/*}" + % client.transport._host, + args[1], ) -def test_create_property_rest_flattened_error(transport: str = "rest"): +def test_get_measurement_protocol_secret_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -60231,13 +70387,13 @@ def test_create_property_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_property( - analytics_admin.CreatePropertyRequest(), - property=resources.Property(name="name_value"), + client.get_measurement_protocol_secret( + analytics_admin.GetMeasurementProtocolSecretRequest(), + name="name_value", ) -def test_create_property_rest_error(): +def test_get_measurement_protocol_secret_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -60246,60 +70402,46 @@ def test_create_property_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeletePropertyRequest, + analytics_admin.ListMeasurementProtocolSecretsRequest, dict, ], ) -def test_delete_property_rest(request_type): +def test_list_measurement_protocol_secrets_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1"} + request_init = {"parent": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Property( - name="name_value", - property_type=resources.PropertyType.PROPERTY_TYPE_ORDINARY, - parent="parent_value", - display_name="display_name_value", - industry_category=resources.IndustryCategory.AUTOMOTIVE, - time_zone="time_zone_value", - currency_code="currency_code_value", - service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD, - account="account_value", + return_value = analytics_admin.ListMeasurementProtocolSecretsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Property.pb(return_value) + return_value = analytics_admin.ListMeasurementProtocolSecretsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_property(request) + response = client.list_measurement_protocol_secrets(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Property) - assert response.name == "name_value" - assert response.property_type == resources.PropertyType.PROPERTY_TYPE_ORDINARY - assert response.parent == "parent_value" - assert response.display_name == "display_name_value" - assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE - assert response.time_zone == "time_zone_value" - assert response.currency_code == "currency_code_value" - assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD - assert response.account == "account_value" + assert isinstance(response, pagers.ListMeasurementProtocolSecretsPager) + assert response.next_page_token == "next_page_token_value" -def test_delete_property_rest_use_cached_wrapped_rpc(): +def test_list_measurement_protocol_secrets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -60313,35 +70455,40 @@ def test_delete_property_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_property in client._transport._wrapped_methods + assert ( + client._transport.list_measurement_protocol_secrets + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_property] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_measurement_protocol_secrets + ] = mock_rpc request = {} - client.delete_property(request) + client.list_measurement_protocol_secrets(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_property(request) + client.list_measurement_protocol_secrets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_property_rest_required_fields( - request_type=analytics_admin.DeletePropertyRequest, +def test_list_measurement_protocol_secrets_rest_required_fields( + request_type=analytics_admin.ListMeasurementProtocolSecretsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -60352,21 +70499,28 @@ def test_delete_property_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_property._get_unset_required_fields(jsonified_request) + ).list_measurement_protocol_secrets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_property._get_unset_required_fields(jsonified_request) + ).list_measurement_protocol_secrets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -60375,7 +70529,7 @@ def test_delete_property_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Property() + return_value = analytics_admin.ListMeasurementProtocolSecretsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -60387,7 +70541,7 @@ def test_delete_property_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result @@ -60396,30 +70550,42 @@ def test_delete_property_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Property.pb(return_value) + return_value = analytics_admin.ListMeasurementProtocolSecretsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_property(request) + response = client.list_measurement_protocol_secrets(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_property_rest_unset_required_fields(): +def 
test_list_measurement_protocol_secrets_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_property._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = ( + transport.list_measurement_protocol_secrets._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_property_rest_interceptors(null_interceptor): +def test_list_measurement_protocol_secrets_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -60432,14 +70598,16 @@ def test_delete_property_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_delete_property" + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_measurement_protocol_secrets", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_property" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_list_measurement_protocol_secrets", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.DeletePropertyRequest.pb( - analytics_admin.DeletePropertyRequest() + pb_message = analytics_admin.ListMeasurementProtocolSecretsRequest.pb( + analytics_admin.ListMeasurementProtocolSecretsRequest() ) transcode.return_value = { "method": "post", @@ -60451,17 +70619,21 @@ def test_delete_property_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = 
resources.Property.to_json(resources.Property()) + req.return_value._content = ( + analytics_admin.ListMeasurementProtocolSecretsResponse.to_json( + analytics_admin.ListMeasurementProtocolSecretsResponse() + ) + ) - request = analytics_admin.DeletePropertyRequest() + request = analytics_admin.ListMeasurementProtocolSecretsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Property() + post.return_value = analytics_admin.ListMeasurementProtocolSecretsResponse() - client.delete_property( + client.list_measurement_protocol_secrets( request, metadata=[ ("key", "val"), @@ -60473,8 +70645,9 @@ def test_delete_property_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_property_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeletePropertyRequest +def test_list_measurement_protocol_secrets_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.ListMeasurementProtocolSecretsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -60482,7 +70655,7 @@ def test_delete_property_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1"} + request_init = {"parent": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -60494,10 +70667,10 @@ def test_delete_property_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_property(request) + client.list_measurement_protocol_secrets(request) -def test_delete_property_rest_flattened(): +def test_list_measurement_protocol_secrets_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -60506,14 +70679,14 @@ def test_delete_property_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Property() + return_value = analytics_admin.ListMeasurementProtocolSecretsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1"} + sample_request = {"parent": "properties/sample1/dataStreams/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -60521,23 +70694,29 @@ def test_delete_property_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Property.pb(return_value) + return_value = analytics_admin.ListMeasurementProtocolSecretsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_property(**mock_args) + client.list_measurement_protocol_secrets(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*}" % client.transport._host, args[1] + "%s/v1alpha/{parent=properties/*/dataStreams/*}/measurementProtocolSecrets" + % client.transport._host, + args[1], ) -def test_delete_property_rest_flattened_error(transport: str = "rest"): +def test_list_measurement_protocol_secrets_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -60546,54 +70725,106 @@ def test_delete_property_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_property( - analytics_admin.DeletePropertyRequest(), - name="name_value", + client.list_measurement_protocol_secrets( + analytics_admin.ListMeasurementProtocolSecretsRequest(), + parent="parent_value", ) -def test_delete_property_rest_error(): +def test_list_measurement_protocol_secrets_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListMeasurementProtocolSecretsResponse( + measurement_protocol_secrets=[ + resources.MeasurementProtocolSecret(), + resources.MeasurementProtocolSecret(), + resources.MeasurementProtocolSecret(), + ], + next_page_token="abc", + ), + analytics_admin.ListMeasurementProtocolSecretsResponse( + measurement_protocol_secrets=[], + next_page_token="def", + ), + analytics_admin.ListMeasurementProtocolSecretsResponse( + measurement_protocol_secrets=[ + resources.MeasurementProtocolSecret(), + ], + next_page_token="ghi", + ), + analytics_admin.ListMeasurementProtocolSecretsResponse( + measurement_protocol_secrets=[ + resources.MeasurementProtocolSecret(), + resources.MeasurementProtocolSecret(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListMeasurementProtocolSecretsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1/dataStreams/sample2"} + + pager = client.list_measurement_protocol_secrets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.MeasurementProtocolSecret) for i in results) + + pages = list( + client.list_measurement_protocol_secrets(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdatePropertyRequest, + analytics_admin.CreateMeasurementProtocolSecretRequest, dict, ], ) -def 
test_update_property_rest(request_type): +def test_create_measurement_protocol_secret_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"property": {"name": "properties/sample1"}} - request_init["property"] = { - "name": "properties/sample1", - "property_type": 1, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "parent": "parent_value", + request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request_init["measurement_protocol_secret"] = { + "name": "name_value", "display_name": "display_name_value", - "industry_category": 1, - "time_zone": "time_zone_value", - "currency_code": "currency_code_value", - "service_level": 1, - "delete_time": {}, - "expire_time": {}, - "account": "account_value", + "secret_value": "secret_value_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdatePropertyRequest.meta.fields["property"] + test_field = analytics_admin.CreateMeasurementProtocolSecretRequest.meta.fields[ + "measurement_protocol_secret" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -60621,7 +70852,9 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["property"].items(): # pragma: NO COVER + for field, value in request_init[ + "measurement_protocol_secret" + ].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -60651,52 +70884,42 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["property"][field])): - del request_init["property"][field][i][subfield] + for i in range( + 0, len(request_init["measurement_protocol_secret"][field]) + ): + del request_init["measurement_protocol_secret"][field][i][subfield] else: - del request_init["property"][field][subfield] + del request_init["measurement_protocol_secret"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Property( + return_value = resources.MeasurementProtocolSecret( name="name_value", - property_type=resources.PropertyType.PROPERTY_TYPE_ORDINARY, - parent="parent_value", display_name="display_name_value", - industry_category=resources.IndustryCategory.AUTOMOTIVE, - time_zone="time_zone_value", - currency_code="currency_code_value", - service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD, - account="account_value", + secret_value="secret_value_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Property.pb(return_value) + return_value = resources.MeasurementProtocolSecret.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_property(request) + response = client.create_measurement_protocol_secret(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Property) + assert isinstance(response, resources.MeasurementProtocolSecret) assert response.name == "name_value" - assert response.property_type == resources.PropertyType.PROPERTY_TYPE_ORDINARY - assert response.parent == "parent_value" assert response.display_name == "display_name_value" - assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE - assert response.time_zone == "time_zone_value" - assert response.currency_code == "currency_code_value" - assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD - assert response.account == "account_value" + assert response.secret_value == "secret_value_value" -def test_update_property_rest_use_cached_wrapped_rpc(): +def test_create_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -60710,34 +70933,40 @@ def test_update_property_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_property in client._transport._wrapped_methods + assert ( + client._transport.create_measurement_protocol_secret + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_property] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_measurement_protocol_secret + ] = mock_rpc request = {} - client.update_property(request) + client.create_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_property(request) + client.create_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_property_rest_required_fields( - request_type=analytics_admin.UpdatePropertyRequest, +def test_create_measurement_protocol_secret_rest_required_fields( + request_type=analytics_admin.CreateMeasurementProtocolSecretRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -60748,19 +70977,21 @@ def test_update_property_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_property._get_unset_required_fields(jsonified_request) + ).create_measurement_protocol_secret._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_property._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).create_measurement_protocol_secret._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -60769,7 +71000,7 @@ def test_update_property_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.Property() + return_value = resources.MeasurementProtocolSecret() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -60781,7 +71012,7 @@ def test_update_property_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -60791,38 +71022,40 @@ def test_update_property_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Property.pb(return_value) + return_value = resources.MeasurementProtocolSecret.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_property(request) + response = client.create_measurement_protocol_secret(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_property_rest_unset_required_fields(): +def test_create_measurement_protocol_secret_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_property._get_unset_required_fields({}) + unset_fields = ( + transport.create_measurement_protocol_secret._get_unset_required_fields({}) + ) assert set(unset_fields) == ( - set(("updateMask",)) + set(()) & set( ( - "property", - "updateMask", + "parent", + "measurementProtocolSecret", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_property_rest_interceptors(null_interceptor): +def test_create_measurement_protocol_secret_rest_interceptors(null_interceptor): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -60835,14 +71068,16 @@ def test_update_property_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_update_property" + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_measurement_protocol_secret", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_property" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_create_measurement_protocol_secret", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdatePropertyRequest.pb( - analytics_admin.UpdatePropertyRequest() + pb_message = analytics_admin.CreateMeasurementProtocolSecretRequest.pb( + analytics_admin.CreateMeasurementProtocolSecretRequest() ) transcode.return_value = { "method": "post", @@ -60854,17 +71089,19 @@ def test_update_property_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Property.to_json(resources.Property()) + req.return_value._content = resources.MeasurementProtocolSecret.to_json( + resources.MeasurementProtocolSecret() + ) - request = analytics_admin.UpdatePropertyRequest() + request = analytics_admin.CreateMeasurementProtocolSecretRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Property() + post.return_value = resources.MeasurementProtocolSecret() - client.update_property( + client.create_measurement_protocol_secret( request, metadata=[ ("key", "val"), @@ -60876,8 +71113,9 @@ def test_update_property_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_property_rest_bad_request( - transport: str = 
"rest", request_type=analytics_admin.UpdatePropertyRequest +def test_create_measurement_protocol_secret_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.CreateMeasurementProtocolSecretRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -60885,7 +71123,7 @@ def test_update_property_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"property": {"name": "properties/sample1"}} + request_init = {"parent": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -60897,10 +71135,10 @@ def test_update_property_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_property(request) + client.create_measurement_protocol_secret(request) -def test_update_property_rest_flattened(): +def test_create_measurement_protocol_secret_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -60909,15 +71147,17 @@ def test_update_property_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Property() + return_value = resources.MeasurementProtocolSecret() # get arguments that satisfy an http rule for this method - sample_request = {"property": {"name": "properties/sample1"}} + sample_request = {"parent": "properties/sample1/dataStreams/sample2"} # get truthy value for each flattened field mock_args = dict( - property=resources.Property(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + measurement_protocol_secret=resources.MeasurementProtocolSecret( + name="name_value" + ), ) mock_args.update(sample_request) @@ -60925,23 +71165,27 @@ def test_update_property_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Property.pb(return_value) + return_value = resources.MeasurementProtocolSecret.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_property(**mock_args) + client.create_measurement_protocol_secret(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{property.name=properties/*}" % client.transport._host, args[1] + "%s/v1alpha/{parent=properties/*/dataStreams/*}/measurementProtocolSecrets" + % client.transport._host, + args[1], ) -def test_update_property_rest_flattened_error(transport: str = "rest"): +def test_create_measurement_protocol_secret_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -60950,14 +71194,16 @@ def test_update_property_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_property( - analytics_admin.UpdatePropertyRequest(), - property=resources.Property(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_measurement_protocol_secret( + analytics_admin.CreateMeasurementProtocolSecretRequest(), + parent="parent_value", + measurement_protocol_secret=resources.MeasurementProtocolSecret( + name="name_value" + ), ) -def test_update_property_rest_error(): +def test_create_measurement_protocol_secret_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -60966,118 +71212,41 @@ def test_update_property_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateFirebaseLinkRequest, + analytics_admin.DeleteMeasurementProtocolSecretRequest, dict, ], ) -def test_create_firebase_link_rest(request_type): +def test_delete_measurement_protocol_secret_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["firebase_link"] = { - "name": 
"name_value", - "project": "project_value", - "create_time": {"seconds": 751, "nanos": 543}, + request_init = { + "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateFirebaseLinkRequest.meta.fields["firebase_link"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["firebase_link"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and 
hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["firebase_link"][field])): - del request_init["firebase_link"][field][i][subfield] - else: - del request_init["firebase_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.FirebaseLink( - name="name_value", - project="project_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.FirebaseLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_firebase_link(request) + response = client.delete_measurement_protocol_secret(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.FirebaseLink) - assert response.name == "name_value" - assert response.project == "project_value" + assert response is None -def test_create_firebase_link_rest_use_cached_wrapped_rpc(): +def test_delete_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -61092,7 +71261,8 @@ def test_create_firebase_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_firebase_link in client._transport._wrapped_methods + client._transport.delete_measurement_protocol_secret + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -61101,29 +71271,29 @@ def test_create_firebase_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_firebase_link + client._transport.delete_measurement_protocol_secret ] = mock_rpc request = {} - client.create_firebase_link(request) + client.delete_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_firebase_link(request) + client.delete_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_firebase_link_rest_required_fields( - request_type=analytics_admin.CreateFirebaseLinkRequest, +def test_delete_measurement_protocol_secret_rest_required_fields( + request_type=analytics_admin.DeleteMeasurementProtocolSecretRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -61134,21 +71304,21 @@ def test_create_firebase_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_firebase_link._get_unset_required_fields(jsonified_request) + ).delete_measurement_protocol_secret._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_firebase_link._get_unset_required_fields(jsonified_request) + ).delete_measurement_protocol_secret._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -61157,7 +71327,7 @@ def test_create_firebase_link_rest_required_fields( request = request_type(**request_init) # Designate an 
appropriate value for the returned response. - return_value = resources.FirebaseLink() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -61169,48 +71339,38 @@ def test_create_firebase_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.FirebaseLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_firebase_link(request) + response = client.delete_measurement_protocol_secret(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_firebase_link_rest_unset_required_fields(): +def test_delete_measurement_protocol_secret_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_firebase_link._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "firebaseLink", - ) - ) + unset_fields = ( + transport.delete_measurement_protocol_secret._get_unset_required_fields({}) ) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_firebase_link_rest_interceptors(null_interceptor): +def test_delete_measurement_protocol_secret_rest_interceptors(null_interceptor): 
transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -61223,14 +71383,12 @@ def test_create_firebase_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_firebase_link" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_firebase_link" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_delete_measurement_protocol_secret", ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.CreateFirebaseLinkRequest.pb( - analytics_admin.CreateFirebaseLinkRequest() + pb_message = analytics_admin.DeleteMeasurementProtocolSecretRequest.pb( + analytics_admin.DeleteMeasurementProtocolSecretRequest() ) transcode.return_value = { "method": "post", @@ -61242,19 +71400,15 @@ def test_create_firebase_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.FirebaseLink.to_json( - resources.FirebaseLink() - ) - request = analytics_admin.CreateFirebaseLinkRequest() + request = analytics_admin.DeleteMeasurementProtocolSecretRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.FirebaseLink() - client.create_firebase_link( + client.delete_measurement_protocol_secret( request, metadata=[ ("key", "val"), @@ -61263,11 +71417,11 @@ def test_create_firebase_link_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_create_firebase_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateFirebaseLinkRequest +def test_delete_measurement_protocol_secret_rest_bad_request( + transport: str = "rest", + 
request_type=analytics_admin.DeleteMeasurementProtocolSecretRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -61275,7 +71429,9 @@ def test_create_firebase_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = { + "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -61287,10 +71443,10 @@ def test_create_firebase_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_firebase_link(request) + client.delete_measurement_protocol_secret(request) -def test_create_firebase_link_rest_flattened(): +def test_delete_measurement_protocol_secret_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -61299,40 +71455,42 @@ def test_create_firebase_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.FirebaseLink() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = { + "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - firebase_link=resources.FirebaseLink(name="name_value"), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.FirebaseLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_firebase_link(**mock_args) + client.delete_measurement_protocol_secret(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/firebaseLinks" % client.transport._host, + "%s/v1alpha/{name=properties/*/dataStreams/*/measurementProtocolSecrets/*}" + % client.transport._host, args[1], ) -def test_create_firebase_link_rest_flattened_error(transport: str = "rest"): +def test_delete_measurement_protocol_secret_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -61341,14 +71499,13 @@ def test_create_firebase_link_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_firebase_link( - analytics_admin.CreateFirebaseLinkRequest(), - parent="parent_value", - firebase_link=resources.FirebaseLink(name="name_value"), + client.delete_measurement_protocol_secret( + analytics_admin.DeleteMeasurementProtocolSecretRequest(), + name="name_value", ) -def test_create_firebase_link_rest_error(): +def test_delete_measurement_protocol_secret_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -61357,39 +71514,130 @@ def test_create_firebase_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteFirebaseLinkRequest, + analytics_admin.UpdateMeasurementProtocolSecretRequest, dict, ], ) -def test_delete_firebase_link_rest(request_type): +def test_update_measurement_protocol_secret_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/firebaseLinks/sample2"} + request_init = { + "measurement_protocol_secret": { + "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" + } + } + request_init["measurement_protocol_secret"] = { + "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3", + "display_name": "display_name_value", + "secret_value": "secret_value_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateMeasurementProtocolSecretRequest.meta.fields[ + "measurement_protocol_secret" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "measurement_protocol_secret" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample 
request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["measurement_protocol_secret"][field]) + ): + del request_init["measurement_protocol_secret"][field][i][subfield] + else: + del request_init["measurement_protocol_secret"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.MeasurementProtocolSecret( + name="name_value", + display_name="display_name_value", + secret_value="secret_value_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.MeasurementProtocolSecret.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_firebase_link(request) + response = client.update_measurement_protocol_secret(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.MeasurementProtocolSecret) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.secret_value == "secret_value_value" -def test_delete_firebase_link_rest_use_cached_wrapped_rpc(): +def test_update_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -61404,7 +71652,8 @@ def test_delete_firebase_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_firebase_link in client._transport._wrapped_methods + client._transport.update_measurement_protocol_secret + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -61413,29 +71662,28 @@ def test_delete_firebase_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_firebase_link + client._transport.update_measurement_protocol_secret ] = mock_rpc request = {} - client.delete_firebase_link(request) + client.update_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_firebase_link(request) + client.update_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_firebase_link_rest_required_fields( - request_type=analytics_admin.DeleteFirebaseLinkRequest, +def test_update_measurement_protocol_secret_rest_required_fields( + request_type=analytics_admin.UpdateMeasurementProtocolSecretRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -61446,21 +71694,19 @@ def test_delete_firebase_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_firebase_link._get_unset_required_fields(jsonified_request) + ).update_measurement_protocol_secret._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_firebase_link._get_unset_required_fields(jsonified_request) + ).update_measurement_protocol_secret._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -61469,7 +71715,7 @@ def test_delete_firebase_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.MeasurementProtocolSecret() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -61481,36 +71727,50 @@ def test_delete_firebase_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.MeasurementProtocolSecret.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_firebase_link(request) + response = client.update_measurement_protocol_secret(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_firebase_link_rest_unset_required_fields(): +def test_update_measurement_protocol_secret_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.delete_firebase_link._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = ( + transport.update_measurement_protocol_secret._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "measurementProtocolSecret", + "updateMask", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_firebase_link_rest_interceptors(null_interceptor): +def test_update_measurement_protocol_secret_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -61523,11 +71783,16 @@ def test_delete_firebase_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_firebase_link" + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_measurement_protocol_secret", + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "pre_update_measurement_protocol_secret", ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteFirebaseLinkRequest.pb( - analytics_admin.DeleteFirebaseLinkRequest() + post.assert_not_called() + pb_message = analytics_admin.UpdateMeasurementProtocolSecretRequest.pb( + analytics_admin.UpdateMeasurementProtocolSecretRequest() ) transcode.return_value = { "method": "post", @@ -61539,15 +71804,19 @@ def test_delete_firebase_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.MeasurementProtocolSecret.to_json( + resources.MeasurementProtocolSecret() + ) - request = analytics_admin.DeleteFirebaseLinkRequest() + request = analytics_admin.UpdateMeasurementProtocolSecretRequest() metadata = [ ("key", "val"), 
("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.MeasurementProtocolSecret() - client.delete_firebase_link( + client.update_measurement_protocol_secret( request, metadata=[ ("key", "val"), @@ -61556,10 +71825,12 @@ def test_delete_firebase_link_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_firebase_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeleteFirebaseLinkRequest +def test_update_measurement_protocol_secret_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.UpdateMeasurementProtocolSecretRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -61567,7 +71838,11 @@ def test_delete_firebase_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/firebaseLinks/sample2"} + request_init = { + "measurement_protocol_secret": { + "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -61579,10 +71854,10 @@ def test_delete_firebase_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_firebase_link(request) + client.update_measurement_protocol_secret(request) -def test_delete_firebase_link_rest_flattened(): +def test_update_measurement_protocol_secret_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -61591,37 +71866,49 @@ def test_delete_firebase_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.MeasurementProtocolSecret() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/firebaseLinks/sample2"} + sample_request = { + "measurement_protocol_secret": { + "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + measurement_protocol_secret=resources.MeasurementProtocolSecret( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.MeasurementProtocolSecret.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_firebase_link(**mock_args) + client.update_measurement_protocol_secret(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/firebaseLinks/*}" % client.transport._host, + "%s/v1alpha/{measurement_protocol_secret.name=properties/*/dataStreams/*/measurementProtocolSecrets/*}" + % client.transport._host, args[1], ) -def test_delete_firebase_link_rest_flattened_error(transport: str = "rest"): +def test_update_measurement_protocol_secret_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -61630,13 +71917,16 @@ def test_delete_firebase_link_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_firebase_link( - analytics_admin.DeleteFirebaseLinkRequest(), - name="name_value", + client.update_measurement_protocol_secret( + analytics_admin.UpdateMeasurementProtocolSecretRequest(), + measurement_protocol_secret=resources.MeasurementProtocolSecret( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_firebase_link_rest_error(): +def test_update_measurement_protocol_secret_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -61645,44 +71935,43 @@ def test_delete_firebase_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListFirebaseLinksRequest, + analytics_admin.AcknowledgeUserDataCollectionRequest, dict, ], ) -def test_list_firebase_links_rest(request_type): +def test_acknowledge_user_data_collection_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"property": "properties/sample1"} 
request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListFirebaseLinksResponse( - next_page_token="next_page_token_value", - ) + return_value = analytics_admin.AcknowledgeUserDataCollectionResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListFirebaseLinksResponse.pb(return_value) + return_value = analytics_admin.AcknowledgeUserDataCollectionResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_firebase_links(request) + response = client.acknowledge_user_data_collection(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListFirebaseLinksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_admin.AcknowledgeUserDataCollectionResponse) -def test_list_firebase_links_rest_use_cached_wrapped_rpc(): +def test_acknowledge_user_data_collection_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -61697,7 +71986,8 @@ def test_list_firebase_links_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_firebase_links in client._transport._wrapped_methods + client._transport.acknowledge_user_data_collection + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -61706,29 +71996,30 @@ def test_list_firebase_links_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_firebase_links + client._transport.acknowledge_user_data_collection ] = mock_rpc request = {} - client.list_firebase_links(request) + client.acknowledge_user_data_collection(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_firebase_links(request) + client.acknowledge_user_data_collection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_firebase_links_rest_required_fields( - request_type=analytics_admin.ListFirebaseLinksRequest, +def test_acknowledge_user_data_collection_rest_required_fields( + request_type=analytics_admin.AcknowledgeUserDataCollectionRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["property"] = "" + request_init["acknowledgement"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -61739,28 +72030,24 @@ def test_list_firebase_links_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_firebase_links._get_unset_required_fields(jsonified_request) + ).acknowledge_user_data_collection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["property"] = "property_value" + jsonified_request["acknowledgement"] = "acknowledgement_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_firebase_links._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).acknowledge_user_data_collection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "property" in jsonified_request + assert jsonified_request["property"] == "property_value" + assert "acknowledgement" in jsonified_request + assert jsonified_request["acknowledgement"] == "acknowledgement_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -61769,7 +72056,7 @@ def test_list_firebase_links_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListFirebaseLinksResponse() + return_value = analytics_admin.AcknowledgeUserDataCollectionResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -61781,47 +72068,52 @@ def test_list_firebase_links_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListFirebaseLinksResponse.pb(return_value) + return_value = analytics_admin.AcknowledgeUserDataCollectionResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_firebase_links(request) + response = client.acknowledge_user_data_collection(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_firebase_links_rest_unset_required_fields(): +def test_acknowledge_user_data_collection_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_firebase_links._get_unset_required_fields({}) + unset_fields = ( + transport.acknowledge_user_data_collection._get_unset_required_fields({}) + ) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "pageSize", - "pageToken", + "property", + "acknowledgement", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_firebase_links_rest_interceptors(null_interceptor): +def test_acknowledge_user_data_collection_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None @@ -61834,14 +72126,16 @@ def test_list_firebase_links_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_firebase_links" + transports.AnalyticsAdminServiceRestInterceptor, + "post_acknowledge_user_data_collection", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_firebase_links" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_acknowledge_user_data_collection", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListFirebaseLinksRequest.pb( - analytics_admin.ListFirebaseLinksRequest() + pb_message = analytics_admin.AcknowledgeUserDataCollectionRequest.pb( + analytics_admin.AcknowledgeUserDataCollectionRequest() ) transcode.return_value = { "method": "post", @@ -61853,19 +72147,21 @@ def test_list_firebase_links_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.ListFirebaseLinksResponse.to_json( - analytics_admin.ListFirebaseLinksResponse() + req.return_value._content = ( + analytics_admin.AcknowledgeUserDataCollectionResponse.to_json( + analytics_admin.AcknowledgeUserDataCollectionResponse() + ) ) - request = analytics_admin.ListFirebaseLinksRequest() + request = analytics_admin.AcknowledgeUserDataCollectionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListFirebaseLinksResponse() + post.return_value = analytics_admin.AcknowledgeUserDataCollectionResponse() - client.list_firebase_links( + client.acknowledge_user_data_collection( request, metadata=[ ("key", "val"), @@ -61877,8 +72173,9 @@ def test_list_firebase_links_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_list_firebase_links_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListFirebaseLinksRequest +def test_acknowledge_user_data_collection_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.AcknowledgeUserDataCollectionRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -61886,7 +72183,7 @@ def test_list_firebase_links_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"property": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -61898,171 +72195,60 @@ def test_list_firebase_links_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_firebase_links(request) - - -def test_list_firebase_links_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListFirebaseLinksResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListFirebaseLinksResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_firebase_links(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/firebaseLinks" % client.transport._host, - args[1], - ) - - -def test_list_firebase_links_rest_flattened_error(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_firebase_links( - analytics_admin.ListFirebaseLinksRequest(), - parent="parent_value", - ) + client.acknowledge_user_data_collection(request) -def test_list_firebase_links_rest_pager(transport: str = "rest"): +def test_acknowledge_user_data_collection_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListFirebaseLinksResponse( - firebase_links=[ - resources.FirebaseLink(), - resources.FirebaseLink(), - resources.FirebaseLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListFirebaseLinksResponse( - firebase_links=[], - next_page_token="def", - ), - analytics_admin.ListFirebaseLinksResponse( - firebase_links=[ - resources.FirebaseLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListFirebaseLinksResponse( - firebase_links=[ - resources.FirebaseLink(), - resources.FirebaseLink(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListFirebaseLinksResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_firebase_links(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.FirebaseLink) for i in results) - - pages = list(client.list_firebase_links(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetGlobalSiteTagRequest, + analytics_admin.GetSKAdNetworkConversionValueSchemaRequest, dict, ], ) -def test_get_global_site_tag_rest(request_type): +def test_get_sk_ad_network_conversion_value_schema_rest(request_type): client = AnalyticsAdminServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/dataStreams/sample2/globalSiteTag"} + request_init = { + "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.GlobalSiteTag( + return_value = resources.SKAdNetworkConversionValueSchema( name="name_value", - snippet="snippet_value", + apply_conversion_values=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.GlobalSiteTag.pb(return_value) + return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_global_site_tag(request) + response = client.get_sk_ad_network_conversion_value_schema(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.GlobalSiteTag) + assert isinstance(response, resources.SKAdNetworkConversionValueSchema) assert response.name == "name_value" - assert response.snippet == "snippet_value" + assert response.apply_conversion_values is True -def test_get_global_site_tag_rest_use_cached_wrapped_rpc(): +def test_get_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -62077,7 +72263,8 @@ def test_get_global_site_tag_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_global_site_tag in client._transport._wrapped_methods + client._transport.get_sk_ad_network_conversion_value_schema + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -62086,24 +72273,24 @@ def test_get_global_site_tag_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_global_site_tag + client._transport.get_sk_ad_network_conversion_value_schema ] = mock_rpc request = {} - client.get_global_site_tag(request) + client.get_sk_ad_network_conversion_value_schema(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_global_site_tag(request) + client.get_sk_ad_network_conversion_value_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_global_site_tag_rest_required_fields( - request_type=analytics_admin.GetGlobalSiteTagRequest, +def test_get_sk_ad_network_conversion_value_schema_rest_required_fields( + request_type=analytics_admin.GetSKAdNetworkConversionValueSchemaRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -62119,7 +72306,9 @@ def test_get_global_site_tag_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_global_site_tag._get_unset_required_fields(jsonified_request) + ).get_sk_ad_network_conversion_value_schema._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -62128,7 +72317,9 @@ def test_get_global_site_tag_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_global_site_tag._get_unset_required_fields(jsonified_request) + ).get_sk_ad_network_conversion_value_schema._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -62142,7 +72333,7 @@ def test_get_global_site_tag_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.GlobalSiteTag() + return_value = resources.SKAdNetworkConversionValueSchema() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -62163,30 +72354,34 @@ def test_get_global_site_tag_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.GlobalSiteTag.pb(return_value) + return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_global_site_tag(request) + response = client.get_sk_ad_network_conversion_value_schema(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_global_site_tag_rest_unset_required_fields(): +def test_get_sk_ad_network_conversion_value_schema_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_global_site_tag._get_unset_required_fields({}) + unset_fields = ( + transport.get_sk_ad_network_conversion_value_schema._get_unset_required_fields( + {} + ) + ) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_global_site_tag_rest_interceptors(null_interceptor): +def test_get_sk_ad_network_conversion_value_schema_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -62199,14 +72394,16 @@ def test_get_global_site_tag_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_global_site_tag" + transports.AnalyticsAdminServiceRestInterceptor, + 
"post_get_sk_ad_network_conversion_value_schema", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_global_site_tag" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_get_sk_ad_network_conversion_value_schema", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetGlobalSiteTagRequest.pb( - analytics_admin.GetGlobalSiteTagRequest() + pb_message = analytics_admin.GetSKAdNetworkConversionValueSchemaRequest.pb( + analytics_admin.GetSKAdNetworkConversionValueSchemaRequest() ) transcode.return_value = { "method": "post", @@ -62218,19 +72415,19 @@ def test_get_global_site_tag_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.GlobalSiteTag.to_json( - resources.GlobalSiteTag() + req.return_value._content = resources.SKAdNetworkConversionValueSchema.to_json( + resources.SKAdNetworkConversionValueSchema() ) - request = analytics_admin.GetGlobalSiteTagRequest() + request = analytics_admin.GetSKAdNetworkConversionValueSchemaRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.GlobalSiteTag() + post.return_value = resources.SKAdNetworkConversionValueSchema() - client.get_global_site_tag( + client.get_sk_ad_network_conversion_value_schema( request, metadata=[ ("key", "val"), @@ -62242,8 +72439,9 @@ def test_get_global_site_tag_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_global_site_tag_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetGlobalSiteTagRequest +def test_get_sk_ad_network_conversion_value_schema_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.GetSKAdNetworkConversionValueSchemaRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ 
-62251,7 +72449,9 @@ def test_get_global_site_tag_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/dataStreams/sample2/globalSiteTag"} + request_init = { + "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -62263,10 +72463,10 @@ def test_get_global_site_tag_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_global_site_tag(request) + client.get_sk_ad_network_conversion_value_schema(request) -def test_get_global_site_tag_rest_flattened(): +def test_get_sk_ad_network_conversion_value_schema_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -62275,11 +72475,11 @@ def test_get_global_site_tag_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.GlobalSiteTag() + return_value = resources.SKAdNetworkConversionValueSchema() # get arguments that satisfy an http rule for this method sample_request = { - "name": "properties/sample1/dataStreams/sample2/globalSiteTag" + "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" } # get truthy value for each flattened field @@ -62292,25 +72492,27 @@ def test_get_global_site_tag_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.GlobalSiteTag.pb(return_value) + return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_global_site_tag(**mock_args) + client.get_sk_ad_network_conversion_value_schema(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataStreams/*/globalSiteTag}" + "%s/v1alpha/{name=properties/*/dataStreams/*/sKAdNetworkConversionValueSchema/*}" % client.transport._host, args[1], ) -def test_get_global_site_tag_rest_flattened_error(transport: str = "rest"): +def test_get_sk_ad_network_conversion_value_schema_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -62319,13 +72521,13 @@ def test_get_global_site_tag_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_global_site_tag( - analytics_admin.GetGlobalSiteTagRequest(), + client.get_sk_ad_network_conversion_value_schema( + analytics_admin.GetSKAdNetworkConversionValueSchemaRequest(), name="name_value", ) -def test_get_global_site_tag_rest_error(): +def test_get_sk_ad_network_conversion_value_schema_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -62334,35 +72536,54 @@ def test_get_global_site_tag_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateGoogleAdsLinkRequest, + analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest, dict, ], ) -def test_create_google_ads_link_rest(request_type): +def test_create_sk_ad_network_conversion_value_schema_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["google_ads_link"] = { + request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request_init["skadnetwork_conversion_value_schema"] = { "name": "name_value", - "customer_id": "customer_id_value", - "can_manage_clients": True, - "ads_personalization_enabled": {"value": True}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "creator_email_address": "creator_email_address_value", + "postback_window_one": { + "conversion_values": [ + { + "display_name": "display_name_value", + "fine_value": 1054, + "coarse_value": 1, + "event_mappings": [ + { + "event_name": "event_name_value", + "min_event_count": 1613, + "max_event_count": 1615, + "min_event_value": 0.16010000000000002, + "max_event_value": 0.1603, + } + ], + "lock_enabled": True, + } + ], + "postback_window_settings_enabled": True, + }, + "postback_window_two": {}, + "postback_window_three": {}, + "apply_conversion_values": True, } # The version of a 
generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateGoogleAdsLinkRequest.meta.fields[ - "google_ads_link" - ] + test_field = ( + analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest.meta.fields[ + "skadnetwork_conversion_value_schema" + ] + ) def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -62390,7 +72611,9 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["google_ads_link"].items(): # pragma: NO COVER + for field, value in request_init[ + "skadnetwork_conversion_value_schema" + ].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -62420,42 +72643,42 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["google_ads_link"][field])): - del request_init["google_ads_link"][field][i][subfield] + for i in range( + 0, len(request_init["skadnetwork_conversion_value_schema"][field]) + ): + del request_init["skadnetwork_conversion_value_schema"][field][i][ + subfield + ] else: - del request_init["google_ads_link"][field][subfield] + del request_init["skadnetwork_conversion_value_schema"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.GoogleAdsLink( + return_value = resources.SKAdNetworkConversionValueSchema( name="name_value", - customer_id="customer_id_value", - can_manage_clients=True, - creator_email_address="creator_email_address_value", + apply_conversion_values=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.GoogleAdsLink.pb(return_value) + return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_google_ads_link(request) + response = client.create_sk_ad_network_conversion_value_schema(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.GoogleAdsLink) + assert isinstance(response, resources.SKAdNetworkConversionValueSchema) assert response.name == "name_value" - assert response.customer_id == "customer_id_value" - assert response.can_manage_clients is True - assert response.creator_email_address == "creator_email_address_value" + assert response.apply_conversion_values is True -def test_create_google_ads_link_rest_use_cached_wrapped_rpc(): +def test_create_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -62470,7 +72693,7 @@ def test_create_google_ads_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_google_ads_link + client._transport.create_sk_ad_network_conversion_value_schema in client._transport._wrapped_methods ) @@ -62480,24 +72703,24 @@ def test_create_google_ads_link_rest_use_cached_wrapped_rpc(): "foo" # 
operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_google_ads_link + client._transport.create_sk_ad_network_conversion_value_schema ] = mock_rpc request = {} - client.create_google_ads_link(request) + client.create_sk_ad_network_conversion_value_schema(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_google_ads_link(request) + client.create_sk_ad_network_conversion_value_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_google_ads_link_rest_required_fields( - request_type=analytics_admin.CreateGoogleAdsLinkRequest, +def test_create_sk_ad_network_conversion_value_schema_rest_required_fields( + request_type=analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -62513,7 +72736,9 @@ def test_create_google_ads_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_google_ads_link._get_unset_required_fields(jsonified_request) + ).create_sk_ad_network_conversion_value_schema._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -62522,7 +72747,9 @@ def test_create_google_ads_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_google_ads_link._get_unset_required_fields(jsonified_request) + ).create_sk_ad_network_conversion_value_schema._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -62536,7 +72763,7 @@ def test_create_google_ads_link_rest_required_fields( request = request_type(**request_init) # 
Designate an appropriate value for the returned response. - return_value = resources.GoogleAdsLink() + return_value = resources.SKAdNetworkConversionValueSchema() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -62558,38 +72785,42 @@ def test_create_google_ads_link_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.GoogleAdsLink.pb(return_value) + return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_google_ads_link(request) + response = client.create_sk_ad_network_conversion_value_schema(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_google_ads_link_rest_unset_required_fields(): +def test_create_sk_ad_network_conversion_value_schema_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_google_ads_link._get_unset_required_fields({}) + unset_fields = transport.create_sk_ad_network_conversion_value_schema._get_unset_required_fields( + {} + ) assert set(unset_fields) == ( set(()) & set( ( "parent", - "googleAdsLink", + "skadnetworkConversionValueSchema", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_google_ads_link_rest_interceptors(null_interceptor): +def test_create_sk_ad_network_conversion_value_schema_rest_interceptors( + null_interceptor, +): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -62602,14 +72833,16 
@@ def test_create_google_ads_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_google_ads_link" + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_sk_ad_network_conversion_value_schema", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_google_ads_link" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_create_sk_ad_network_conversion_value_schema", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateGoogleAdsLinkRequest.pb( - analytics_admin.CreateGoogleAdsLinkRequest() + pb_message = analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest.pb( + analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest() ) transcode.return_value = { "method": "post", @@ -62621,19 +72854,19 @@ def test_create_google_ads_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.GoogleAdsLink.to_json( - resources.GoogleAdsLink() + req.return_value._content = resources.SKAdNetworkConversionValueSchema.to_json( + resources.SKAdNetworkConversionValueSchema() ) - request = analytics_admin.CreateGoogleAdsLinkRequest() + request = analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.GoogleAdsLink() + post.return_value = resources.SKAdNetworkConversionValueSchema() - client.create_google_ads_link( + client.create_sk_ad_network_conversion_value_schema( request, metadata=[ ("key", "val"), @@ -62645,8 +72878,9 @@ def test_create_google_ads_link_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_google_ads_link_rest_bad_request( - 
transport: str = "rest", request_type=analytics_admin.CreateGoogleAdsLinkRequest +def test_create_sk_ad_network_conversion_value_schema_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -62654,7 +72888,7 @@ def test_create_google_ads_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"parent": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -62666,10 +72900,10 @@ def test_create_google_ads_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_google_ads_link(request) + client.create_sk_ad_network_conversion_value_schema(request) -def test_create_google_ads_link_rest_flattened(): +def test_create_sk_ad_network_conversion_value_schema_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -62678,15 +72912,17 @@ def test_create_google_ads_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.GoogleAdsLink() + return_value = resources.SKAdNetworkConversionValueSchema() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"parent": "properties/sample1/dataStreams/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - google_ads_link=resources.GoogleAdsLink(name="name_value"), + skadnetwork_conversion_value_schema=resources.SKAdNetworkConversionValueSchema( + name="name_value" + ), ) mock_args.update(sample_request) @@ -62694,24 +72930,27 @@ def test_create_google_ads_link_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.GoogleAdsLink.pb(return_value) + return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_google_ads_link(**mock_args) + client.create_sk_ad_network_conversion_value_schema(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/googleAdsLinks" % client.transport._host, + "%s/v1alpha/{parent=properties/*/dataStreams/*}/sKAdNetworkConversionValueSchema" + % client.transport._host, args[1], ) -def test_create_google_ads_link_rest_flattened_error(transport: str = "rest"): +def test_create_sk_ad_network_conversion_value_schema_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -62720,14 +72959,16 @@ def test_create_google_ads_link_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_google_ads_link( - analytics_admin.CreateGoogleAdsLinkRequest(), + client.create_sk_ad_network_conversion_value_schema( + analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest(), parent="parent_value", - google_ads_link=resources.GoogleAdsLink(name="name_value"), + skadnetwork_conversion_value_schema=resources.SKAdNetworkConversionValueSchema( + name="name_value" + ), ) -def test_create_google_ads_link_rest_error(): +def test_create_sk_ad_network_conversion_value_schema_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -62736,11 +72977,11 @@ def test_create_google_ads_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateGoogleAdsLinkRequest, + analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest, dict, ], ) -def test_update_google_ads_link_rest(request_type): +def test_delete_sk_ad_network_conversion_value_schema_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -62748,118 +72989,29 @@ def test_update_google_ads_link_rest(request_type): # 
send a request that will satisfy transcoding request_init = { - "google_ads_link": {"name": "properties/sample1/googleAdsLinks/sample2"} - } - request_init["google_ads_link"] = { - "name": "properties/sample1/googleAdsLinks/sample2", - "customer_id": "customer_id_value", - "can_manage_clients": True, - "ads_personalization_enabled": {"value": True}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "creator_email_address": "creator_email_address_value", + "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateGoogleAdsLinkRequest.meta.fields[ - "google_ads_link" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["google_ads_link"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["google_ads_link"][field])): - del request_init["google_ads_link"][field][i][subfield] - else: 
- del request_init["google_ads_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.GoogleAdsLink( - name="name_value", - customer_id="customer_id_value", - can_manage_clients=True, - creator_email_address="creator_email_address_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.GoogleAdsLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_google_ads_link(request) + response = client.delete_sk_ad_network_conversion_value_schema(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.GoogleAdsLink) - assert response.name == "name_value" - assert response.customer_id == "customer_id_value" - assert response.can_manage_clients is True - assert response.creator_email_address == "creator_email_address_value" + assert response is None -def test_update_google_ads_link_rest_use_cached_wrapped_rpc(): +def test_delete_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -62874,7 +73026,7 @@ def test_update_google_ads_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_google_ads_link + client._transport.delete_sk_ad_network_conversion_value_schema in client._transport._wrapped_methods ) @@ -62884,28 +73036,29 @@ def test_update_google_ads_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_google_ads_link + client._transport.delete_sk_ad_network_conversion_value_schema ] = mock_rpc request = {} - client.update_google_ads_link(request) + client.delete_sk_ad_network_conversion_value_schema(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_google_ads_link(request) + client.delete_sk_ad_network_conversion_value_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_google_ads_link_rest_required_fields( - request_type=analytics_admin.UpdateGoogleAdsLinkRequest, +def test_delete_sk_ad_network_conversion_value_schema_rest_required_fields( + request_type=analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -62916,19 +73069,25 @@ def test_update_google_ads_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_google_ads_link._get_unset_required_fields(jsonified_request) + ).delete_sk_ad_network_conversion_value_schema._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_google_ads_link._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).delete_sk_ad_network_conversion_value_schema._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -62937,7 +73096,7 @@ def test_update_google_ads_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.GoogleAdsLink() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -62949,40 +73108,40 @@ def test_update_google_ads_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.GoogleAdsLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_google_ads_link(request) + response = client.delete_sk_ad_network_conversion_value_schema(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_google_ads_link_rest_unset_required_fields(): +def test_delete_sk_ad_network_conversion_value_schema_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_google_ads_link._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) + unset_fields = transport.delete_sk_ad_network_conversion_value_schema._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_google_ads_link_rest_interceptors(null_interceptor): +def test_delete_sk_ad_network_conversion_value_schema_rest_interceptors( + null_interceptor, +): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -62995,14 +73154,12 @@ def test_update_google_ads_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_update_google_ads_link" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_google_ads_link" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_delete_sk_ad_network_conversion_value_schema", ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.UpdateGoogleAdsLinkRequest.pb( - analytics_admin.UpdateGoogleAdsLinkRequest() + pb_message = analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest.pb( + analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest() ) transcode.return_value = { "method": "post", @@ -63014,19 +73171,15 @@ def test_update_google_ads_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.GoogleAdsLink.to_json( - resources.GoogleAdsLink() - ) - request = analytics_admin.UpdateGoogleAdsLinkRequest() + request = 
analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.GoogleAdsLink() - client.update_google_ads_link( + client.delete_sk_ad_network_conversion_value_schema( request, metadata=[ ("key", "val"), @@ -63035,11 +73188,11 @@ def test_update_google_ads_link_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_google_ads_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.UpdateGoogleAdsLinkRequest +def test_delete_sk_ad_network_conversion_value_schema_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -63048,7 +73201,7 @@ def test_update_google_ads_link_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "google_ads_link": {"name": "properties/sample1/googleAdsLinks/sample2"} + "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" } request = request_type(**request_init) @@ -63061,10 +73214,10 @@ def test_update_google_ads_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_google_ads_link(request) + client.delete_sk_ad_network_conversion_value_schema(request) -def test_update_google_ads_link_rest_flattened(): +def test_delete_sk_ad_network_conversion_value_schema_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -63073,43 +73226,42 @@ def test_update_google_ads_link_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.GoogleAdsLink() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "google_ads_link": {"name": "properties/sample1/googleAdsLinks/sample2"} + "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" } # get truthy value for each flattened field mock_args = dict( - google_ads_link=resources.GoogleAdsLink(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.GoogleAdsLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_google_ads_link(**mock_args) + client.delete_sk_ad_network_conversion_value_schema(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{google_ads_link.name=properties/*/googleAdsLinks/*}" + "%s/v1alpha/{name=properties/*/dataStreams/*/sKAdNetworkConversionValueSchema/*}" % client.transport._host, args[1], ) -def test_update_google_ads_link_rest_flattened_error(transport: str = "rest"): +def test_delete_sk_ad_network_conversion_value_schema_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -63118,14 +73270,13 @@ def test_update_google_ads_link_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_google_ads_link( - analytics_admin.UpdateGoogleAdsLinkRequest(), - google_ads_link=resources.GoogleAdsLink(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_sk_ad_network_conversion_value_schema( + analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest(), + name="name_value", ) -def test_update_google_ads_link_rest_error(): +def test_delete_sk_ad_network_conversion_value_schema_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -63134,39 +73285,153 @@ def test_update_google_ads_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteGoogleAdsLinkRequest, + analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest, dict, ], ) -def test_delete_google_ads_link_rest(request_type): +def test_update_sk_ad_network_conversion_value_schema_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/googleAdsLinks/sample2"} + request_init = { 
+ "skadnetwork_conversion_value_schema": { + "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" + } + } + request_init["skadnetwork_conversion_value_schema"] = { + "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3", + "postback_window_one": { + "conversion_values": [ + { + "display_name": "display_name_value", + "fine_value": 1054, + "coarse_value": 1, + "event_mappings": [ + { + "event_name": "event_name_value", + "min_event_count": 1613, + "max_event_count": 1615, + "min_event_value": 0.16010000000000002, + "max_event_value": 0.1603, + } + ], + "lock_enabled": True, + } + ], + "postback_window_settings_enabled": True, + }, + "postback_window_two": {}, + "postback_window_three": {}, + "apply_conversion_values": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest.meta.fields[ + "skadnetwork_conversion_value_schema" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "skadnetwork_conversion_value_schema" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["skadnetwork_conversion_value_schema"][field]) + ): + del 
request_init["skadnetwork_conversion_value_schema"][field][i][ + subfield + ] + else: + del request_init["skadnetwork_conversion_value_schema"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.SKAdNetworkConversionValueSchema( + name="name_value", + apply_conversion_values=True, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_google_ads_link(request) + response = client.update_sk_ad_network_conversion_value_schema(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.SKAdNetworkConversionValueSchema) + assert response.name == "name_value" + assert response.apply_conversion_values is True -def test_delete_google_ads_link_rest_use_cached_wrapped_rpc(): +def test_update_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -63181,7 +73446,7 @@ def test_delete_google_ads_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_google_ads_link + client._transport.update_sk_ad_network_conversion_value_schema in client._transport._wrapped_methods ) @@ -63191,29 +73456,28 @@ def test_delete_google_ads_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_google_ads_link + client._transport.update_sk_ad_network_conversion_value_schema ] = mock_rpc request = {} - client.delete_google_ads_link(request) + client.update_sk_ad_network_conversion_value_schema(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_google_ads_link(request) + client.update_sk_ad_network_conversion_value_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_google_ads_link_rest_required_fields( - request_type=analytics_admin.DeleteGoogleAdsLinkRequest, +def test_update_sk_ad_network_conversion_value_schema_rest_required_fields( + request_type=analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -63224,21 +73488,23 @@ def test_delete_google_ads_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_google_ads_link._get_unset_required_fields(jsonified_request) + ).update_sk_ad_network_conversion_value_schema._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_google_ads_link._get_unset_required_fields(jsonified_request) + ).update_sk_ad_network_conversion_value_schema._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -63247,7 +73513,7 @@ def test_delete_google_ads_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.SKAdNetworkConversionValueSchema() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -63259,36 +73525,52 @@ def test_delete_google_ads_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_google_ads_link(request) + response = client.update_sk_ad_network_conversion_value_schema(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_google_ads_link_rest_unset_required_fields(): +def test_update_sk_ad_network_conversion_value_schema_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.delete_google_ads_link._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_sk_ad_network_conversion_value_schema._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "skadnetworkConversionValueSchema", + "updateMask", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_google_ads_link_rest_interceptors(null_interceptor): +def test_update_sk_ad_network_conversion_value_schema_rest_interceptors( + null_interceptor, +): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -63301,11 +73583,16 @@ def test_delete_google_ads_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_google_ads_link" + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_sk_ad_network_conversion_value_schema", + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "pre_update_sk_ad_network_conversion_value_schema", ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteGoogleAdsLinkRequest.pb( - analytics_admin.DeleteGoogleAdsLinkRequest() + post.assert_not_called() + pb_message = analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest.pb( + analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest() ) transcode.return_value = { "method": "post", @@ -63317,15 +73604,19 @@ def test_delete_google_ads_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.SKAdNetworkConversionValueSchema.to_json( + resources.SKAdNetworkConversionValueSchema() + ) - request = 
analytics_admin.DeleteGoogleAdsLinkRequest() + request = analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.SKAdNetworkConversionValueSchema() - client.delete_google_ads_link( + client.update_sk_ad_network_conversion_value_schema( request, metadata=[ ("key", "val"), @@ -63334,10 +73625,12 @@ def test_delete_google_ads_link_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_google_ads_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeleteGoogleAdsLinkRequest +def test_update_sk_ad_network_conversion_value_schema_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -63345,7 +73638,11 @@ def test_delete_google_ads_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/googleAdsLinks/sample2"} + request_init = { + "skadnetwork_conversion_value_schema": { + "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -63357,10 +73654,10 @@ def test_delete_google_ads_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_google_ads_link(request) + client.update_sk_ad_network_conversion_value_schema(request) -def test_delete_google_ads_link_rest_flattened(): +def test_update_sk_ad_network_conversion_value_schema_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -63369,37 +73666,49 @@ def test_delete_google_ads_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.SKAdNetworkConversionValueSchema() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/googleAdsLinks/sample2"} + sample_request = { + "skadnetwork_conversion_value_schema": { + "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + skadnetwork_conversion_value_schema=resources.SKAdNetworkConversionValueSchema( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_google_ads_link(**mock_args) + client.update_sk_ad_network_conversion_value_schema(**mock_args) # Establish that the 
underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/googleAdsLinks/*}" % client.transport._host, + "%s/v1alpha/{skadnetwork_conversion_value_schema.name=properties/*/dataStreams/*/sKAdNetworkConversionValueSchema/*}" + % client.transport._host, args[1], ) -def test_delete_google_ads_link_rest_flattened_error(transport: str = "rest"): +def test_update_sk_ad_network_conversion_value_schema_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -63408,13 +73717,16 @@ def test_delete_google_ads_link_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_google_ads_link( - analytics_admin.DeleteGoogleAdsLinkRequest(), - name="name_value", + client.update_sk_ad_network_conversion_value_schema( + analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest(), + skadnetwork_conversion_value_schema=resources.SKAdNetworkConversionValueSchema( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_google_ads_link_rest_error(): +def test_update_sk_ad_network_conversion_value_schema_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -63423,24 +73735,24 @@ def test_delete_google_ads_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListGoogleAdsLinksRequest, + analytics_admin.ListSKAdNetworkConversionValueSchemasRequest, dict, ], ) -def test_list_google_ads_links_rest(request_type): +def test_list_sk_ad_network_conversion_value_schemas_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), 
transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"parent": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListGoogleAdsLinksResponse( + return_value = analytics_admin.ListSKAdNetworkConversionValueSchemasResponse( next_page_token="next_page_token_value", ) @@ -63448,19 +73760,21 @@ def test_list_google_ads_links_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListGoogleAdsLinksResponse.pb(return_value) + return_value = analytics_admin.ListSKAdNetworkConversionValueSchemasResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_google_ads_links(request) + response = client.list_sk_ad_network_conversion_value_schemas(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListGoogleAdsLinksPager) + assert isinstance(response, pagers.ListSKAdNetworkConversionValueSchemasPager) assert response.next_page_token == "next_page_token_value" -def test_list_google_ads_links_rest_use_cached_wrapped_rpc(): +def test_list_sk_ad_network_conversion_value_schemas_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -63475,7 +73789,7 @@ def test_list_google_ads_links_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_google_ads_links + client._transport.list_sk_ad_network_conversion_value_schemas in client._transport._wrapped_methods ) @@ -63485,24 +73799,24 @@ def test_list_google_ads_links_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_google_ads_links + client._transport.list_sk_ad_network_conversion_value_schemas ] = mock_rpc request = {} - client.list_google_ads_links(request) + client.list_sk_ad_network_conversion_value_schemas(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_google_ads_links(request) + client.list_sk_ad_network_conversion_value_schemas(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_google_ads_links_rest_required_fields( - request_type=analytics_admin.ListGoogleAdsLinksRequest, +def test_list_sk_ad_network_conversion_value_schemas_rest_required_fields( + request_type=analytics_admin.ListSKAdNetworkConversionValueSchemasRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -63518,7 +73832,9 @@ def test_list_google_ads_links_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_google_ads_links._get_unset_required_fields(jsonified_request) + ).list_sk_ad_network_conversion_value_schemas._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -63527,7 +73843,9 @@ def test_list_google_ads_links_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_google_ads_links._get_unset_required_fields(jsonified_request) + ).list_sk_ad_network_conversion_value_schemas._get_unset_required_fields( + jsonified_request + ) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -63548,7 +73866,7 @@ def test_list_google_ads_links_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListGoogleAdsLinksResponse() + return_value = analytics_admin.ListSKAdNetworkConversionValueSchemasResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -63569,25 +73887,31 @@ def test_list_google_ads_links_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListGoogleAdsLinksResponse.pb(return_value) + return_value = ( + analytics_admin.ListSKAdNetworkConversionValueSchemasResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_google_ads_links(request) + response = client.list_sk_ad_network_conversion_value_schemas(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_google_ads_links_rest_unset_required_fields(): +def test_list_sk_ad_network_conversion_value_schemas_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_google_ads_links._get_unset_required_fields({}) + unset_fields = transport.list_sk_ad_network_conversion_value_schemas._get_unset_required_fields( + {} + ) assert set(unset_fields) == ( set( ( @@ -63600,7 +73924,9 @@ def test_list_google_ads_links_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_google_ads_links_rest_interceptors(null_interceptor): +def test_list_sk_ad_network_conversion_value_schemas_rest_interceptors( + null_interceptor, +): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -63613,14 +73939,16 @@ def test_list_google_ads_links_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - 
transports.AnalyticsAdminServiceRestInterceptor, "post_list_google_ads_links" + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_sk_ad_network_conversion_value_schemas", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_google_ads_links" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_list_sk_ad_network_conversion_value_schemas", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListGoogleAdsLinksRequest.pb( - analytics_admin.ListGoogleAdsLinksRequest() + pb_message = analytics_admin.ListSKAdNetworkConversionValueSchemasRequest.pb( + analytics_admin.ListSKAdNetworkConversionValueSchemasRequest() ) transcode.return_value = { "method": "post", @@ -63632,19 +73960,23 @@ def test_list_google_ads_links_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.ListGoogleAdsLinksResponse.to_json( - analytics_admin.ListGoogleAdsLinksResponse() + req.return_value._content = ( + analytics_admin.ListSKAdNetworkConversionValueSchemasResponse.to_json( + analytics_admin.ListSKAdNetworkConversionValueSchemasResponse() + ) ) - request = analytics_admin.ListGoogleAdsLinksRequest() + request = analytics_admin.ListSKAdNetworkConversionValueSchemasRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListGoogleAdsLinksResponse() + post.return_value = ( + analytics_admin.ListSKAdNetworkConversionValueSchemasResponse() + ) - client.list_google_ads_links( + client.list_sk_ad_network_conversion_value_schemas( request, metadata=[ ("key", "val"), @@ -63656,8 +73988,9 @@ def test_list_google_ads_links_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_google_ads_links_rest_bad_request( - transport: str = "rest", 
request_type=analytics_admin.ListGoogleAdsLinksRequest +def test_list_sk_ad_network_conversion_value_schemas_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.ListSKAdNetworkConversionValueSchemasRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -63665,7 +73998,7 @@ def test_list_google_ads_links_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"parent": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -63677,10 +74010,10 @@ def test_list_google_ads_links_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_google_ads_links(request) + client.list_sk_ad_network_conversion_value_schemas(request) -def test_list_google_ads_links_rest_flattened(): +def test_list_sk_ad_network_conversion_value_schemas_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -63689,10 +74022,10 @@ def test_list_google_ads_links_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListGoogleAdsLinksResponse() + return_value = analytics_admin.ListSKAdNetworkConversionValueSchemasResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"parent": "properties/sample1/dataStreams/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -63704,24 +74037,29 @@ def test_list_google_ads_links_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListGoogleAdsLinksResponse.pb(return_value) + return_value = analytics_admin.ListSKAdNetworkConversionValueSchemasResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_google_ads_links(**mock_args) + client.list_sk_ad_network_conversion_value_schemas(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/googleAdsLinks" % client.transport._host, + "%s/v1alpha/{parent=properties/*/dataStreams/*}/sKAdNetworkConversionValueSchema" + % client.transport._host, args[1], ) -def test_list_google_ads_links_rest_flattened_error(transport: str = "rest"): +def test_list_sk_ad_network_conversion_value_schemas_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -63730,13 +74068,15 @@ def test_list_google_ads_links_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_google_ads_links( - analytics_admin.ListGoogleAdsLinksRequest(), + client.list_sk_ad_network_conversion_value_schemas( + analytics_admin.ListSKAdNetworkConversionValueSchemasRequest(), parent="parent_value", ) -def test_list_google_ads_links_rest_pager(transport: str = "rest"): +def test_list_sk_ad_network_conversion_value_schemas_rest_pager( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -63748,28 +74088,28 @@ def test_list_google_ads_links_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - analytics_admin.ListGoogleAdsLinksResponse( - google_ads_links=[ - resources.GoogleAdsLink(), - resources.GoogleAdsLink(), - resources.GoogleAdsLink(), + analytics_admin.ListSKAdNetworkConversionValueSchemasResponse( + skadnetwork_conversion_value_schemas=[ + resources.SKAdNetworkConversionValueSchema(), + resources.SKAdNetworkConversionValueSchema(), + resources.SKAdNetworkConversionValueSchema(), ], next_page_token="abc", ), - analytics_admin.ListGoogleAdsLinksResponse( - google_ads_links=[], + analytics_admin.ListSKAdNetworkConversionValueSchemasResponse( + skadnetwork_conversion_value_schemas=[], next_page_token="def", ), - analytics_admin.ListGoogleAdsLinksResponse( - google_ads_links=[ - resources.GoogleAdsLink(), + analytics_admin.ListSKAdNetworkConversionValueSchemasResponse( + skadnetwork_conversion_value_schemas=[ + resources.SKAdNetworkConversionValueSchema(), ], next_page_token="ghi", ), - analytics_admin.ListGoogleAdsLinksResponse( - google_ads_links=[ - resources.GoogleAdsLink(), - resources.GoogleAdsLink(), + analytics_admin.ListSKAdNetworkConversionValueSchemasResponse( + skadnetwork_conversion_value_schemas=[ + resources.SKAdNetworkConversionValueSchema(), + resources.SKAdNetworkConversionValueSchema(), ], ), ) @@ 
-63778,7 +74118,8 @@ def test_list_google_ads_links_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - analytics_admin.ListGoogleAdsLinksResponse.to_json(x) for x in response + analytics_admin.ListSKAdNetworkConversionValueSchemasResponse.to_json(x) + for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -63786,15 +74127,23 @@ def test_list_google_ads_links_rest_pager(transport: str = "rest"): return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "properties/sample1"} + sample_request = {"parent": "properties/sample1/dataStreams/sample2"} - pager = client.list_google_ads_links(request=sample_request) + pager = client.list_sk_ad_network_conversion_value_schemas( + request=sample_request + ) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, resources.GoogleAdsLink) for i in results) + assert all( + isinstance(i, resources.SKAdNetworkConversionValueSchema) for i in results + ) - pages = list(client.list_google_ads_links(request=sample_request).pages) + pages = list( + client.list_sk_ad_network_conversion_value_schemas( + request=sample_request + ).pages + ) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -63802,54 +74151,46 @@ def test_list_google_ads_links_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetDataSharingSettingsRequest, + analytics_admin.SearchChangeHistoryEventsRequest, dict, ], ) -def test_get_data_sharing_settings_rest(request_type): +def test_search_change_history_events_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "accounts/sample1/dataSharingSettings"} + request_init = {"account": 
"accounts/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DataSharingSettings( - name="name_value", - sharing_with_google_support_enabled=True, - sharing_with_google_assigned_sales_enabled=True, - sharing_with_google_any_sales_enabled=True, - sharing_with_google_products_enabled=True, - sharing_with_others_enabled=True, + return_value = analytics_admin.SearchChangeHistoryEventsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataSharingSettings.pb(return_value) + return_value = analytics_admin.SearchChangeHistoryEventsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_data_sharing_settings(request) + response = client.search_change_history_events(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataSharingSettings) - assert response.name == "name_value" - assert response.sharing_with_google_support_enabled is True - assert response.sharing_with_google_assigned_sales_enabled is True - assert response.sharing_with_google_any_sales_enabled is True - assert response.sharing_with_google_products_enabled is True - assert response.sharing_with_others_enabled is True + assert isinstance(response, pagers.SearchChangeHistoryEventsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_data_sharing_settings_rest_use_cached_wrapped_rpc(): +def test_search_change_history_events_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -63864,7 +74205,7 @@ def test_get_data_sharing_settings_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_data_sharing_settings + client._transport.search_change_history_events in client._transport._wrapped_methods ) @@ -63874,29 +74215,29 @@ def test_get_data_sharing_settings_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_data_sharing_settings + client._transport.search_change_history_events ] = mock_rpc request = {} - client.get_data_sharing_settings(request) + client.search_change_history_events(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_data_sharing_settings(request) + client.search_change_history_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_data_sharing_settings_rest_required_fields( - request_type=analytics_admin.GetDataSharingSettingsRequest, +def test_search_change_history_events_rest_required_fields( + request_type=analytics_admin.SearchChangeHistoryEventsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["account"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -63907,21 +74248,21 @@ def test_get_data_sharing_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_data_sharing_settings._get_unset_required_fields(jsonified_request) + ).search_change_history_events._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["account"] = "account_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_data_sharing_settings._get_unset_required_fields(jsonified_request) + ).search_change_history_events._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "account" in jsonified_request + assert jsonified_request["account"] == "account_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -63930,7 +74271,7 @@ def test_get_data_sharing_settings_rest_required_fields( request = request_type(**request_init) # 
Designate an appropriate value for the returned response. - return_value = resources.DataSharingSettings() + return_value = analytics_admin.SearchChangeHistoryEventsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -63942,39 +74283,42 @@ def test_get_data_sharing_settings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataSharingSettings.pb(return_value) + return_value = analytics_admin.SearchChangeHistoryEventsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_data_sharing_settings(request) + response = client.search_change_history_events(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_data_sharing_settings_rest_unset_required_fields(): +def test_search_change_history_events_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_data_sharing_settings._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.search_change_history_events._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("account",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_get_data_sharing_settings_rest_interceptors(null_interceptor): +def test_search_change_history_events_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -63988,14 +74332,15 @@ def test_get_data_sharing_settings_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_get_data_sharing_settings", + "post_search_change_history_events", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_data_sharing_settings" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_search_change_history_events", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetDataSharingSettingsRequest.pb( - analytics_admin.GetDataSharingSettingsRequest() + pb_message = analytics_admin.SearchChangeHistoryEventsRequest.pb( + analytics_admin.SearchChangeHistoryEventsRequest() ) transcode.return_value = { "method": "post", @@ -64007,19 +74352,21 @@ def test_get_data_sharing_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.DataSharingSettings.to_json( - resources.DataSharingSettings() + req.return_value._content = ( + analytics_admin.SearchChangeHistoryEventsResponse.to_json( + analytics_admin.SearchChangeHistoryEventsResponse() + ) ) - request = analytics_admin.GetDataSharingSettingsRequest() + request = analytics_admin.SearchChangeHistoryEventsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.DataSharingSettings() + post.return_value = analytics_admin.SearchChangeHistoryEventsResponse() - client.get_data_sharing_settings( + client.search_change_history_events( request, 
metadata=[ ("key", "val"), @@ -64031,8 +74378,9 @@ def test_get_data_sharing_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_data_sharing_settings_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetDataSharingSettingsRequest +def test_search_change_history_events_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.SearchChangeHistoryEventsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -64040,7 +74388,7 @@ def test_get_data_sharing_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "accounts/sample1/dataSharingSettings"} + request_init = {"account": "accounts/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -64052,118 +74400,121 @@ def test_get_data_sharing_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_data_sharing_settings(request) + client.search_change_history_events(request) -def test_get_data_sharing_settings_rest_flattened(): +def test_search_change_history_events_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = resources.DataSharingSettings() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "accounts/sample1/dataSharingSettings"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.SearchChangeHistoryEventsResponse( + change_history_events=[ + resources.ChangeHistoryEvent(), + resources.ChangeHistoryEvent(), + resources.ChangeHistoryEvent(), + ], + next_page_token="abc", + ), + analytics_admin.SearchChangeHistoryEventsResponse( + change_history_events=[], + next_page_token="def", + ), + analytics_admin.SearchChangeHistoryEventsResponse( + change_history_events=[ + resources.ChangeHistoryEvent(), + ], + next_page_token="ghi", + ), + analytics_admin.SearchChangeHistoryEventsResponse( + change_history_events=[ + resources.ChangeHistoryEvent(), + resources.ChangeHistoryEvent(), + ], + ), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.DataSharingSettings.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_data_sharing_settings(**mock_args) + # Two responses for two calls + response = response + response - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=accounts/*/dataSharingSettings}" % client.transport._host, - args[1], + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.SearchChangeHistoryEventsResponse.to_json(x) + for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"account": "accounts/sample1"} -def test_get_data_sharing_settings_rest_flattened_error(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_data_sharing_settings( - analytics_admin.GetDataSharingSettingsRequest(), - name="name_value", - ) + pager = client.search_change_history_events(request=sample_request) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.ChangeHistoryEvent) for i in results) -def test_get_data_sharing_settings_rest_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pages = list(client.search_change_history_events(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetMeasurementProtocolSecretRequest, + analytics_admin.GetGoogleSignalsSettingsRequest, dict, ], ) -def test_get_measurement_protocol_secret_rest(request_type): +def test_get_google_signals_settings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), 
transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" - } + request_init = {"name": "properties/sample1/googleSignalsSettings"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.MeasurementProtocolSecret( + return_value = resources.GoogleSignalsSettings( name="name_value", - display_name="display_name_value", - secret_value="secret_value_value", + state=resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED, + consent=resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.MeasurementProtocolSecret.pb(return_value) + return_value = resources.GoogleSignalsSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_measurement_protocol_secret(request) + response = client.get_google_signals_settings(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.MeasurementProtocolSecret) + assert isinstance(response, resources.GoogleSignalsSettings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.secret_value == "secret_value_value" + assert response.state == resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED + assert ( + response.consent + == resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED + ) -def test_get_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): +def test_get_google_signals_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -64178,7 +74529,7 @@ def test_get_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_measurement_protocol_secret + client._transport.get_google_signals_settings in client._transport._wrapped_methods ) @@ -64188,24 +74539,24 @@ def test_get_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_measurement_protocol_secret + client._transport.get_google_signals_settings ] = mock_rpc request = {} - client.get_measurement_protocol_secret(request) + client.get_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_measurement_protocol_secret(request) + client.get_google_signals_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_measurement_protocol_secret_rest_required_fields( - request_type=analytics_admin.GetMeasurementProtocolSecretRequest, +def test_get_google_signals_settings_rest_required_fields( + request_type=analytics_admin.GetGoogleSignalsSettingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -64221,7 +74572,7 @@ def test_get_measurement_protocol_secret_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_measurement_protocol_secret._get_unset_required_fields(jsonified_request) + ).get_google_signals_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -64230,7 +74581,7 @@ def test_get_measurement_protocol_secret_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_measurement_protocol_secret._get_unset_required_fields(jsonified_request) + ).get_google_signals_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -64244,7 +74595,7 @@ def test_get_measurement_protocol_secret_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.MeasurementProtocolSecret() + return_value = resources.GoogleSignalsSettings() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -64265,32 +74616,30 @@ def test_get_measurement_protocol_secret_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.MeasurementProtocolSecret.pb(return_value) + return_value = resources.GoogleSignalsSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_measurement_protocol_secret(request) + response = client.get_google_signals_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_measurement_protocol_secret_rest_unset_required_fields(): +def test_get_google_signals_settings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_measurement_protocol_secret._get_unset_required_fields( - {} - ) + unset_fields = transport.get_google_signals_settings._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_measurement_protocol_secret_rest_interceptors(null_interceptor): +def test_get_google_signals_settings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -64304,15 +74653,15 @@ def test_get_measurement_protocol_secret_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_get_measurement_protocol_secret", + "post_get_google_signals_settings", ) as post, mock.patch.object( 
transports.AnalyticsAdminServiceRestInterceptor, - "pre_get_measurement_protocol_secret", + "pre_get_google_signals_settings", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetMeasurementProtocolSecretRequest.pb( - analytics_admin.GetMeasurementProtocolSecretRequest() + pb_message = analytics_admin.GetGoogleSignalsSettingsRequest.pb( + analytics_admin.GetGoogleSignalsSettingsRequest() ) transcode.return_value = { "method": "post", @@ -64324,19 +74673,19 @@ def test_get_measurement_protocol_secret_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.MeasurementProtocolSecret.to_json( - resources.MeasurementProtocolSecret() + req.return_value._content = resources.GoogleSignalsSettings.to_json( + resources.GoogleSignalsSettings() ) - request = analytics_admin.GetMeasurementProtocolSecretRequest() + request = analytics_admin.GetGoogleSignalsSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.MeasurementProtocolSecret() + post.return_value = resources.GoogleSignalsSettings() - client.get_measurement_protocol_secret( + client.get_google_signals_settings( request, metadata=[ ("key", "val"), @@ -64348,9 +74697,9 @@ def test_get_measurement_protocol_secret_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_measurement_protocol_secret_rest_bad_request( +def test_get_google_signals_settings_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.GetMeasurementProtocolSecretRequest, + request_type=analytics_admin.GetGoogleSignalsSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -64358,9 +74707,7 @@ def test_get_measurement_protocol_secret_rest_bad_request( ) # send a request that will satisfy transcoding - 
request_init = { - "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" - } + request_init = {"name": "properties/sample1/googleSignalsSettings"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -64372,10 +74719,10 @@ def test_get_measurement_protocol_secret_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_measurement_protocol_secret(request) + client.get_google_signals_settings(request) -def test_get_measurement_protocol_secret_rest_flattened(): +def test_get_google_signals_settings_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -64384,12 +74731,10 @@ def test_get_measurement_protocol_secret_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.MeasurementProtocolSecret() + return_value = resources.GoogleSignalsSettings() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" - } + sample_request = {"name": "properties/sample1/googleSignalsSettings"} # get truthy value for each flattened field mock_args = dict( @@ -64401,25 +74746,25 @@ def test_get_measurement_protocol_secret_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.MeasurementProtocolSecret.pb(return_value) + return_value = resources.GoogleSignalsSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_measurement_protocol_secret(**mock_args) + client.get_google_signals_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataStreams/*/measurementProtocolSecrets/*}" + "%s/v1alpha/{name=properties/*/googleSignalsSettings}" % client.transport._host, args[1], ) -def test_get_measurement_protocol_secret_rest_flattened_error(transport: str = "rest"): +def test_get_google_signals_settings_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -64428,13 +74773,13 @@ def test_get_measurement_protocol_secret_rest_flattened_error(transport: str = " # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_measurement_protocol_secret( - analytics_admin.GetMeasurementProtocolSecretRequest(), + client.get_google_signals_settings( + analytics_admin.GetGoogleSignalsSettingsRequest(), name="name_value", ) -def test_get_measurement_protocol_secret_rest_error(): +def test_get_google_signals_settings_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -64443,46 +74788,129 @@ def test_get_measurement_protocol_secret_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListMeasurementProtocolSecretsRequest, + analytics_admin.UpdateGoogleSignalsSettingsRequest, dict, ], ) -def test_list_measurement_protocol_secrets_rest(request_type): +def test_update_google_signals_settings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request_init = { + "google_signals_settings": {"name": "properties/sample1/googleSignalsSettings"} + } + request_init["google_signals_settings"] = { + "name": "properties/sample1/googleSignalsSettings", + "state": 1, + "consent": 2, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateGoogleSignalsSettingsRequest.meta.fields[ + "google_signals_settings" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "google_signals_settings" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["google_signals_settings"][field])): + del 
request_init["google_signals_settings"][field][i][subfield] + else: + del request_init["google_signals_settings"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListMeasurementProtocolSecretsResponse( - next_page_token="next_page_token_value", + return_value = resources.GoogleSignalsSettings( + name="name_value", + state=resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED, + consent=resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListMeasurementProtocolSecretsResponse.pb( - return_value - ) + return_value = resources.GoogleSignalsSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_measurement_protocol_secrets(request) + response = client.update_google_signals_settings(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListMeasurementProtocolSecretsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.GoogleSignalsSettings) + assert response.name == "name_value" + assert response.state == resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED + assert ( + response.consent + == resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED + ) -def test_list_measurement_protocol_secrets_rest_use_cached_wrapped_rpc(): +def test_update_google_signals_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -64497,7 +74925,7 @@ def test_list_measurement_protocol_secrets_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_measurement_protocol_secrets + client._transport.update_google_signals_settings in client._transport._wrapped_methods ) @@ -64507,29 +74935,28 @@ def test_list_measurement_protocol_secrets_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_measurement_protocol_secrets + client._transport.update_google_signals_settings ] = mock_rpc request = {} - client.list_measurement_protocol_secrets(request) + client.update_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_measurement_protocol_secrets(request) + client.update_google_signals_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_measurement_protocol_secrets_rest_required_fields( - request_type=analytics_admin.ListMeasurementProtocolSecretsRequest, +def test_update_google_signals_settings_rest_required_fields( + request_type=analytics_admin.UpdateGoogleSignalsSettingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -64540,28 +74967,19 @@ def test_list_measurement_protocol_secrets_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_measurement_protocol_secrets._get_unset_required_fields(jsonified_request) + ).update_google_signals_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_measurement_protocol_secrets._get_unset_required_fields(jsonified_request) + ).update_google_signals_settings._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -64570,7 +74988,7 @@ def test_list_measurement_protocol_secrets_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListMeasurementProtocolSecretsResponse() + return_value = resources.GoogleSignalsSettings() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -64582,51 +75000,50 @@ def test_list_measurement_protocol_secrets_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListMeasurementProtocolSecretsResponse.pb( - return_value - ) + return_value = resources.GoogleSignalsSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_measurement_protocol_secrets(request) + response = client.update_google_signals_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_list_measurement_protocol_secrets_rest_unset_required_fields(): +def test_update_google_signals_settings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.list_measurement_protocol_secrets._get_unset_required_fields({}) + unset_fields = transport.update_google_signals_settings._get_unset_required_fields( + {} ) assert set(unset_fields) == ( - set( + set(("updateMask",)) + & set( ( - "pageSize", - "pageToken", + "googleSignalsSettings", + "updateMask", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_measurement_protocol_secrets_rest_interceptors(null_interceptor): +def test_update_google_signals_settings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -64640,15 +75057,15 @@ def test_list_measurement_protocol_secrets_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_list_measurement_protocol_secrets", + "post_update_google_signals_settings", ) as post, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_list_measurement_protocol_secrets", + "pre_update_google_signals_settings", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListMeasurementProtocolSecretsRequest.pb( - analytics_admin.ListMeasurementProtocolSecretsRequest() + pb_message = analytics_admin.UpdateGoogleSignalsSettingsRequest.pb( + analytics_admin.UpdateGoogleSignalsSettingsRequest() ) transcode.return_value = { "method": "post", @@ -64660,21 +75077,19 @@ def test_list_measurement_protocol_secrets_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - 
req.return_value._content = ( - analytics_admin.ListMeasurementProtocolSecretsResponse.to_json( - analytics_admin.ListMeasurementProtocolSecretsResponse() - ) + req.return_value._content = resources.GoogleSignalsSettings.to_json( + resources.GoogleSignalsSettings() ) - request = analytics_admin.ListMeasurementProtocolSecretsRequest() + request = analytics_admin.UpdateGoogleSignalsSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListMeasurementProtocolSecretsResponse() + post.return_value = resources.GoogleSignalsSettings() - client.list_measurement_protocol_secrets( + client.update_google_signals_settings( request, metadata=[ ("key", "val"), @@ -64686,9 +75101,9 @@ def test_list_measurement_protocol_secrets_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_measurement_protocol_secrets_rest_bad_request( +def test_update_google_signals_settings_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.ListMeasurementProtocolSecretsRequest, + request_type=analytics_admin.UpdateGoogleSignalsSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -64696,7 +75111,9 @@ def test_list_measurement_protocol_secrets_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request_init = { + "google_signals_settings": {"name": "properties/sample1/googleSignalsSettings"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -64708,10 +75125,10 @@ def test_list_measurement_protocol_secrets_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_measurement_protocol_secrets(request) + client.update_google_signals_settings(request) -def test_list_measurement_protocol_secrets_rest_flattened(): +def test_update_google_signals_settings_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -64720,14 +75137,19 @@ def test_list_measurement_protocol_secrets_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListMeasurementProtocolSecretsResponse() + return_value = resources.GoogleSignalsSettings() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1/dataStreams/sample2"} + sample_request = { + "google_signals_settings": { + "name": "properties/sample1/googleSignalsSettings" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + google_signals_settings=resources.GoogleSignalsSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -64735,29 +75157,25 @@ def test_list_measurement_protocol_secrets_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListMeasurementProtocolSecretsResponse.pb( - return_value - ) + return_value = resources.GoogleSignalsSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_measurement_protocol_secrets(**mock_args) + 
client.update_google_signals_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*/dataStreams/*}/measurementProtocolSecrets" + "%s/v1alpha/{google_signals_settings.name=properties/*/googleSignalsSettings}" % client.transport._host, args[1], ) -def test_list_measurement_protocol_secrets_rest_flattened_error( - transport: str = "rest", -): +def test_update_google_signals_settings_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -64766,105 +75184,53 @@ def test_list_measurement_protocol_secrets_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_measurement_protocol_secrets( - analytics_admin.ListMeasurementProtocolSecretsRequest(), - parent="parent_value", + client.update_google_signals_settings( + analytics_admin.UpdateGoogleSignalsSettingsRequest(), + google_signals_settings=resources.GoogleSignalsSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_measurement_protocol_secrets_rest_pager(transport: str = "rest"): +def test_update_google_signals_settings_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListMeasurementProtocolSecretsResponse( - measurement_protocol_secrets=[ - resources.MeasurementProtocolSecret(), - resources.MeasurementProtocolSecret(), - resources.MeasurementProtocolSecret(), - ], - next_page_token="abc", - ), - analytics_admin.ListMeasurementProtocolSecretsResponse( - measurement_protocol_secrets=[], - next_page_token="def", - ), - analytics_admin.ListMeasurementProtocolSecretsResponse( - measurement_protocol_secrets=[ - resources.MeasurementProtocolSecret(), - ], - next_page_token="ghi", - ), - analytics_admin.ListMeasurementProtocolSecretsResponse( - measurement_protocol_secrets=[ - resources.MeasurementProtocolSecret(), - resources.MeasurementProtocolSecret(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListMeasurementProtocolSecretsResponse.to_json(x) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1/dataStreams/sample2"} - - pager = client.list_measurement_protocol_secrets(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.MeasurementProtocolSecret) for i in results) - - pages = list( - client.list_measurement_protocol_secrets(request=sample_request).pages - ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateMeasurementProtocolSecretRequest, + analytics_admin.CreateConversionEventRequest, dict, ], ) -def 
test_create_measurement_protocol_secret_rest(request_type): +def test_create_conversion_event_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1/dataStreams/sample2"} - request_init["measurement_protocol_secret"] = { + request_init = {"parent": "properties/sample1"} + request_init["conversion_event"] = { "name": "name_value", - "display_name": "display_name_value", - "secret_value": "secret_value_value", + "event_name": "event_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "deletable": True, + "custom": True, + "counting_method": 1, + "default_conversion_value": { + "value": 0.541, + "currency_code": "currency_code_value", + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateMeasurementProtocolSecretRequest.meta.fields[ - "measurement_protocol_secret" + test_field = analytics_admin.CreateConversionEventRequest.meta.fields[ + "conversion_event" ] def get_message_fields(field): @@ -64893,9 +75259,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "measurement_protocol_secret" - ].items(): # pragma: NO COVER + for field, value in request_init["conversion_event"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -64925,42 +75289,47 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if 
field_repeated: - for i in range( - 0, len(request_init["measurement_protocol_secret"][field]) - ): - del request_init["measurement_protocol_secret"][field][i][subfield] + for i in range(0, len(request_init["conversion_event"][field])): + del request_init["conversion_event"][field][i][subfield] else: - del request_init["measurement_protocol_secret"][field][subfield] + del request_init["conversion_event"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.MeasurementProtocolSecret( + return_value = resources.ConversionEvent( name="name_value", - display_name="display_name_value", - secret_value="secret_value_value", + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.ConversionEvent.ConversionCountingMethod.ONCE_PER_EVENT, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.MeasurementProtocolSecret.pb(return_value) + return_value = resources.ConversionEvent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_measurement_protocol_secret(request) + response = client.create_conversion_event(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.MeasurementProtocolSecret) + assert isinstance(response, resources.ConversionEvent) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.secret_value == "secret_value_value" + assert response.event_name == "event_name_value" + assert response.deletable is True + assert response.custom is True + assert ( + response.counting_method + == resources.ConversionEvent.ConversionCountingMethod.ONCE_PER_EVENT + ) -def test_create_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): +def test_create_conversion_event_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -64975,7 +75344,7 @@ def test_create_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_measurement_protocol_secret + client._transport.create_conversion_event in client._transport._wrapped_methods ) @@ -64985,24 +75354,24 @@ def test_create_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_measurement_protocol_secret + client._transport.create_conversion_event ] = mock_rpc request = {} - client.create_measurement_protocol_secret(request) + client.create_conversion_event(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_measurement_protocol_secret(request) + client.create_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_measurement_protocol_secret_rest_required_fields( - request_type=analytics_admin.CreateMeasurementProtocolSecretRequest, +def test_create_conversion_event_rest_required_fields( + request_type=analytics_admin.CreateConversionEventRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -65018,7 +75387,7 @@ def test_create_measurement_protocol_secret_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_measurement_protocol_secret._get_unset_required_fields(jsonified_request) + ).create_conversion_event._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -65027,7 +75396,7 @@ def test_create_measurement_protocol_secret_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_measurement_protocol_secret._get_unset_required_fields(jsonified_request) + ).create_conversion_event._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -65041,7 +75410,7 @@ def test_create_measurement_protocol_secret_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.MeasurementProtocolSecret() + return_value = resources.ConversionEvent() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -65063,40 +75432,38 @@ def test_create_measurement_protocol_secret_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.MeasurementProtocolSecret.pb(return_value) + return_value = resources.ConversionEvent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_measurement_protocol_secret(request) + response = client.create_conversion_event(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_measurement_protocol_secret_rest_unset_required_fields(): +def test_create_conversion_event_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.create_measurement_protocol_secret._get_unset_required_fields({}) - ) + unset_fields = transport.create_conversion_event._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( + "conversionEvent", "parent", - "measurementProtocolSecret", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_measurement_protocol_secret_rest_interceptors(null_interceptor): +def test_create_conversion_event_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -65109,16 +75476,14 @@ def test_create_measurement_protocol_secret_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - 
"post_create_measurement_protocol_secret", + transports.AnalyticsAdminServiceRestInterceptor, "post_create_conversion_event" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_create_measurement_protocol_secret", + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_conversion_event" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateMeasurementProtocolSecretRequest.pb( - analytics_admin.CreateMeasurementProtocolSecretRequest() + pb_message = analytics_admin.CreateConversionEventRequest.pb( + analytics_admin.CreateConversionEventRequest() ) transcode.return_value = { "method": "post", @@ -65130,19 +75495,19 @@ def test_create_measurement_protocol_secret_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.MeasurementProtocolSecret.to_json( - resources.MeasurementProtocolSecret() + req.return_value._content = resources.ConversionEvent.to_json( + resources.ConversionEvent() ) - request = analytics_admin.CreateMeasurementProtocolSecretRequest() + request = analytics_admin.CreateConversionEventRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.MeasurementProtocolSecret() + post.return_value = resources.ConversionEvent() - client.create_measurement_protocol_secret( + client.create_conversion_event( request, metadata=[ ("key", "val"), @@ -65154,9 +75519,8 @@ def test_create_measurement_protocol_secret_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_measurement_protocol_secret_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.CreateMeasurementProtocolSecretRequest, +def test_create_conversion_event_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateConversionEventRequest ): client = 
AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -65164,7 +75528,7 @@ def test_create_measurement_protocol_secret_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -65176,10 +75540,10 @@ def test_create_measurement_protocol_secret_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_measurement_protocol_secret(request) + client.create_conversion_event(request) -def test_create_measurement_protocol_secret_rest_flattened(): +def test_create_conversion_event_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -65188,17 +75552,15 @@ def test_create_measurement_protocol_secret_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.MeasurementProtocolSecret() + return_value = resources.ConversionEvent() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1/dataStreams/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - measurement_protocol_secret=resources.MeasurementProtocolSecret( - name="name_value" - ), + conversion_event=resources.ConversionEvent(name="name_value"), ) mock_args.update(sample_request) @@ -65206,27 +75568,25 @@ def test_create_measurement_protocol_secret_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.MeasurementProtocolSecret.pb(return_value) + return_value = resources.ConversionEvent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_measurement_protocol_secret(**mock_args) + client.create_conversion_event(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*/dataStreams/*}/measurementProtocolSecrets" + "%s/v1alpha/{parent=properties/*}/conversionEvents" % client.transport._host, args[1], ) -def test_create_measurement_protocol_secret_rest_flattened_error( - transport: str = "rest", -): +def test_create_conversion_event_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -65235,59 +75595,154 @@ def test_create_measurement_protocol_secret_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_measurement_protocol_secret( - analytics_admin.CreateMeasurementProtocolSecretRequest(), + client.create_conversion_event( + analytics_admin.CreateConversionEventRequest(), parent="parent_value", - measurement_protocol_secret=resources.MeasurementProtocolSecret( - name="name_value" - ), + conversion_event=resources.ConversionEvent(name="name_value"), ) -def test_create_measurement_protocol_secret_rest_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) +def test_create_conversion_event_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.UpdateConversionEventRequest, + dict, + ], +) +def test_update_conversion_event_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "conversion_event": {"name": "properties/sample1/conversionEvents/sample2"} + } + request_init["conversion_event"] = { + "name": "properties/sample1/conversionEvents/sample2", + "event_name": "event_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "deletable": True, + "custom": True, + "counting_method": 1, + "default_conversion_value": { + "value": 0.541, + "currency_code": "currency_code_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateConversionEventRequest.meta.fields[ + "conversion_event" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["conversion_event"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.DeleteMeasurementProtocolSecretRequest, - dict, - ], -) -def test_delete_measurement_protocol_secret_rest(request_type): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and 
hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" - } + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["conversion_event"][field])): + del request_init["conversion_event"][field][i][subfield] + else: + del request_init["conversion_event"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.ConversionEvent( + name="name_value", + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.ConversionEvent.ConversionCountingMethod.ONCE_PER_EVENT, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.ConversionEvent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_measurement_protocol_secret(request) + response = client.update_conversion_event(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.ConversionEvent) + assert response.name == "name_value" + assert response.event_name == "event_name_value" + assert response.deletable is True + assert response.custom is True + assert ( + response.counting_method + == resources.ConversionEvent.ConversionCountingMethod.ONCE_PER_EVENT + ) -def test_delete_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): +def test_update_conversion_event_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -65302,7 +75757,7 @@ def test_delete_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_measurement_protocol_secret + client._transport.update_conversion_event in client._transport._wrapped_methods ) @@ -65312,29 +75767,28 @@ def test_delete_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.delete_measurement_protocol_secret + client._transport.update_conversion_event ] = mock_rpc request = {} - client.delete_measurement_protocol_secret(request) + client.update_conversion_event(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_measurement_protocol_secret(request) + client.update_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_measurement_protocol_secret_rest_required_fields( - request_type=analytics_admin.DeleteMeasurementProtocolSecretRequest, +def test_update_conversion_event_rest_required_fields( + request_type=analytics_admin.UpdateConversionEventRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -65345,21 +75799,19 @@ def test_delete_measurement_protocol_secret_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_measurement_protocol_secret._get_unset_required_fields(jsonified_request) + ).update_conversion_event._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_measurement_protocol_secret._get_unset_required_fields(jsonified_request) + ).update_conversion_event._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -65368,7 +75820,7 @@ def test_delete_measurement_protocol_secret_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.ConversionEvent() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -65380,38 +75832,48 @@ def test_delete_measurement_protocol_secret_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.ConversionEvent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_measurement_protocol_secret(request) + response = client.update_conversion_event(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_measurement_protocol_secret_rest_unset_required_fields(): +def test_update_conversion_event_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - 
transport.delete_measurement_protocol_secret._get_unset_required_fields({}) + unset_fields = transport.update_conversion_event._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "conversionEvent", + "updateMask", + ) + ) ) - assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_measurement_protocol_secret_rest_interceptors(null_interceptor): +def test_update_conversion_event_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -65424,12 +75886,14 @@ def test_delete_measurement_protocol_secret_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_delete_measurement_protocol_secret", + transports.AnalyticsAdminServiceRestInterceptor, "post_update_conversion_event" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_conversion_event" ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteMeasurementProtocolSecretRequest.pb( - analytics_admin.DeleteMeasurementProtocolSecretRequest() + post.assert_not_called() + pb_message = analytics_admin.UpdateConversionEventRequest.pb( + analytics_admin.UpdateConversionEventRequest() ) transcode.return_value = { "method": "post", @@ -65441,15 +75905,19 @@ def test_delete_measurement_protocol_secret_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.ConversionEvent.to_json( + resources.ConversionEvent() + ) - request = analytics_admin.DeleteMeasurementProtocolSecretRequest() + request = analytics_admin.UpdateConversionEventRequest() metadata = [ ("key", "val"), ("cephalopod", 
"squid"), ] pre.return_value = request, metadata + post.return_value = resources.ConversionEvent() - client.delete_measurement_protocol_secret( + client.update_conversion_event( request, metadata=[ ("key", "val"), @@ -65458,11 +75926,11 @@ def test_delete_measurement_protocol_secret_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_measurement_protocol_secret_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.DeleteMeasurementProtocolSecretRequest, +def test_update_conversion_event_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateConversionEventRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -65471,7 +75939,7 @@ def test_delete_measurement_protocol_secret_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" + "conversion_event": {"name": "properties/sample1/conversionEvents/sample2"} } request = request_type(**request_init) @@ -65484,10 +75952,10 @@ def test_delete_measurement_protocol_secret_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_measurement_protocol_secret(request) + client.update_conversion_event(request) -def test_delete_measurement_protocol_secret_rest_flattened(): +def test_update_conversion_event_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -65496,42 +75964,43 @@ def test_delete_measurement_protocol_secret_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.ConversionEvent() # get arguments that satisfy an http rule for this method sample_request = { - "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" + "conversion_event": {"name": "properties/sample1/conversionEvents/sample2"} } # get truthy value for each flattened field mock_args = dict( - name="name_value", + conversion_event=resources.ConversionEvent(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.ConversionEvent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_measurement_protocol_secret(**mock_args) + client.update_conversion_event(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataStreams/*/measurementProtocolSecrets/*}" + "%s/v1alpha/{conversion_event.name=properties/*/conversionEvents/*}" % client.transport._host, args[1], ) -def test_delete_measurement_protocol_secret_rest_flattened_error( - transport: str = "rest", -): +def test_update_conversion_event_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -65540,13 +76009,14 @@ def test_delete_measurement_protocol_secret_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_measurement_protocol_secret( - analytics_admin.DeleteMeasurementProtocolSecretRequest(), - name="name_value", + client.update_conversion_event( + analytics_admin.UpdateConversionEventRequest(), + conversion_event=resources.ConversionEvent(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_measurement_protocol_secret_rest_error(): +def test_update_conversion_event_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -65555,130 +76025,55 @@ def test_delete_measurement_protocol_secret_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateMeasurementProtocolSecretRequest, + analytics_admin.GetConversionEventRequest, dict, ], ) -def test_update_measurement_protocol_secret_rest(request_type): +def test_get_conversion_event_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "measurement_protocol_secret": { - "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" - } - } - request_init["measurement_protocol_secret"] = { - "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3", - "display_name": "display_name_value", - "secret_value": "secret_value_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateMeasurementProtocolSecretRequest.meta.fields[ - "measurement_protocol_secret" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "measurement_protocol_secret" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample 
request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["measurement_protocol_secret"][field]) - ): - del request_init["measurement_protocol_secret"][field][i][subfield] - else: - del request_init["measurement_protocol_secret"][field][subfield] + request_init = {"name": "properties/sample1/conversionEvents/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.MeasurementProtocolSecret( + return_value = resources.ConversionEvent( name="name_value", - display_name="display_name_value", - secret_value="secret_value_value", + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.ConversionEvent.ConversionCountingMethod.ONCE_PER_EVENT, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.MeasurementProtocolSecret.pb(return_value) + return_value = resources.ConversionEvent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_measurement_protocol_secret(request) + response = client.get_conversion_event(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.MeasurementProtocolSecret) + assert isinstance(response, resources.ConversionEvent) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.secret_value == "secret_value_value" + assert response.event_name == "event_name_value" + assert response.deletable is True + assert response.custom is True + assert ( + response.counting_method + == resources.ConversionEvent.ConversionCountingMethod.ONCE_PER_EVENT + ) -def test_update_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): +def test_get_conversion_event_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -65693,8 +76088,7 @@ def test_update_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_measurement_protocol_secret - in client._transport._wrapped_methods + client._transport.get_conversion_event in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -65703,28 +76097,29 @@ def test_update_measurement_protocol_secret_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_measurement_protocol_secret + client._transport.get_conversion_event ] = mock_rpc request = {} - client.update_measurement_protocol_secret(request) + client.get_conversion_event(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_measurement_protocol_secret(request) + client.get_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_measurement_protocol_secret_rest_required_fields( - request_type=analytics_admin.UpdateMeasurementProtocolSecretRequest, +def test_get_conversion_event_rest_required_fields( + request_type=analytics_admin.GetConversionEventRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -65735,19 +76130,21 @@ def test_update_measurement_protocol_secret_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_measurement_protocol_secret._get_unset_required_fields(jsonified_request) + ).get_conversion_event._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_measurement_protocol_secret._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).get_conversion_event._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -65756,7 +76153,7 @@ def test_update_measurement_protocol_secret_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.MeasurementProtocolSecret() + return_value = resources.ConversionEvent() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -65768,50 +76165,39 @@ def test_update_measurement_protocol_secret_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.MeasurementProtocolSecret.pb(return_value) + return_value = resources.ConversionEvent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_measurement_protocol_secret(request) + response = client.get_conversion_event(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_measurement_protocol_secret_rest_unset_required_fields(): +def test_get_conversion_event_rest_unset_required_fields(): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.update_measurement_protocol_secret._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "measurementProtocolSecret", - "updateMask", - ) - ) - ) + unset_fields = transport.get_conversion_event._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_measurement_protocol_secret_rest_interceptors(null_interceptor): +def test_get_conversion_event_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -65824,16 +76210,14 @@ def test_update_measurement_protocol_secret_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_update_measurement_protocol_secret", + transports.AnalyticsAdminServiceRestInterceptor, "post_get_conversion_event" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_update_measurement_protocol_secret", + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_conversion_event" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateMeasurementProtocolSecretRequest.pb( - analytics_admin.UpdateMeasurementProtocolSecretRequest() + pb_message = analytics_admin.GetConversionEventRequest.pb( + analytics_admin.GetConversionEventRequest() ) transcode.return_value = { "method": "post", @@ -65845,19 +76229,19 @@ def test_update_measurement_protocol_secret_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = 
resources.MeasurementProtocolSecret.to_json( - resources.MeasurementProtocolSecret() + req.return_value._content = resources.ConversionEvent.to_json( + resources.ConversionEvent() ) - request = analytics_admin.UpdateMeasurementProtocolSecretRequest() + request = analytics_admin.GetConversionEventRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.MeasurementProtocolSecret() + post.return_value = resources.ConversionEvent() - client.update_measurement_protocol_secret( + client.get_conversion_event( request, metadata=[ ("key", "val"), @@ -65869,9 +76253,8 @@ def test_update_measurement_protocol_secret_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_measurement_protocol_secret_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.UpdateMeasurementProtocolSecretRequest, +def test_get_conversion_event_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetConversionEventRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -65879,11 +76262,7 @@ def test_update_measurement_protocol_secret_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "measurement_protocol_secret": { - "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" - } - } + request_init = {"name": "properties/sample1/conversionEvents/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -65895,10 +76274,10 @@ def test_update_measurement_protocol_secret_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_measurement_protocol_secret(request) + client.get_conversion_event(request) -def test_update_measurement_protocol_secret_rest_flattened(): +def test_get_conversion_event_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -65907,21 +76286,14 @@ def test_update_measurement_protocol_secret_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.MeasurementProtocolSecret() + return_value = resources.ConversionEvent() # get arguments that satisfy an http rule for this method - sample_request = { - "measurement_protocol_secret": { - "name": "properties/sample1/dataStreams/sample2/measurementProtocolSecrets/sample3" - } - } + sample_request = {"name": "properties/sample1/conversionEvents/sample2"} # get truthy value for each flattened field mock_args = dict( - measurement_protocol_secret=resources.MeasurementProtocolSecret( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -65929,27 +76301,25 @@ def test_update_measurement_protocol_secret_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.MeasurementProtocolSecret.pb(return_value) + return_value = resources.ConversionEvent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_measurement_protocol_secret(**mock_args) + 
client.get_conversion_event(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{measurement_protocol_secret.name=properties/*/dataStreams/*/measurementProtocolSecrets/*}" + "%s/v1alpha/{name=properties/*/conversionEvents/*}" % client.transport._host, args[1], ) -def test_update_measurement_protocol_secret_rest_flattened_error( - transport: str = "rest", -): +def test_get_conversion_event_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -65958,16 +76328,13 @@ def test_update_measurement_protocol_secret_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_measurement_protocol_secret( - analytics_admin.UpdateMeasurementProtocolSecretRequest(), - measurement_protocol_secret=resources.MeasurementProtocolSecret( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_conversion_event( + analytics_admin.GetConversionEventRequest(), + name="name_value", ) -def test_update_measurement_protocol_secret_rest_error(): +def test_get_conversion_event_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -65976,43 +76343,39 @@ def test_update_measurement_protocol_secret_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.AcknowledgeUserDataCollectionRequest, + analytics_admin.DeleteConversionEventRequest, dict, ], ) -def test_acknowledge_user_data_collection_rest(request_type): +def test_delete_conversion_event_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will 
satisfy transcoding - request_init = {"property": "properties/sample1"} + request_init = {"name": "properties/sample1/conversionEvents/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.AcknowledgeUserDataCollectionResponse() + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.AcknowledgeUserDataCollectionResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.acknowledge_user_data_collection(request) + response = client.delete_conversion_event(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.AcknowledgeUserDataCollectionResponse) + assert response is None -def test_acknowledge_user_data_collection_rest_use_cached_wrapped_rpc(): +def test_delete_conversion_event_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -66027,7 +76390,7 @@ def test_acknowledge_user_data_collection_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.acknowledge_user_data_collection + client._transport.delete_conversion_event in client._transport._wrapped_methods ) @@ -66037,30 +76400,29 @@ def test_acknowledge_user_data_collection_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.acknowledge_user_data_collection + client._transport.delete_conversion_event ] = mock_rpc request = {} - client.acknowledge_user_data_collection(request) + client.delete_conversion_event(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.acknowledge_user_data_collection(request) + client.delete_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_acknowledge_user_data_collection_rest_required_fields( - request_type=analytics_admin.AcknowledgeUserDataCollectionRequest, +def test_delete_conversion_event_rest_required_fields( + request_type=analytics_admin.DeleteConversionEventRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["property"] = "" - request_init["acknowledgement"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -66071,24 +76433,21 @@ def test_acknowledge_user_data_collection_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).acknowledge_user_data_collection._get_unset_required_fields(jsonified_request) + ).delete_conversion_event._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["property"] = "property_value" - jsonified_request["acknowledgement"] = "acknowledgement_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).acknowledge_user_data_collection._get_unset_required_fields(jsonified_request) + ).delete_conversion_event._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required 
fields with non-default values are left alone - assert "property" in jsonified_request - assert jsonified_request["property"] == "property_value" - assert "acknowledgement" in jsonified_request - assert jsonified_request["acknowledgement"] == "acknowledgement_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -66097,7 +76456,7 @@ def test_acknowledge_user_data_collection_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.AcknowledgeUserDataCollectionResponse() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -66109,52 +76468,36 @@ def test_acknowledge_user_data_collection_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = analytics_admin.AcknowledgeUserDataCollectionResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.acknowledge_user_data_collection(request) + response = client.delete_conversion_event(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_acknowledge_user_data_collection_rest_unset_required_fields(): +def 
test_delete_conversion_event_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.acknowledge_user_data_collection._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set(()) - & set( - ( - "property", - "acknowledgement", - ) - ) - ) + unset_fields = transport.delete_conversion_event._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_acknowledge_user_data_collection_rest_interceptors(null_interceptor): +def test_delete_conversion_event_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -66167,16 +76510,11 @@ def test_acknowledge_user_data_collection_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_acknowledge_user_data_collection", - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_acknowledge_user_data_collection", + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_conversion_event" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.AcknowledgeUserDataCollectionRequest.pb( - analytics_admin.AcknowledgeUserDataCollectionRequest() + pb_message = analytics_admin.DeleteConversionEventRequest.pb( + analytics_admin.DeleteConversionEventRequest() ) transcode.return_value = { "method": "post", @@ -66188,21 +76526,15 @@ def test_acknowledge_user_data_collection_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - 
analytics_admin.AcknowledgeUserDataCollectionResponse.to_json( - analytics_admin.AcknowledgeUserDataCollectionResponse() - ) - ) - request = analytics_admin.AcknowledgeUserDataCollectionRequest() + request = analytics_admin.DeleteConversionEventRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.AcknowledgeUserDataCollectionResponse() - client.acknowledge_user_data_collection( + client.delete_conversion_event( request, metadata=[ ("key", "val"), @@ -66211,12 +76543,10 @@ def test_acknowledge_user_data_collection_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_acknowledge_user_data_collection_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.AcknowledgeUserDataCollectionRequest, +def test_delete_conversion_event_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteConversionEventRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -66224,7 +76554,7 @@ def test_acknowledge_user_data_collection_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} + request_init = {"name": "properties/sample1/conversionEvents/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -66236,10 +76566,65 @@ def test_acknowledge_user_data_collection_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.acknowledge_user_data_collection(request) + client.delete_conversion_event(request) -def test_acknowledge_user_data_collection_rest_error(): +def test_delete_conversion_event_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1/conversionEvents/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_conversion_event(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/conversionEvents/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_conversion_event_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_conversion_event( + analytics_admin.DeleteConversionEventRequest(), + name="name_value", + ) + + +def test_delete_conversion_event_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -66248,48 +76633,44 @@ def test_acknowledge_user_data_collection_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetSKAdNetworkConversionValueSchemaRequest, + analytics_admin.ListConversionEventsRequest, dict, ], ) -def test_get_sk_ad_network_conversion_value_schema_rest(request_type): +def test_list_conversion_events_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" - } + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.SKAdNetworkConversionValueSchema( - name="name_value", - apply_conversion_values=True, + return_value = analytics_admin.ListConversionEventsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) + return_value = analytics_admin.ListConversionEventsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_sk_ad_network_conversion_value_schema(request) + response = client.list_conversion_events(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.SKAdNetworkConversionValueSchema) - assert response.name == "name_value" - assert response.apply_conversion_values is True + assert isinstance(response, pagers.ListConversionEventsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rpc(): +def test_list_conversion_events_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -66304,7 +76685,7 @@ def test_get_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rpc() # Ensure method has been cached assert ( - client._transport.get_sk_ad_network_conversion_value_schema + client._transport.list_conversion_events in client._transport._wrapped_methods ) @@ -66314,29 +76695,29 @@ def test_get_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rpc() "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.get_sk_ad_network_conversion_value_schema + client._transport.list_conversion_events ] = mock_rpc request = {} - client.get_sk_ad_network_conversion_value_schema(request) + client.list_conversion_events(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_sk_ad_network_conversion_value_schema(request) + client.list_conversion_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_sk_ad_network_conversion_value_schema_rest_required_fields( - request_type=analytics_admin.GetSKAdNetworkConversionValueSchemaRequest, +def test_list_conversion_events_rest_required_fields( + request_type=analytics_admin.ListConversionEventsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -66347,25 +76728,28 @@ def test_get_sk_ad_network_conversion_value_schema_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_sk_ad_network_conversion_value_schema._get_unset_required_fields( - jsonified_request - ) + ).list_conversion_events._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_sk_ad_network_conversion_value_schema._get_unset_required_fields( - jsonified_request + ).list_conversion_events._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -66374,7 +76758,7 @@ def test_get_sk_ad_network_conversion_value_schema_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.SKAdNetworkConversionValueSchema() + return_value = analytics_admin.ListConversionEventsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -66395,34 +76779,38 @@ def test_get_sk_ad_network_conversion_value_schema_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) + return_value = analytics_admin.ListConversionEventsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_sk_ad_network_conversion_value_schema(request) + response = client.list_conversion_events(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_sk_ad_network_conversion_value_schema_rest_unset_required_fields(): +def test_list_conversion_events_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - 
transport.get_sk_ad_network_conversion_value_schema._get_unset_required_fields( - {} + unset_fields = transport.list_conversion_events._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) ) + & set(("parent",)) ) - assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_sk_ad_network_conversion_value_schema_rest_interceptors(null_interceptor): +def test_list_conversion_events_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -66435,16 +76823,14 @@ def test_get_sk_ad_network_conversion_value_schema_rest_interceptors(null_interc ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_get_sk_ad_network_conversion_value_schema", + transports.AnalyticsAdminServiceRestInterceptor, "post_list_conversion_events" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_get_sk_ad_network_conversion_value_schema", + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_conversion_events" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetSKAdNetworkConversionValueSchemaRequest.pb( - analytics_admin.GetSKAdNetworkConversionValueSchemaRequest() + pb_message = analytics_admin.ListConversionEventsRequest.pb( + analytics_admin.ListConversionEventsRequest() ) transcode.return_value = { "method": "post", @@ -66456,19 +76842,21 @@ def test_get_sk_ad_network_conversion_value_schema_rest_interceptors(null_interc req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.SKAdNetworkConversionValueSchema.to_json( - resources.SKAdNetworkConversionValueSchema() + req.return_value._content = ( 
+ analytics_admin.ListConversionEventsResponse.to_json( + analytics_admin.ListConversionEventsResponse() + ) ) - request = analytics_admin.GetSKAdNetworkConversionValueSchemaRequest() + request = analytics_admin.ListConversionEventsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.SKAdNetworkConversionValueSchema() + post.return_value = analytics_admin.ListConversionEventsResponse() - client.get_sk_ad_network_conversion_value_schema( + client.list_conversion_events( request, metadata=[ ("key", "val"), @@ -66480,9 +76868,8 @@ def test_get_sk_ad_network_conversion_value_schema_rest_interceptors(null_interc post.assert_called_once() -def test_get_sk_ad_network_conversion_value_schema_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.GetSKAdNetworkConversionValueSchemaRequest, +def test_list_conversion_events_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListConversionEventsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -66490,9 +76877,7 @@ def test_get_sk_ad_network_conversion_value_schema_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" - } + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -66504,10 +76889,10 @@ def test_get_sk_ad_network_conversion_value_schema_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_sk_ad_network_conversion_value_schema(request) + client.list_conversion_events(request) -def test_get_sk_ad_network_conversion_value_schema_rest_flattened(): +def test_list_conversion_events_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -66516,16 +76901,14 @@ def test_get_sk_ad_network_conversion_value_schema_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.SKAdNetworkConversionValueSchema() + return_value = analytics_admin.ListConversionEventsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" - } + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -66533,27 +76916,25 @@ def test_get_sk_ad_network_conversion_value_schema_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) + return_value = analytics_admin.ListConversionEventsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_sk_ad_network_conversion_value_schema(**mock_args) + client.list_conversion_events(**mock_args) # Establish that the underlying call was made with the expected 
# request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataStreams/*/sKAdNetworkConversionValueSchema/*}" + "%s/v1alpha/{parent=properties/*}/conversionEvents" % client.transport._host, args[1], ) -def test_get_sk_ad_network_conversion_value_schema_rest_flattened_error( - transport: str = "rest", -): +def test_list_conversion_events_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -66562,69 +76943,108 @@ def test_get_sk_ad_network_conversion_value_schema_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_sk_ad_network_conversion_value_schema( - analytics_admin.GetSKAdNetworkConversionValueSchemaRequest(), - name="name_value", + client.list_conversion_events( + analytics_admin.ListConversionEventsRequest(), + parent="parent_value", ) -def test_get_sk_ad_network_conversion_value_schema_rest_error(): +def test_list_conversion_events_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListConversionEventsResponse( + conversion_events=[ + resources.ConversionEvent(), + resources.ConversionEvent(), + resources.ConversionEvent(), + ], + next_page_token="abc", + ), + analytics_admin.ListConversionEventsResponse( + conversion_events=[], + next_page_token="def", + ), + analytics_admin.ListConversionEventsResponse( + conversion_events=[ + resources.ConversionEvent(), + ], + next_page_token="ghi", + ), + analytics_admin.ListConversionEventsResponse( + conversion_events=[ + resources.ConversionEvent(), + resources.ConversionEvent(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListConversionEventsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_conversion_events(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.ConversionEvent) for i in results) + + pages = list(client.list_conversion_events(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest, + analytics_admin.CreateKeyEventRequest, dict, ], ) -def test_create_sk_ad_network_conversion_value_schema_rest(request_type): +def test_create_key_event_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a 
request that will satisfy transcoding - request_init = {"parent": "properties/sample1/dataStreams/sample2"} - request_init["skadnetwork_conversion_value_schema"] = { + request_init = {"parent": "properties/sample1"} + request_init["key_event"] = { "name": "name_value", - "postback_window_one": { - "conversion_values": [ - { - "display_name": "display_name_value", - "fine_value": 1054, - "coarse_value": 1, - "event_mappings": [ - { - "event_name": "event_name_value", - "min_event_count": 1613, - "max_event_count": 1615, - "min_event_value": 0.16010000000000002, - "max_event_value": 0.1603, - } - ], - "lock_enabled": True, - } - ], - "postback_window_settings_enabled": True, + "event_name": "event_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "deletable": True, + "custom": True, + "counting_method": 1, + "default_value": { + "numeric_value": 0.1391, + "currency_code": "currency_code_value", }, - "postback_window_two": {}, - "postback_window_three": {}, - "apply_conversion_values": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = ( - analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest.meta.fields[ - "skadnetwork_conversion_value_schema" - ] - ) + test_field = analytics_admin.CreateKeyEventRequest.meta.fields["key_event"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -66652,9 +77072,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "skadnetwork_conversion_value_schema" - ].items(): # pragma: NO COVER + for field, value in request_init["key_event"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -66684,42 +77102,44 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range( - 0, len(request_init["skadnetwork_conversion_value_schema"][field]) - ): - del request_init["skadnetwork_conversion_value_schema"][field][i][ - subfield - ] + for i in range(0, len(request_init["key_event"][field])): + del request_init["key_event"][field][i][subfield] else: - del request_init["skadnetwork_conversion_value_schema"][field][subfield] + del request_init["key_event"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.SKAdNetworkConversionValueSchema( + return_value = resources.KeyEvent( name="name_value", - apply_conversion_values=True, + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.KeyEvent.CountingMethod.ONCE_PER_EVENT, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) + return_value = resources.KeyEvent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_sk_ad_network_conversion_value_schema(request) + response = client.create_key_event(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.SKAdNetworkConversionValueSchema) + assert isinstance(response, resources.KeyEvent) assert response.name == "name_value" - assert response.apply_conversion_values is True + assert response.event_name == "event_name_value" + assert response.deletable is True + assert response.custom is True + assert response.counting_method == resources.KeyEvent.CountingMethod.ONCE_PER_EVENT -def test_create_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rpc(): +def test_create_key_event_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -66733,10 +77153,7 @@ def test_create_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rp wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_sk_ad_network_conversion_value_schema - in client._transport._wrapped_methods - ) + assert client._transport.create_key_event in 
client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -66744,24 +77161,24 @@ def test_create_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rp "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_sk_ad_network_conversion_value_schema + client._transport.create_key_event ] = mock_rpc request = {} - client.create_sk_ad_network_conversion_value_schema(request) + client.create_key_event(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_sk_ad_network_conversion_value_schema(request) + client.create_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_sk_ad_network_conversion_value_schema_rest_required_fields( - request_type=analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest, +def test_create_key_event_rest_required_fields( + request_type=analytics_admin.CreateKeyEventRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -66777,9 +77194,7 @@ def test_create_sk_ad_network_conversion_value_schema_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_sk_ad_network_conversion_value_schema._get_unset_required_fields( - jsonified_request - ) + ).create_key_event._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -66788,9 +77203,7 @@ def test_create_sk_ad_network_conversion_value_schema_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_sk_ad_network_conversion_value_schema._get_unset_required_fields( - jsonified_request - ) + ).create_key_event._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -66804,7 +77217,7 @@ def test_create_sk_ad_network_conversion_value_schema_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.SKAdNetworkConversionValueSchema() + return_value = resources.KeyEvent() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -66826,42 +77239,38 @@ def test_create_sk_ad_network_conversion_value_schema_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) + return_value = resources.KeyEvent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_sk_ad_network_conversion_value_schema(request) + response = client.create_key_event(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_sk_ad_network_conversion_value_schema_rest_unset_required_fields(): +def test_create_key_event_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_sk_ad_network_conversion_value_schema._get_unset_required_fields( - {} - ) + unset_fields = transport.create_key_event._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( + "keyEvent", "parent", - "skadnetworkConversionValueSchema", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_sk_ad_network_conversion_value_schema_rest_interceptors( - 
null_interceptor, -): +def test_create_key_event_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -66874,16 +77283,14 @@ def test_create_sk_ad_network_conversion_value_schema_rest_interceptors( ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_create_sk_ad_network_conversion_value_schema", + transports.AnalyticsAdminServiceRestInterceptor, "post_create_key_event" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_create_sk_ad_network_conversion_value_schema", + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_key_event" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest.pb( - analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest() + pb_message = analytics_admin.CreateKeyEventRequest.pb( + analytics_admin.CreateKeyEventRequest() ) transcode.return_value = { "method": "post", @@ -66895,19 +77302,17 @@ def test_create_sk_ad_network_conversion_value_schema_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.SKAdNetworkConversionValueSchema.to_json( - resources.SKAdNetworkConversionValueSchema() - ) + req.return_value._content = resources.KeyEvent.to_json(resources.KeyEvent()) - request = analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest() + request = analytics_admin.CreateKeyEventRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.SKAdNetworkConversionValueSchema() + post.return_value = resources.KeyEvent() - client.create_sk_ad_network_conversion_value_schema( + client.create_key_event( request, metadata=[ 
("key", "val"), @@ -66919,9 +77324,8 @@ def test_create_sk_ad_network_conversion_value_schema_rest_interceptors( post.assert_called_once() -def test_create_sk_ad_network_conversion_value_schema_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest, +def test_create_key_event_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateKeyEventRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -66929,7 +77333,7 @@ def test_create_sk_ad_network_conversion_value_schema_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -66941,10 +77345,10 @@ def test_create_sk_ad_network_conversion_value_schema_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_sk_ad_network_conversion_value_schema(request) + client.create_key_event(request) -def test_create_sk_ad_network_conversion_value_schema_rest_flattened(): +def test_create_key_event_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -66953,17 +77357,15 @@ def test_create_sk_ad_network_conversion_value_schema_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.SKAdNetworkConversionValueSchema() + return_value = resources.KeyEvent() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1/dataStreams/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - skadnetwork_conversion_value_schema=resources.SKAdNetworkConversionValueSchema( - name="name_value" - ), + key_event=resources.KeyEvent(name="name_value"), ) mock_args.update(sample_request) @@ -66971,27 +77373,24 @@ def test_create_sk_ad_network_conversion_value_schema_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) + return_value = resources.KeyEvent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_sk_ad_network_conversion_value_schema(**mock_args) + client.create_key_event(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*/dataStreams/*}/sKAdNetworkConversionValueSchema" - % client.transport._host, + "%s/v1alpha/{parent=properties/*}/keyEvents" % client.transport._host, args[1], ) -def test_create_sk_ad_network_conversion_value_schema_rest_flattened_error( - transport: str = "rest", -): +def test_create_key_event_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -67000,16 +77399,14 @@ def test_create_sk_ad_network_conversion_value_schema_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_sk_ad_network_conversion_value_schema( - analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest(), + client.create_key_event( + analytics_admin.CreateKeyEventRequest(), parent="parent_value", - skadnetwork_conversion_value_schema=resources.SKAdNetworkConversionValueSchema( - name="name_value" - ), + key_event=resources.KeyEvent(name="name_value"), ) -def test_create_sk_ad_network_conversion_value_schema_rest_error(): +def test_create_key_event_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -67018,41 +77415,131 @@ def test_create_sk_ad_network_conversion_value_schema_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest, + analytics_admin.UpdateKeyEventRequest, dict, ], ) -def test_delete_sk_ad_network_conversion_value_schema_rest(request_type): +def test_update_key_event_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": 
"properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" + request_init = {"key_event": {"name": "properties/sample1/keyEvents/sample2"}} + request_init["key_event"] = { + "name": "properties/sample1/keyEvents/sample2", + "event_name": "event_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "deletable": True, + "custom": True, + "counting_method": 1, + "default_value": { + "numeric_value": 0.1391, + "currency_code": "currency_code_value", + }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateKeyEventRequest.meta.fields["key_event"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["key_event"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["key_event"][field])): + del request_init["key_event"][field][i][subfield] + else: + del 
request_init["key_event"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.KeyEvent( + name="name_value", + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.KeyEvent.CountingMethod.ONCE_PER_EVENT, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.KeyEvent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_sk_ad_network_conversion_value_schema(request) + response = client.update_key_event(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.KeyEvent) + assert response.name == "name_value" + assert response.event_name == "event_name_value" + assert response.deletable is True + assert response.custom is True + assert response.counting_method == resources.KeyEvent.CountingMethod.ONCE_PER_EVENT -def test_delete_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rpc(): +def test_update_key_event_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -67066,10 +77553,7 @@ def test_delete_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rp wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_sk_ad_network_conversion_value_schema - in client._transport._wrapped_methods - ) + assert client._transport.update_key_event in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -67077,29 +77561,28 @@ def test_delete_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rp "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_sk_ad_network_conversion_value_schema + client._transport.update_key_event ] = mock_rpc request = {} - client.delete_sk_ad_network_conversion_value_schema(request) + client.update_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_sk_ad_network_conversion_value_schema(request) + client.update_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_sk_ad_network_conversion_value_schema_rest_required_fields( - request_type=analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest, +def test_update_key_event_rest_required_fields( + request_type=analytics_admin.UpdateKeyEventRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -67110,25 +77593,19 @@ def test_delete_sk_ad_network_conversion_value_schema_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_sk_ad_network_conversion_value_schema._get_unset_required_fields( - jsonified_request - ) + ).update_key_event._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_sk_ad_network_conversion_value_schema._get_unset_required_fields( - jsonified_request - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_key_event._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -67137,7 +77614,7 @@ def test_delete_sk_ad_network_conversion_value_schema_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.KeyEvent() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -67149,40 +77626,48 @@ def test_delete_sk_ad_network_conversion_value_schema_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.KeyEvent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_sk_ad_network_conversion_value_schema(request) + response = client.update_key_event(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_sk_ad_network_conversion_value_schema_rest_unset_required_fields(): +def test_update_key_event_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.delete_sk_ad_network_conversion_value_schema._get_unset_required_fields( - {} + unset_fields = transport.update_key_event._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "keyEvent", + "updateMask", + ) + ) ) - assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_sk_ad_network_conversion_value_schema_rest_interceptors( - null_interceptor, -): +def test_update_key_event_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -67195,12 +77680,14 @@ def test_delete_sk_ad_network_conversion_value_schema_rest_interceptors( ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_delete_sk_ad_network_conversion_value_schema", + transports.AnalyticsAdminServiceRestInterceptor, "post_update_key_event" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_key_event" ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest.pb( - analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest() + post.assert_not_called() + pb_message = analytics_admin.UpdateKeyEventRequest.pb( + analytics_admin.UpdateKeyEventRequest() ) transcode.return_value = { "method": "post", @@ -67212,15 +77699,17 @@ def test_delete_sk_ad_network_conversion_value_schema_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.KeyEvent.to_json(resources.KeyEvent()) - request = analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest() + request = analytics_admin.UpdateKeyEventRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = 
request, metadata + post.return_value = resources.KeyEvent() - client.delete_sk_ad_network_conversion_value_schema( + client.update_key_event( request, metadata=[ ("key", "val"), @@ -67229,11 +77718,11 @@ def test_delete_sk_ad_network_conversion_value_schema_rest_interceptors( ) pre.assert_called_once() + post.assert_called_once() -def test_delete_sk_ad_network_conversion_value_schema_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest, +def test_update_key_event_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateKeyEventRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -67241,9 +77730,7 @@ def test_delete_sk_ad_network_conversion_value_schema_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" - } + request_init = {"key_event": {"name": "properties/sample1/keyEvents/sample2"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -67255,10 +77742,10 @@ def test_delete_sk_ad_network_conversion_value_schema_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_sk_ad_network_conversion_value_schema(request) + client.update_key_event(request) -def test_delete_sk_ad_network_conversion_value_schema_rest_flattened(): +def test_update_key_event_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -67267,42 +77754,41 @@ def test_delete_sk_ad_network_conversion_value_schema_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.KeyEvent() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" - } + sample_request = {"key_event": {"name": "properties/sample1/keyEvents/sample2"}} # get truthy value for each flattened field mock_args = dict( - name="name_value", + key_event=resources.KeyEvent(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.KeyEvent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_sk_ad_network_conversion_value_schema(**mock_args) + client.update_key_event(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataStreams/*/sKAdNetworkConversionValueSchema/*}" + "%s/v1alpha/{key_event.name=properties/*/keyEvents/*}" % client.transport._host, args[1], ) -def test_delete_sk_ad_network_conversion_value_schema_rest_flattened_error( - transport: str = "rest", -): +def test_update_key_event_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -67311,13 +77797,14 @@ def test_delete_sk_ad_network_conversion_value_schema_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_sk_ad_network_conversion_value_schema( - analytics_admin.DeleteSKAdNetworkConversionValueSchemaRequest(), - name="name_value", + client.update_key_event( + analytics_admin.UpdateKeyEventRequest(), + key_event=resources.KeyEvent(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_sk_ad_network_conversion_value_schema_rest_error(): +def test_update_key_event_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -67326,153 +77813,52 @@ def test_delete_sk_ad_network_conversion_value_schema_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest, + analytics_admin.GetKeyEventRequest, dict, ], ) -def test_update_sk_ad_network_conversion_value_schema_rest(request_type): +def test_get_key_event_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "skadnetwork_conversion_value_schema": { - "name": 
"properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" - } - } - request_init["skadnetwork_conversion_value_schema"] = { - "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3", - "postback_window_one": { - "conversion_values": [ - { - "display_name": "display_name_value", - "fine_value": 1054, - "coarse_value": 1, - "event_mappings": [ - { - "event_name": "event_name_value", - "min_event_count": 1613, - "max_event_count": 1615, - "min_event_value": 0.16010000000000002, - "max_event_value": 0.1603, - } - ], - "lock_enabled": True, - } - ], - "postback_window_settings_enabled": True, - }, - "postback_window_two": {}, - "postback_window_three": {}, - "apply_conversion_values": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = ( - analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest.meta.fields[ - "skadnetwork_conversion_value_schema" - ] - ) - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "skadnetwork_conversion_value_schema" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["skadnetwork_conversion_value_schema"][field]) - ): - del 
request_init["skadnetwork_conversion_value_schema"][field][i][ - subfield - ] - else: - del request_init["skadnetwork_conversion_value_schema"][field][subfield] + request_init = {"name": "properties/sample1/keyEvents/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.SKAdNetworkConversionValueSchema( + return_value = resources.KeyEvent( name="name_value", - apply_conversion_values=True, + event_name="event_name_value", + deletable=True, + custom=True, + counting_method=resources.KeyEvent.CountingMethod.ONCE_PER_EVENT, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) + return_value = resources.KeyEvent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_sk_ad_network_conversion_value_schema(request) + response = client.get_key_event(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.SKAdNetworkConversionValueSchema) + assert isinstance(response, resources.KeyEvent) assert response.name == "name_value" - assert response.apply_conversion_values is True + assert response.event_name == "event_name_value" + assert response.deletable is True + assert response.custom is True + assert response.counting_method == resources.KeyEvent.CountingMethod.ONCE_PER_EVENT -def test_update_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rpc(): +def test_get_key_event_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -67486,39 +77872,35 @@ def test_update_sk_ad_network_conversion_value_schema_rest_use_cached_wrapped_rp wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_sk_ad_network_conversion_value_schema - in client._transport._wrapped_methods - ) + assert client._transport.get_key_event in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_sk_ad_network_conversion_value_schema - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_key_event] = mock_rpc request = {} - client.update_sk_ad_network_conversion_value_schema(request) + client.get_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_sk_ad_network_conversion_value_schema(request) + client.get_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_sk_ad_network_conversion_value_schema_rest_required_fields( - request_type=analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest, +def test_get_key_event_rest_required_fields( + request_type=analytics_admin.GetKeyEventRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -67529,23 +77911,21 @@ def test_update_sk_ad_network_conversion_value_schema_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_sk_ad_network_conversion_value_schema._get_unset_required_fields( - jsonified_request - ) + ).get_key_event._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_sk_ad_network_conversion_value_schema._get_unset_required_fields( - jsonified_request - ) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).get_key_event._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -67554,7 +77934,7 @@ def test_update_sk_ad_network_conversion_value_schema_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.SKAdNetworkConversionValueSchema() + return_value = resources.KeyEvent() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -67566,52 +77946,39 @@ def test_update_sk_ad_network_conversion_value_schema_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) + return_value = resources.KeyEvent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_sk_ad_network_conversion_value_schema(request) + response = client.get_key_event(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_sk_ad_network_conversion_value_schema_rest_unset_required_fields(): +def 
test_get_key_event_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_sk_ad_network_conversion_value_schema._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "skadnetworkConversionValueSchema", - "updateMask", - ) - ) - ) + unset_fields = transport.get_key_event._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_sk_ad_network_conversion_value_schema_rest_interceptors( - null_interceptor, -): +def test_get_key_event_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -67624,16 +77991,14 @@ def test_update_sk_ad_network_conversion_value_schema_rest_interceptors( ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_update_sk_ad_network_conversion_value_schema", + transports.AnalyticsAdminServiceRestInterceptor, "post_get_key_event" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_update_sk_ad_network_conversion_value_schema", + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_key_event" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest.pb( - analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest() + pb_message = analytics_admin.GetKeyEventRequest.pb( + analytics_admin.GetKeyEventRequest() ) transcode.return_value = { "method": "post", @@ -67645,19 +78010,17 @@ def test_update_sk_ad_network_conversion_value_schema_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = 
PreparedRequest() - req.return_value._content = resources.SKAdNetworkConversionValueSchema.to_json( - resources.SKAdNetworkConversionValueSchema() - ) + req.return_value._content = resources.KeyEvent.to_json(resources.KeyEvent()) - request = analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest() + request = analytics_admin.GetKeyEventRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.SKAdNetworkConversionValueSchema() + post.return_value = resources.KeyEvent() - client.update_sk_ad_network_conversion_value_schema( + client.get_key_event( request, metadata=[ ("key", "val"), @@ -67669,9 +78032,8 @@ def test_update_sk_ad_network_conversion_value_schema_rest_interceptors( post.assert_called_once() -def test_update_sk_ad_network_conversion_value_schema_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest, +def test_get_key_event_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetKeyEventRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -67679,11 +78041,7 @@ def test_update_sk_ad_network_conversion_value_schema_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "skadnetwork_conversion_value_schema": { - "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" - } - } + request_init = {"name": "properties/sample1/keyEvents/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -67695,10 +78053,10 @@ def test_update_sk_ad_network_conversion_value_schema_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_sk_ad_network_conversion_value_schema(request) + client.get_key_event(request) -def test_update_sk_ad_network_conversion_value_schema_rest_flattened(): +def test_get_key_event_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -67707,21 +78065,14 @@ def test_update_sk_ad_network_conversion_value_schema_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.SKAdNetworkConversionValueSchema() + return_value = resources.KeyEvent() # get arguments that satisfy an http rule for this method - sample_request = { - "skadnetwork_conversion_value_schema": { - "name": "properties/sample1/dataStreams/sample2/sKAdNetworkConversionValueSchema/sample3" - } - } + sample_request = {"name": "properties/sample1/keyEvents/sample2"} # get truthy value for each flattened field mock_args = dict( - skadnetwork_conversion_value_schema=resources.SKAdNetworkConversionValueSchema( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -67729,27 +78080,24 @@ def test_update_sk_ad_network_conversion_value_schema_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.SKAdNetworkConversionValueSchema.pb(return_value) + return_value = resources.KeyEvent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - 
client.update_sk_ad_network_conversion_value_schema(**mock_args) + client.get_key_event(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{skadnetwork_conversion_value_schema.name=properties/*/dataStreams/*/sKAdNetworkConversionValueSchema/*}" - % client.transport._host, + "%s/v1alpha/{name=properties/*/keyEvents/*}" % client.transport._host, args[1], ) -def test_update_sk_ad_network_conversion_value_schema_rest_flattened_error( - transport: str = "rest", -): +def test_get_key_event_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -67758,16 +78106,13 @@ def test_update_sk_ad_network_conversion_value_schema_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_sk_ad_network_conversion_value_schema( - analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest(), - skadnetwork_conversion_value_schema=resources.SKAdNetworkConversionValueSchema( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_key_event( + analytics_admin.GetKeyEventRequest(), + name="name_value", ) -def test_update_sk_ad_network_conversion_value_schema_rest_error(): +def test_get_key_event_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -67776,46 +78121,39 @@ def test_update_sk_ad_network_conversion_value_schema_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListSKAdNetworkConversionValueSchemasRequest, + analytics_admin.DeleteKeyEventRequest, dict, ], ) -def test_list_sk_ad_network_conversion_value_schemas_rest(request_type): +def 
test_delete_key_event_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request_init = {"name": "properties/sample1/keyEvents/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListSKAdNetworkConversionValueSchemasResponse( - next_page_token="next_page_token_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListSKAdNetworkConversionValueSchemasResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_sk_ad_network_conversion_value_schemas(request) + response = client.delete_key_event(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSKAdNetworkConversionValueSchemasPager) - assert response.next_page_token == "next_page_token_value" + assert response is None -def test_list_sk_ad_network_conversion_value_schemas_rest_use_cached_wrapped_rpc(): +def test_delete_key_event_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -67829,10 +78167,7 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_use_cached_wrapped_rpc wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_sk_ad_network_conversion_value_schemas - in client._transport._wrapped_methods - ) + assert client._transport.delete_key_event in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -67840,29 +78175,29 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_use_cached_wrapped_rpc "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_sk_ad_network_conversion_value_schemas + client._transport.delete_key_event ] = mock_rpc request = {} - client.list_sk_ad_network_conversion_value_schemas(request) + client.delete_key_event(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_sk_ad_network_conversion_value_schemas(request) + client.delete_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_sk_ad_network_conversion_value_schemas_rest_required_fields( - request_type=analytics_admin.ListSKAdNetworkConversionValueSchemasRequest, +def test_delete_key_event_rest_required_fields( + request_type=analytics_admin.DeleteKeyEventRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -67873,32 +78208,21 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_sk_ad_network_conversion_value_schemas._get_unset_required_fields( - jsonified_request - ) + ).delete_key_event._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_sk_ad_network_conversion_value_schemas._get_unset_required_fields( - jsonified_request - ) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).delete_key_event._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -67907,7 +78231,7 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListSKAdNetworkConversionValueSchemasResponse() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -67919,55 +78243,36 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = ( - analytics_admin.ListSKAdNetworkConversionValueSchemasResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_sk_ad_network_conversion_value_schemas(request) + response = client.delete_key_event(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_list_sk_ad_network_conversion_value_schemas_rest_unset_required_fields(): +def test_delete_key_event_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_sk_ad_network_conversion_value_schemas._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.delete_key_event._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_sk_ad_network_conversion_value_schemas_rest_interceptors( - null_interceptor, -): +def test_delete_key_event_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -67980,16 +78285,11 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_interceptors( ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_list_sk_ad_network_conversion_value_schemas", - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_list_sk_ad_network_conversion_value_schemas", + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_key_event" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.ListSKAdNetworkConversionValueSchemasRequest.pb( - analytics_admin.ListSKAdNetworkConversionValueSchemasRequest() + pb_message = analytics_admin.DeleteKeyEventRequest.pb( + analytics_admin.DeleteKeyEventRequest() ) transcode.return_value = { "method": "post", @@ -68001,23 +78301,15 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = 
PreparedRequest() - req.return_value._content = ( - analytics_admin.ListSKAdNetworkConversionValueSchemasResponse.to_json( - analytics_admin.ListSKAdNetworkConversionValueSchemasResponse() - ) - ) - request = analytics_admin.ListSKAdNetworkConversionValueSchemasRequest() + request = analytics_admin.DeleteKeyEventRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ( - analytics_admin.ListSKAdNetworkConversionValueSchemasResponse() - ) - client.list_sk_ad_network_conversion_value_schemas( + client.delete_key_event( request, metadata=[ ("key", "val"), @@ -68026,12 +78318,10 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_interceptors( ) pre.assert_called_once() - post.assert_called_once() -def test_list_sk_ad_network_conversion_value_schemas_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.ListSKAdNetworkConversionValueSchemasRequest, +def test_delete_key_event_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteKeyEventRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -68039,7 +78329,7 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request_init = {"name": "properties/sample1/keyEvents/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -68051,10 +78341,10 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_sk_ad_network_conversion_value_schemas(request) + client.delete_key_event(request) -def test_list_sk_ad_network_conversion_value_schemas_rest_flattened(): +def test_delete_key_event_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -68063,44 +78353,37 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListSKAdNetworkConversionValueSchemasResponse() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1/dataStreams/sample2"} + sample_request = {"name": "properties/sample1/keyEvents/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListSKAdNetworkConversionValueSchemasResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_sk_ad_network_conversion_value_schemas(**mock_args) + client.delete_key_event(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*/dataStreams/*}/sKAdNetworkConversionValueSchema" - % client.transport._host, + "%s/v1alpha/{name=properties/*/keyEvents/*}" % client.transport._host, args[1], ) -def test_list_sk_ad_network_conversion_value_schemas_rest_flattened_error( - transport: str = "rest", -): +def test_delete_key_event_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -68109,107 +78392,39 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_sk_ad_network_conversion_value_schemas( - analytics_admin.ListSKAdNetworkConversionValueSchemasRequest(), - parent="parent_value", + client.delete_key_event( + analytics_admin.DeleteKeyEventRequest(), + name="name_value", ) -def test_list_sk_ad_network_conversion_value_schemas_rest_pager( - transport: str = "rest", -): +def test_delete_key_event_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListSKAdNetworkConversionValueSchemasResponse( - skadnetwork_conversion_value_schemas=[ - resources.SKAdNetworkConversionValueSchema(), - resources.SKAdNetworkConversionValueSchema(), - resources.SKAdNetworkConversionValueSchema(), - ], - next_page_token="abc", - ), - analytics_admin.ListSKAdNetworkConversionValueSchemasResponse( - skadnetwork_conversion_value_schemas=[], - next_page_token="def", - ), - analytics_admin.ListSKAdNetworkConversionValueSchemasResponse( - skadnetwork_conversion_value_schemas=[ - resources.SKAdNetworkConversionValueSchema(), - ], - next_page_token="ghi", - ), - analytics_admin.ListSKAdNetworkConversionValueSchemasResponse( - skadnetwork_conversion_value_schemas=[ - resources.SKAdNetworkConversionValueSchema(), - resources.SKAdNetworkConversionValueSchema(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListSKAdNetworkConversionValueSchemasResponse.to_json(x) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1/dataStreams/sample2"} - - pager = client.list_sk_ad_network_conversion_value_schemas( - request=sample_request - ) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, resources.SKAdNetworkConversionValueSchema) for i in results - ) - - pages = list( - client.list_sk_ad_network_conversion_value_schemas( - request=sample_request - ).pages - ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( 
"request_type", [ - analytics_admin.SearchChangeHistoryEventsRequest, + analytics_admin.ListKeyEventsRequest, dict, ], ) -def test_search_change_history_events_rest(request_type): +def test_list_key_events_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"account": "accounts/sample1"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.SearchChangeHistoryEventsResponse( + return_value = analytics_admin.ListKeyEventsResponse( next_page_token="next_page_token_value", ) @@ -68217,21 +78432,19 @@ def test_search_change_history_events_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.SearchChangeHistoryEventsResponse.pb( - return_value - ) + return_value = analytics_admin.ListKeyEventsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.search_change_history_events(request) + response = client.list_key_events(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchChangeHistoryEventsPager) + assert isinstance(response, pagers.ListKeyEventsPager) assert response.next_page_token == "next_page_token_value" -def test_search_change_history_events_rest_use_cached_wrapped_rpc(): +def test_list_key_events_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -68245,40 +78458,35 @@ def test_search_change_history_events_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.search_change_history_events - in client._transport._wrapped_methods - ) + assert client._transport.list_key_events in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.search_change_history_events - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_key_events] = mock_rpc request = {} - client.search_change_history_events(request) + client.list_key_events(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.search_change_history_events(request) + client.list_key_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_search_change_history_events_rest_required_fields( - request_type=analytics_admin.SearchChangeHistoryEventsRequest, +def test_list_key_events_rest_required_fields( + request_type=analytics_admin.ListKeyEventsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["account"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -68289,21 +78497,28 @@ def test_search_change_history_events_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).search_change_history_events._get_unset_required_fields(jsonified_request) + ).list_key_events._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["account"] = "account_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).search_change_history_events._get_unset_required_fields(jsonified_request) + ).list_key_events._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "account" in jsonified_request - assert jsonified_request["account"] == "account_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -68312,7 +78527,7 @@ def test_search_change_history_events_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.SearchChangeHistoryEventsResponse() + return_value = analytics_admin.ListKeyEventsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -68324,42 +78539,47 @@ def test_search_change_history_events_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.SearchChangeHistoryEventsResponse.pb( - return_value - ) + return_value = analytics_admin.ListKeyEventsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.search_change_history_events(request) + response = client.list_key_events(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_search_change_history_events_rest_unset_required_fields(): +def test_list_key_events_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.search_change_history_events._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("account",))) + unset_fields = transport.list_key_events._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_change_history_events_rest_interceptors(null_interceptor): +def test_list_key_events_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -68372,16 +78592,14 @@ def test_search_change_history_events_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_search_change_history_events", + transports.AnalyticsAdminServiceRestInterceptor, "post_list_key_events" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_search_change_history_events", + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_key_events" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.SearchChangeHistoryEventsRequest.pb( - analytics_admin.SearchChangeHistoryEventsRequest() + pb_message = analytics_admin.ListKeyEventsRequest.pb( + analytics_admin.ListKeyEventsRequest() ) transcode.return_value = { "method": "post", @@ -68393,21 +78611,19 @@ def test_search_change_history_events_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - 
analytics_admin.SearchChangeHistoryEventsResponse.to_json( - analytics_admin.SearchChangeHistoryEventsResponse() - ) + req.return_value._content = analytics_admin.ListKeyEventsResponse.to_json( + analytics_admin.ListKeyEventsResponse() ) - request = analytics_admin.SearchChangeHistoryEventsRequest() + request = analytics_admin.ListKeyEventsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.SearchChangeHistoryEventsResponse() + post.return_value = analytics_admin.ListKeyEventsResponse() - client.search_change_history_events( + client.list_key_events( request, metadata=[ ("key", "val"), @@ -68419,9 +78635,8 @@ def test_search_change_history_events_rest_interceptors(null_interceptor): post.assert_called_once() -def test_search_change_history_events_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.SearchChangeHistoryEventsRequest, +def test_list_key_events_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListKeyEventsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -68429,7 +78644,7 @@ def test_search_change_history_events_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"account": "accounts/sample1"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -68441,10 +78656,66 @@ def test_search_change_history_events_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.search_change_history_events(request) + client.list_key_events(request) -def test_search_change_history_events_rest_pager(transport: str = "rest"): +def test_list_key_events_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_admin.ListKeyEventsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_admin.ListKeyEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_key_events(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=properties/*}/keyEvents" % client.transport._host, + args[1], + ) + + +def test_list_key_events_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_key_events( + analytics_admin.ListKeyEventsRequest(), + parent="parent_value", + ) + + +def test_list_key_events_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -68456,28 +78727,28 @@ def test_search_change_history_events_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - analytics_admin.SearchChangeHistoryEventsResponse( - change_history_events=[ - resources.ChangeHistoryEvent(), - resources.ChangeHistoryEvent(), - resources.ChangeHistoryEvent(), + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + resources.KeyEvent(), + resources.KeyEvent(), ], next_page_token="abc", ), - analytics_admin.SearchChangeHistoryEventsResponse( - change_history_events=[], + analytics_admin.ListKeyEventsResponse( + key_events=[], next_page_token="def", ), - analytics_admin.SearchChangeHistoryEventsResponse( - change_history_events=[ - resources.ChangeHistoryEvent(), + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), ], next_page_token="ghi", ), - analytics_admin.SearchChangeHistoryEventsResponse( - change_history_events=[ - resources.ChangeHistoryEvent(), - resources.ChangeHistoryEvent(), + analytics_admin.ListKeyEventsResponse( + key_events=[ + resources.KeyEvent(), + resources.KeyEvent(), ], ), ) @@ 
-68486,8 +78757,7 @@ def test_search_change_history_events_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - analytics_admin.SearchChangeHistoryEventsResponse.to_json(x) - for x in response + analytics_admin.ListKeyEventsResponse.to_json(x) for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -68495,15 +78765,15 @@ def test_search_change_history_events_rest_pager(transport: str = "rest"): return_val.status_code = 200 req.side_effect = return_values - sample_request = {"account": "accounts/sample1"} + sample_request = {"parent": "properties/sample1"} - pager = client.search_change_history_events(request=sample_request) + pager = client.list_key_events(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, resources.ChangeHistoryEvent) for i in results) + assert all(isinstance(i, resources.KeyEvent) for i in results) - pages = list(client.search_change_history_events(request=sample_request).pages) + pages = list(client.list_key_events(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -68511,51 +78781,48 @@ def test_search_change_history_events_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetGoogleSignalsSettingsRequest, + analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, dict, ], ) -def test_get_google_signals_settings_rest(request_type): +def test_get_display_video360_advertiser_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/googleSignalsSettings"} + request_init = {"name": "properties/sample1/displayVideo360AdvertiserLinks/sample2"} request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.GoogleSignalsSettings( + return_value = resources.DisplayVideo360AdvertiserLink( name="name_value", - state=resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED, - consent=resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.GoogleSignalsSettings.pb(return_value) + return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_google_signals_settings(request) + response = client.get_display_video360_advertiser_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.GoogleSignalsSettings) + assert isinstance(response, resources.DisplayVideo360AdvertiserLink) assert response.name == "name_value" - assert response.state == resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED - assert ( - response.consent - == resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED - ) + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" -def test_get_google_signals_settings_rest_use_cached_wrapped_rpc(): +def test_get_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -68570,7 +78837,7 @@ def test_get_google_signals_settings_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_google_signals_settings + client._transport.get_display_video360_advertiser_link in client._transport._wrapped_methods ) @@ -68580,24 +78847,24 @@ def test_get_google_signals_settings_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_google_signals_settings + client._transport.get_display_video360_advertiser_link ] = mock_rpc request = {} - client.get_google_signals_settings(request) + client.get_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_google_signals_settings(request) + client.get_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_google_signals_settings_rest_required_fields( - request_type=analytics_admin.GetGoogleSignalsSettingsRequest, +def test_get_display_video360_advertiser_link_rest_required_fields( + request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -68613,7 +78880,7 @@ def test_get_google_signals_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_google_signals_settings._get_unset_required_fields(jsonified_request) + ).get_display_video360_advertiser_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -68622,7 +78889,7 @@ def test_get_google_signals_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_google_signals_settings._get_unset_required_fields(jsonified_request) + ).get_display_video360_advertiser_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -68636,7 +78903,7 @@ def test_get_google_signals_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.GoogleSignalsSettings() + return_value = resources.DisplayVideo360AdvertiserLink() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -68657,30 +78924,32 @@ def test_get_google_signals_settings_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.GoogleSignalsSettings.pb(return_value) + return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_google_signals_settings(request) + response = client.get_display_video360_advertiser_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_google_signals_settings_rest_unset_required_fields(): +def test_get_display_video360_advertiser_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_google_signals_settings._get_unset_required_fields({}) + unset_fields = ( + transport.get_display_video360_advertiser_link._get_unset_required_fields({}) + ) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_google_signals_settings_rest_interceptors(null_interceptor): +def test_get_display_video360_advertiser_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -68694,15 +78963,15 @@ def test_get_google_signals_settings_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_get_google_signals_settings", + "post_get_display_video360_advertiser_link", ) as post, 
mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_get_google_signals_settings", + "pre_get_display_video360_advertiser_link", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetGoogleSignalsSettingsRequest.pb( - analytics_admin.GetGoogleSignalsSettingsRequest() + pb_message = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest.pb( + analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() ) transcode.return_value = { "method": "post", @@ -68714,19 +78983,19 @@ def test_get_google_signals_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.GoogleSignalsSettings.to_json( - resources.GoogleSignalsSettings() + req.return_value._content = resources.DisplayVideo360AdvertiserLink.to_json( + resources.DisplayVideo360AdvertiserLink() ) - request = analytics_admin.GetGoogleSignalsSettingsRequest() + request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.GoogleSignalsSettings() + post.return_value = resources.DisplayVideo360AdvertiserLink() - client.get_google_signals_settings( + client.get_display_video360_advertiser_link( request, metadata=[ ("key", "val"), @@ -68738,9 +79007,9 @@ def test_get_google_signals_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_google_signals_settings_rest_bad_request( +def test_get_display_video360_advertiser_link_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.GetGoogleSignalsSettingsRequest, + request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -68748,7 +79017,7 @@ def test_get_google_signals_settings_rest_bad_request( ) # 
send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/googleSignalsSettings"} + request_init = {"name": "properties/sample1/displayVideo360AdvertiserLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -68760,10 +79029,10 @@ def test_get_google_signals_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_google_signals_settings(request) + client.get_display_video360_advertiser_link(request) -def test_get_google_signals_settings_rest_flattened(): +def test_get_display_video360_advertiser_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -68772,10 +79041,12 @@ def test_get_google_signals_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.GoogleSignalsSettings() + return_value = resources.DisplayVideo360AdvertiserLink() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/googleSignalsSettings"} + sample_request = { + "name": "properties/sample1/displayVideo360AdvertiserLinks/sample2" + } # get truthy value for each flattened field mock_args = dict( @@ -68787,25 +79058,27 @@ def test_get_google_signals_settings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.GoogleSignalsSettings.pb(return_value) + return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_google_signals_settings(**mock_args) + client.get_display_video360_advertiser_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/googleSignalsSettings}" + "%s/v1alpha/{name=properties/*/displayVideo360AdvertiserLinks/*}" % client.transport._host, args[1], ) -def test_get_google_signals_settings_rest_flattened_error(transport: str = "rest"): +def test_get_display_video360_advertiser_link_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -68814,13 +79087,13 @@ def test_get_google_signals_settings_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_google_signals_settings( - analytics_admin.GetGoogleSignalsSettingsRequest(), + client.get_display_video360_advertiser_link( + analytics_admin.GetDisplayVideo360AdvertiserLinkRequest(), name="name_value", ) -def test_get_google_signals_settings_rest_error(): +def test_get_display_video360_advertiser_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -68829,129 +79102,46 @@ def test_get_google_signals_settings_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateGoogleSignalsSettingsRequest, + analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, dict, ], ) -def test_update_google_signals_settings_rest(request_type): +def test_list_display_video360_advertiser_links_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "google_signals_settings": {"name": "properties/sample1/googleSignalsSettings"} - } - request_init["google_signals_settings"] = { - "name": "properties/sample1/googleSignalsSettings", - "state": 1, - "consent": 2, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateGoogleSignalsSettingsRequest.meta.fields[ - "google_signals_settings" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "google_signals_settings" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["google_signals_settings"][field])): - del 
request_init["google_signals_settings"][field][i][subfield] - else: - del request_init["google_signals_settings"][field][subfield] + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.GoogleSignalsSettings( - name="name_value", - state=resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED, - consent=resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED, + return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.GoogleSignalsSettings.pb(return_value) + return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_google_signals_settings(request) + response = client.list_display_video360_advertiser_links(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.GoogleSignalsSettings) - assert response.name == "name_value" - assert response.state == resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED - assert ( - response.consent - == resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED - ) + assert isinstance(response, pagers.ListDisplayVideo360AdvertiserLinksPager) + assert response.next_page_token == "next_page_token_value" -def test_update_google_signals_settings_rest_use_cached_wrapped_rpc(): +def test_list_display_video360_advertiser_links_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -68966,7 +79156,7 @@ def test_update_google_signals_settings_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_google_signals_settings + client._transport.list_display_video360_advertiser_links in client._transport._wrapped_methods ) @@ -68976,28 +79166,29 @@ def test_update_google_signals_settings_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_google_signals_settings + client._transport.list_display_video360_advertiser_links ] = mock_rpc request = {} - client.update_google_signals_settings(request) + client.list_display_video360_advertiser_links(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_google_signals_settings(request) + client.list_display_video360_advertiser_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_google_signals_settings_rest_required_fields( - request_type=analytics_admin.UpdateGoogleSignalsSettingsRequest, +def test_list_display_video360_advertiser_links_rest_required_fields( + request_type=analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -69008,19 +79199,32 @@ def test_update_google_signals_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_google_signals_settings._get_unset_required_fields(jsonified_request) + ).list_display_video360_advertiser_links._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_google_signals_settings._get_unset_required_fields(jsonified_request) + ).list_display_video360_advertiser_links._get_unset_required_fields( + jsonified_request + ) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -69029,7 +79233,7 @@ def test_update_google_signals_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.GoogleSignalsSettings() + return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -69041,50 +79245,53 @@ def test_update_google_signals_settings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.GoogleSignalsSettings.pb(return_value) + return_value = ( + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_google_signals_settings(request) + response = client.list_display_video360_advertiser_links(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_update_google_signals_settings_rest_unset_required_fields(): +def test_list_display_video360_advertiser_links_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_google_signals_settings._get_unset_required_fields( - {} + unset_fields = ( + transport.list_display_video360_advertiser_links._get_unset_required_fields({}) ) assert set(unset_fields) == ( - set(("updateMask",)) - & set( + set( ( - "googleSignalsSettings", - "updateMask", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_google_signals_settings_rest_interceptors(null_interceptor): +def test_list_display_video360_advertiser_links_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -69098,15 +79305,15 @@ def test_update_google_signals_settings_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_update_google_signals_settings", + "post_list_display_video360_advertiser_links", ) as post, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_update_google_signals_settings", + "pre_list_display_video360_advertiser_links", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateGoogleSignalsSettingsRequest.pb( - analytics_admin.UpdateGoogleSignalsSettingsRequest() + pb_message = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest.pb( + analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() ) transcode.return_value = { "method": "post", @@ -69118,19 +79325,21 @@ def test_update_google_signals_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 
req.return_value.request = PreparedRequest() - req.return_value._content = resources.GoogleSignalsSettings.to_json( - resources.GoogleSignalsSettings() + req.return_value._content = ( + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse.to_json( + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() + ) ) - request = analytics_admin.UpdateGoogleSignalsSettingsRequest() + request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.GoogleSignalsSettings() + post.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() - client.update_google_signals_settings( + client.list_display_video360_advertiser_links( request, metadata=[ ("key", "val"), @@ -69142,9 +79351,9 @@ def test_update_google_signals_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_google_signals_settings_rest_bad_request( +def test_list_display_video360_advertiser_links_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.UpdateGoogleSignalsSettingsRequest, + request_type=analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -69152,9 +79361,7 @@ def test_update_google_signals_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "google_signals_settings": {"name": "properties/sample1/googleSignalsSettings"} - } + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -69166,10 +79373,10 @@ def test_update_google_signals_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_google_signals_settings(request) + client.list_display_video360_advertiser_links(request) -def test_update_google_signals_settings_rest_flattened(): +def test_list_display_video360_advertiser_links_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -69178,19 +79385,14 @@ def test_update_google_signals_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.GoogleSignalsSettings() + return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "google_signals_settings": { - "name": "properties/sample1/googleSignalsSettings" - } - } + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - google_signals_settings=resources.GoogleSignalsSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) @@ -69198,25 +79400,29 @@ def test_update_google_signals_settings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.GoogleSignalsSettings.pb(return_value) + return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_google_signals_settings(**mock_args) + 
client.list_display_video360_advertiser_links(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{google_signals_settings.name=properties/*/googleSignalsSettings}" + "%s/v1alpha/{parent=properties/*}/displayVideo360AdvertiserLinks" % client.transport._host, args[1], ) -def test_update_google_signals_settings_rest_flattened_error(transport: str = "rest"): +def test_list_display_video360_advertiser_links_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -69225,27 +79431,88 @@ def test_update_google_signals_settings_rest_flattened_error(transport: str = "r # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_google_signals_settings( - analytics_admin.UpdateGoogleSignalsSettingsRequest(), - google_signals_settings=resources.GoogleSignalsSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_display_video360_advertiser_links( + analytics_admin.ListDisplayVideo360AdvertiserLinksRequest(), + parent="parent_value", + ) + + +def test_list_display_video360_advertiser_links_rest_pager(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[], + next_page_token="def", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( + display_video_360_advertiser_links=[ + resources.DisplayVideo360AdvertiserLink(), + resources.DisplayVideo360AdvertiserLink(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_display_video360_advertiser_links(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, resources.DisplayVideo360AdvertiserLink) for i in results ) - -def test_update_google_signals_settings_rest_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pages = list( + client.list_display_video360_advertiser_links(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert 
page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateConversionEventRequest, + analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, dict, ], ) -def test_create_conversion_event_rest(request_type): +def test_create_display_video360_advertiser_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -69253,25 +79520,21 @@ def test_create_conversion_event_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "properties/sample1"} - request_init["conversion_event"] = { + request_init["display_video_360_advertiser_link"] = { "name": "name_value", - "event_name": "event_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "deletable": True, - "custom": True, - "counting_method": 1, - "default_conversion_value": { - "value": 0.541, - "currency_code": "currency_code_value", - }, + "advertiser_id": "advertiser_id_value", + "advertiser_display_name": "advertiser_display_name_value", + "ads_personalization_enabled": {"value": True}, + "campaign_data_sharing_enabled": {}, + "cost_data_sharing_enabled": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateConversionEventRequest.meta.fields[ - "conversion_event" + test_field = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest.meta.fields[ + "display_video_360_advertiser_link" ] def get_message_fields(field): @@ -69300,7 +79563,9 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["conversion_event"].items(): # pragma: NO COVER + for field, value in request_init[ + "display_video_360_advertiser_link" + ].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -69330,47 +79595,44 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["conversion_event"][field])): - del request_init["conversion_event"][field][i][subfield] + for i in range( + 0, len(request_init["display_video_360_advertiser_link"][field]) + ): + del request_init["display_video_360_advertiser_link"][field][i][ + subfield + ] else: - del request_init["conversion_event"][field][subfield] + del request_init["display_video_360_advertiser_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.ConversionEvent( + return_value = resources.DisplayVideo360AdvertiserLink( name="name_value", - event_name="event_name_value", - deletable=True, - custom=True, - counting_method=resources.ConversionEvent.ConversionCountingMethod.ONCE_PER_EVENT, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.ConversionEvent.pb(return_value) + return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_conversion_event(request) + response = client.create_display_video360_advertiser_link(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.ConversionEvent) + assert isinstance(response, resources.DisplayVideo360AdvertiserLink) assert response.name == "name_value" - assert response.event_name == "event_name_value" - assert response.deletable is True - assert response.custom is True - assert ( - response.counting_method - == resources.ConversionEvent.ConversionCountingMethod.ONCE_PER_EVENT - ) + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" -def test_create_conversion_event_rest_use_cached_wrapped_rpc(): +def test_create_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -69385,7 +79647,7 @@ def test_create_conversion_event_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - 
client._transport.create_conversion_event + client._transport.create_display_video360_advertiser_link in client._transport._wrapped_methods ) @@ -69395,24 +79657,24 @@ def test_create_conversion_event_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_conversion_event + client._transport.create_display_video360_advertiser_link ] = mock_rpc request = {} - client.create_conversion_event(request) + client.create_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_conversion_event(request) + client.create_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_conversion_event_rest_required_fields( - request_type=analytics_admin.CreateConversionEventRequest, +def test_create_display_video360_advertiser_link_rest_required_fields( + request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -69428,7 +79690,9 @@ def test_create_conversion_event_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_conversion_event._get_unset_required_fields(jsonified_request) + ).create_display_video360_advertiser_link._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -69437,7 +79701,9 @@ def test_create_conversion_event_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_conversion_event._get_unset_required_fields(jsonified_request) + ).create_display_video360_advertiser_link._get_unset_required_fields( + jsonified_request + ) 
jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -69451,7 +79717,7 @@ def test_create_conversion_event_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.ConversionEvent() + return_value = resources.DisplayVideo360AdvertiserLink() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -69473,38 +79739,40 @@ def test_create_conversion_event_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.ConversionEvent.pb(return_value) + return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_conversion_event(request) + response = client.create_display_video360_advertiser_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_conversion_event_rest_unset_required_fields(): +def test_create_display_video360_advertiser_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_conversion_event._get_unset_required_fields({}) + unset_fields = ( + transport.create_display_video360_advertiser_link._get_unset_required_fields({}) + ) assert set(unset_fields) == ( set(()) & set( ( - "conversionEvent", "parent", + "displayVideo360AdvertiserLink", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_conversion_event_rest_interceptors(null_interceptor): +def 
test_create_display_video360_advertiser_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -69517,14 +79785,16 @@ def test_create_conversion_event_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_conversion_event" + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_display_video360_advertiser_link", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_conversion_event" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_create_display_video360_advertiser_link", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateConversionEventRequest.pb( - analytics_admin.CreateConversionEventRequest() + pb_message = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest.pb( + analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() ) transcode.return_value = { "method": "post", @@ -69536,19 +79806,19 @@ def test_create_conversion_event_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.ConversionEvent.to_json( - resources.ConversionEvent() + req.return_value._content = resources.DisplayVideo360AdvertiserLink.to_json( + resources.DisplayVideo360AdvertiserLink() ) - request = analytics_admin.CreateConversionEventRequest() + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.ConversionEvent() + post.return_value = resources.DisplayVideo360AdvertiserLink() - client.create_conversion_event( + client.create_display_video360_advertiser_link( 
request, metadata=[ ("key", "val"), @@ -69560,8 +79830,9 @@ def test_create_conversion_event_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_conversion_event_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateConversionEventRequest +def test_create_display_video360_advertiser_link_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -69581,10 +79852,10 @@ def test_create_conversion_event_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_conversion_event(request) + client.create_display_video360_advertiser_link(request) -def test_create_conversion_event_rest_flattened(): +def test_create_display_video360_advertiser_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -69593,7 +79864,7 @@ def test_create_conversion_event_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.ConversionEvent() + return_value = resources.DisplayVideo360AdvertiserLink() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1"} @@ -69601,7 +79872,9 @@ def test_create_conversion_event_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - conversion_event=resources.ConversionEvent(name="name_value"), + display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( + name="name_value" + ), ) mock_args.update(sample_request) @@ -69609,25 +79882,27 @@ def test_create_conversion_event_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.ConversionEvent.pb(return_value) + return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_conversion_event(**mock_args) + client.create_display_video360_advertiser_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/conversionEvents" + "%s/v1alpha/{parent=properties/*}/displayVideo360AdvertiserLinks" % client.transport._host, args[1], ) -def test_create_conversion_event_rest_flattened_error(transport: str = "rest"): +def test_create_display_video360_advertiser_link_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -69636,14 +79911,16 @@ def test_create_conversion_event_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_conversion_event( - analytics_admin.CreateConversionEventRequest(), + client.create_display_video360_advertiser_link( + analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest(), parent="parent_value", - conversion_event=resources.ConversionEvent(name="name_value"), + display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( + name="name_value" + ), ) -def test_create_conversion_event_rest_error(): +def test_create_display_video360_advertiser_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -69652,138 +79929,39 @@ def test_create_conversion_event_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateConversionEventRequest, + analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, dict, ], ) -def test_update_conversion_event_rest(request_type): +def test_delete_display_video360_advertiser_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "conversion_event": {"name": "properties/sample1/conversionEvents/sample2"} - } - request_init["conversion_event"] = { - "name": "properties/sample1/conversionEvents/sample2", - "event_name": "event_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "deletable": True, - "custom": True, - "counting_method": 1, - "default_conversion_value": { - "value": 0.541, - "currency_code": "currency_code_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateConversionEventRequest.meta.fields[ - "conversion_event" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["conversion_event"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the 
runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["conversion_event"][field])): - del request_init["conversion_event"][field][i][subfield] - else: - del request_init["conversion_event"][field][subfield] + request_init = {"name": "properties/sample1/displayVideo360AdvertiserLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.ConversionEvent( - name="name_value", - event_name="event_name_value", - deletable=True, - custom=True, - counting_method=resources.ConversionEvent.ConversionCountingMethod.ONCE_PER_EVENT, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.ConversionEvent.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_conversion_event(request) + response = client.delete_display_video360_advertiser_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.ConversionEvent) - assert response.name == "name_value" - assert response.event_name == "event_name_value" - assert response.deletable is True - assert response.custom is True - assert ( - response.counting_method - == resources.ConversionEvent.ConversionCountingMethod.ONCE_PER_EVENT - ) + assert response is None -def test_update_conversion_event_rest_use_cached_wrapped_rpc(): +def test_delete_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -69798,7 +79976,7 @@ def test_update_conversion_event_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_conversion_event + client._transport.delete_display_video360_advertiser_link in client._transport._wrapped_methods ) @@ -69808,28 +79986,29 @@ def test_update_conversion_event_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_conversion_event + client._transport.delete_display_video360_advertiser_link ] = mock_rpc request = {} - client.update_conversion_event(request) + client.delete_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_conversion_event(request) + client.delete_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_conversion_event_rest_required_fields( - request_type=analytics_admin.UpdateConversionEventRequest, +def test_delete_display_video360_advertiser_link_rest_required_fields( + request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -69840,19 +80019,25 @@ def test_update_conversion_event_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_conversion_event._get_unset_required_fields(jsonified_request) + ).delete_display_video360_advertiser_link._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_conversion_event._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).delete_display_video360_advertiser_link._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -69861,7 +80046,7 @@ def test_update_conversion_event_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.ConversionEvent() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -69873,48 +80058,38 @@ def test_update_conversion_event_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.ConversionEvent.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_conversion_event(request) + response = client.delete_display_video360_advertiser_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_conversion_event_rest_unset_required_fields(): +def test_delete_display_video360_advertiser_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_conversion_event._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "conversionEvent", - "updateMask", - ) - ) + unset_fields = ( + transport.delete_display_video360_advertiser_link._get_unset_required_fields({}) ) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_conversion_event_rest_interceptors(null_interceptor): +def test_delete_display_video360_advertiser_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -69927,14 +80102,12 @@ def test_update_conversion_event_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_update_conversion_event" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_conversion_event" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_delete_display_video360_advertiser_link", ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.UpdateConversionEventRequest.pb( - analytics_admin.UpdateConversionEventRequest() + pb_message = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest.pb( + analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() ) transcode.return_value = { "method": "post", @@ -69946,19 +80119,15 @@ def test_update_conversion_event_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.ConversionEvent.to_json( - resources.ConversionEvent() - ) - request = analytics_admin.UpdateConversionEventRequest() + request = 
analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.ConversionEvent() - client.update_conversion_event( + client.delete_display_video360_advertiser_link( request, metadata=[ ("key", "val"), @@ -69967,11 +80136,11 @@ def test_update_conversion_event_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_conversion_event_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.UpdateConversionEventRequest +def test_delete_display_video360_advertiser_link_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -69979,9 +80148,7 @@ def test_update_conversion_event_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "conversion_event": {"name": "properties/sample1/conversionEvents/sample2"} - } + request_init = {"name": "properties/sample1/displayVideo360AdvertiserLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -69993,10 +80160,10 @@ def test_update_conversion_event_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_conversion_event(request) + client.delete_display_video360_advertiser_link(request) -def test_update_conversion_event_rest_flattened(): +def test_delete_display_video360_advertiser_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -70005,43 +80172,42 @@ def test_update_conversion_event_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.ConversionEvent() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "conversion_event": {"name": "properties/sample1/conversionEvents/sample2"} + "name": "properties/sample1/displayVideo360AdvertiserLinks/sample2" } # get truthy value for each flattened field mock_args = dict( - conversion_event=resources.ConversionEvent(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.ConversionEvent.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_conversion_event(**mock_args) + client.delete_display_video360_advertiser_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{conversion_event.name=properties/*/conversionEvents/*}" + "%s/v1alpha/{name=properties/*/displayVideo360AdvertiserLinks/*}" % client.transport._host, args[1], ) -def test_update_conversion_event_rest_flattened_error(transport: str = "rest"): +def test_delete_display_video360_advertiser_link_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -70050,14 +80216,13 @@ def test_update_conversion_event_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_conversion_event( - analytics_admin.UpdateConversionEventRequest(), - conversion_event=resources.ConversionEvent(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_display_video360_advertiser_link( + analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest(), + name="name_value", ) -def test_update_conversion_event_rest_error(): +def test_delete_display_video360_advertiser_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -70066,55 +80231,135 @@ def test_update_conversion_event_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetConversionEventRequest, + analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, dict, ], ) -def test_get_conversion_event_rest(request_type): +def test_update_display_video360_advertiser_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/conversionEvents/sample2"} + request_init = { + 
"display_video_360_advertiser_link": { + "name": "properties/sample1/displayVideo360AdvertiserLinks/sample2" + } + } + request_init["display_video_360_advertiser_link"] = { + "name": "properties/sample1/displayVideo360AdvertiserLinks/sample2", + "advertiser_id": "advertiser_id_value", + "advertiser_display_name": "advertiser_display_name_value", + "ads_personalization_enabled": {"value": True}, + "campaign_data_sharing_enabled": {}, + "cost_data_sharing_enabled": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest.meta.fields[ + "display_video_360_advertiser_link" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "display_video_360_advertiser_link" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["display_video_360_advertiser_link"][field]) + ): + del 
request_init["display_video_360_advertiser_link"][field][i][ + subfield + ] + else: + del request_init["display_video_360_advertiser_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.ConversionEvent( + return_value = resources.DisplayVideo360AdvertiserLink( name="name_value", - event_name="event_name_value", - deletable=True, - custom=True, - counting_method=resources.ConversionEvent.ConversionCountingMethod.ONCE_PER_EVENT, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.ConversionEvent.pb(return_value) + return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_conversion_event(request) + response = client.update_display_video360_advertiser_link(request) - # Establish that the response is the type that we expect. - assert isinstance(response, resources.ConversionEvent) - assert response.name == "name_value" - assert response.event_name == "event_name_value" - assert response.deletable is True - assert response.custom is True - assert ( - response.counting_method - == resources.ConversionEvent.ConversionCountingMethod.ONCE_PER_EVENT - ) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.DisplayVideo360AdvertiserLink) + assert response.name == "name_value" + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" -def test_get_conversion_event_rest_use_cached_wrapped_rpc(): +def test_update_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -70129,7 +80374,8 @@ def test_get_conversion_event_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_conversion_event in client._transport._wrapped_methods + client._transport.update_display_video360_advertiser_link + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -70138,29 +80384,28 @@ def test_get_conversion_event_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_conversion_event + client._transport.update_display_video360_advertiser_link ] = mock_rpc request = {} - client.get_conversion_event(request) + client.update_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_conversion_event(request) + client.update_display_video360_advertiser_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_conversion_event_rest_required_fields( - request_type=analytics_admin.GetConversionEventRequest, +def test_update_display_video360_advertiser_link_rest_required_fields( + request_type=analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -70171,21 +80416,23 @@ def test_get_conversion_event_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_conversion_event._get_unset_required_fields(jsonified_request) + ).update_display_video360_advertiser_link._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_conversion_event._get_unset_required_fields(jsonified_request) + ).update_display_video360_advertiser_link._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -70194,7 +80441,7 @@ def test_get_conversion_event_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.ConversionEvent() + return_value = resources.DisplayVideo360AdvertiserLink() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -70206,39 +80453,42 @@ def test_get_conversion_event_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.ConversionEvent.pb(return_value) + return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_conversion_event(request) + response = client.update_display_video360_advertiser_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_conversion_event_rest_unset_required_fields(): +def test_update_display_video360_advertiser_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_conversion_event._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = ( + transport.update_display_video360_advertiser_link._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_conversion_event_rest_interceptors(null_interceptor): +def test_update_display_video360_advertiser_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -70251,14 +80501,16 @@ def test_get_conversion_event_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_conversion_event" + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_display_video360_advertiser_link", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_conversion_event" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_update_display_video360_advertiser_link", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetConversionEventRequest.pb( - analytics_admin.GetConversionEventRequest() + pb_message = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest.pb( + analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() ) transcode.return_value = { "method": "post", @@ -70270,19 +80522,19 @@ def test_get_conversion_event_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.ConversionEvent.to_json( - resources.ConversionEvent() + req.return_value._content = 
resources.DisplayVideo360AdvertiserLink.to_json( + resources.DisplayVideo360AdvertiserLink() ) - request = analytics_admin.GetConversionEventRequest() + request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.ConversionEvent() + post.return_value = resources.DisplayVideo360AdvertiserLink() - client.get_conversion_event( + client.update_display_video360_advertiser_link( request, metadata=[ ("key", "val"), @@ -70294,8 +80546,9 @@ def test_get_conversion_event_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_conversion_event_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetConversionEventRequest +def test_update_display_video360_advertiser_link_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -70303,7 +80556,11 @@ def test_get_conversion_event_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/conversionEvents/sample2"} + request_init = { + "display_video_360_advertiser_link": { + "name": "properties/sample1/displayVideo360AdvertiserLinks/sample2" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -70315,10 +80572,10 @@ def test_get_conversion_event_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_conversion_event(request) + client.update_display_video360_advertiser_link(request) -def test_get_conversion_event_rest_flattened(): +def test_update_display_video360_advertiser_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -70327,14 +80584,21 @@ def test_get_conversion_event_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.ConversionEvent() + return_value = resources.DisplayVideo360AdvertiserLink() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/conversionEvents/sample2"} + sample_request = { + "display_video_360_advertiser_link": { + "name": "properties/sample1/displayVideo360AdvertiserLinks/sample2" + } + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -70342,25 +80606,27 @@ def test_get_conversion_event_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.ConversionEvent.pb(return_value) + return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_conversion_event(**mock_args) + 
client.update_display_video360_advertiser_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/conversionEvents/*}" + "%s/v1alpha/{display_video_360_advertiser_link.name=properties/*/displayVideo360AdvertiserLinks/*}" % client.transport._host, args[1], ) -def test_get_conversion_event_rest_flattened_error(transport: str = "rest"): +def test_update_display_video360_advertiser_link_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -70369,13 +80635,16 @@ def test_get_conversion_event_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_conversion_event( - analytics_admin.GetConversionEventRequest(), - name="name_value", + client.update_display_video360_advertiser_link( + analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest(), + display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_conversion_event_rest_error(): +def test_update_display_video360_advertiser_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -70384,39 +80653,52 @@ def test_get_conversion_event_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteConversionEventRequest, + analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, dict, ], ) -def test_delete_conversion_event_rest(request_type): +def test_get_display_video360_advertiser_link_proposal_rest(request_type): client = AnalyticsAdminServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/conversionEvents/sample2"} + request_init = { + "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.DisplayVideo360AdvertiserLinkProposal( + name="name_value", + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", + validation_email="validation_email_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_conversion_event(request) + response = client.get_display_video360_advertiser_link_proposal(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) + assert response.name == "name_value" + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.validation_email == "validation_email_value" -def test_delete_conversion_event_rest_use_cached_wrapped_rpc(): +def test_get_display_video360_advertiser_link_proposal_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -70431,7 +80713,7 @@ def test_delete_conversion_event_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_conversion_event + client._transport.get_display_video360_advertiser_link_proposal in client._transport._wrapped_methods ) @@ -70441,24 +80723,24 @@ def test_delete_conversion_event_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_conversion_event + client._transport.get_display_video360_advertiser_link_proposal ] = mock_rpc request = {} - client.delete_conversion_event(request) + client.get_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_conversion_event(request) + client.get_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_conversion_event_rest_required_fields( - request_type=analytics_admin.DeleteConversionEventRequest, +def test_get_display_video360_advertiser_link_proposal_rest_required_fields( + request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -70474,7 +80756,9 @@ def test_delete_conversion_event_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_conversion_event._get_unset_required_fields(jsonified_request) + ).get_display_video360_advertiser_link_proposal._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -70483,7 +80767,9 @@ def test_delete_conversion_event_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_conversion_event._get_unset_required_fields(jsonified_request) + ).get_display_video360_advertiser_link_proposal._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -70497,7 +80783,7 @@ def test_delete_conversion_event_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.DisplayVideo360AdvertiserLinkProposal() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -70509,36 +80795,45 @@ def test_delete_conversion_event_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_conversion_event(request) + response = client.get_display_video360_advertiser_link_proposal(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_conversion_event_rest_unset_required_fields(): +def test_get_display_video360_advertiser_link_proposal_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_conversion_event._get_unset_required_fields({}) + unset_fields = transport.get_display_video360_advertiser_link_proposal._get_unset_required_fields( + {} + ) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_conversion_event_rest_interceptors(null_interceptor): +def test_get_display_video360_advertiser_link_proposal_rest_interceptors( + null_interceptor, +): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -70551,11 +80846,16 @@ def test_delete_conversion_event_rest_interceptors(null_interceptor): ) 
as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_conversion_event" + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_display_video360_advertiser_link_proposal", + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "pre_get_display_video360_advertiser_link_proposal", ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteConversionEventRequest.pb( - analytics_admin.DeleteConversionEventRequest() + post.assert_not_called() + pb_message = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest.pb( + analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() ) transcode.return_value = { "method": "post", @@ -70567,15 +80867,21 @@ def test_delete_conversion_event_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = ( + resources.DisplayVideo360AdvertiserLinkProposal.to_json( + resources.DisplayVideo360AdvertiserLinkProposal() + ) + ) - request = analytics_admin.DeleteConversionEventRequest() + request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.DisplayVideo360AdvertiserLinkProposal() - client.delete_conversion_event( + client.get_display_video360_advertiser_link_proposal( request, metadata=[ ("key", "val"), @@ -70584,10 +80890,12 @@ def test_delete_conversion_event_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_conversion_event_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeleteConversionEventRequest +def test_get_display_video360_advertiser_link_proposal_rest_bad_request( + transport: str = "rest", + 
request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -70595,7 +80903,9 @@ def test_delete_conversion_event_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/conversionEvents/sample2"} + request_init = { + "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -70607,10 +80917,10 @@ def test_delete_conversion_event_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_conversion_event(request) + client.get_display_video360_advertiser_link_proposal(request) -def test_delete_conversion_event_rest_flattened(): +def test_get_display_video360_advertiser_link_proposal_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -70619,10 +80929,12 @@ def test_delete_conversion_event_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.DisplayVideo360AdvertiserLinkProposal() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/conversionEvents/sample2"} + sample_request = { + "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + } # get truthy value for each flattened field mock_args = dict( @@ -70633,24 +80945,28 @@ def test_delete_conversion_event_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_conversion_event(**mock_args) + client.get_display_video360_advertiser_link_proposal(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/conversionEvents/*}" + "%s/v1alpha/{name=properties/*/displayVideo360AdvertiserLinkProposals/*}" % client.transport._host, args[1], ) -def test_delete_conversion_event_rest_flattened_error(transport: str = "rest"): +def test_get_display_video360_advertiser_link_proposal_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -70659,13 +80975,13 @@ def test_delete_conversion_event_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_conversion_event( - analytics_admin.DeleteConversionEventRequest(), + client.get_display_video360_advertiser_link_proposal( + analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest(), name="name_value", ) -def test_delete_conversion_event_rest_error(): +def test_get_display_video360_advertiser_link_proposal_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -70674,11 +80990,11 @@ def test_delete_conversion_event_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListConversionEventsRequest, + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, dict, ], ) -def test_list_conversion_events_rest(request_type): +def test_list_display_video360_advertiser_link_proposals_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -70691,27 +81007,33 @@ def test_list_conversion_events_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListConversionEventsResponse( - next_page_token="next_page_token_value", + return_value = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + next_page_token="next_page_token_value", + ) ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListConversionEventsResponse.pb(return_value) + return_value = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_conversion_events(request) + response = client.list_display_video360_advertiser_link_proposals(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConversionEventsPager) + assert isinstance(response, pagers.ListDisplayVideo360AdvertiserLinkProposalsPager) assert response.next_page_token == "next_page_token_value" -def test_list_conversion_events_rest_use_cached_wrapped_rpc(): +def test_list_display_video360_advertiser_link_proposals_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -70726,7 +81048,7 @@ def test_list_conversion_events_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_conversion_events + client._transport.list_display_video360_advertiser_link_proposals in client._transport._wrapped_methods ) @@ -70736,24 +81058,24 @@ def test_list_conversion_events_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.list_conversion_events + client._transport.list_display_video360_advertiser_link_proposals ] = mock_rpc request = {} - client.list_conversion_events(request) + client.list_display_video360_advertiser_link_proposals(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_conversion_events(request) + client.list_display_video360_advertiser_link_proposals(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_conversion_events_rest_required_fields( - request_type=analytics_admin.ListConversionEventsRequest, +def test_list_display_video360_advertiser_link_proposals_rest_required_fields( + request_type=analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -70769,7 +81091,9 @@ def test_list_conversion_events_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_conversion_events._get_unset_required_fields(jsonified_request) + ).list_display_video360_advertiser_link_proposals._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -70778,7 +81102,9 @@ def test_list_conversion_events_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_conversion_events._get_unset_required_fields(jsonified_request) + ).list_display_video360_advertiser_link_proposals._get_unset_required_fields( + jsonified_request + ) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( @@ -70799,7 +81125,7 @@ def test_list_conversion_events_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListConversionEventsResponse() + return_value = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -70820,25 +81146,31 @@ def test_list_conversion_events_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListConversionEventsResponse.pb(return_value) + return_value = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_conversion_events(request) + response = client.list_display_video360_advertiser_link_proposals(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_conversion_events_rest_unset_required_fields(): +def test_list_display_video360_advertiser_link_proposals_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_conversion_events._get_unset_required_fields({}) + unset_fields = transport.list_display_video360_advertiser_link_proposals._get_unset_required_fields( + {} + ) assert set(unset_fields) == ( set( ( @@ -70851,7 +81183,9 @@ def test_list_conversion_events_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_list_conversion_events_rest_interceptors(null_interceptor): +def test_list_display_video360_advertiser_link_proposals_rest_interceptors( + null_interceptor, +): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -70864,14 +81198,18 @@ def test_list_conversion_events_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_conversion_events" + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_display_video360_advertiser_link_proposals", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_conversion_events" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_list_display_video360_advertiser_link_proposals", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListConversionEventsRequest.pb( - analytics_admin.ListConversionEventsRequest() + pb_message = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest.pb( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() + ) ) transcode.return_value = { "method": "post", @@ -70884,20 +81222,22 @@ def test_list_conversion_events_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - analytics_admin.ListConversionEventsResponse.to_json( - analytics_admin.ListConversionEventsResponse() + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse.to_json( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() ) ) - request = analytics_admin.ListConversionEventsRequest() + request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = 
analytics_admin.ListConversionEventsResponse() + post.return_value = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() + ) - client.list_conversion_events( + client.list_display_video360_advertiser_link_proposals( request, metadata=[ ("key", "val"), @@ -70909,8 +81249,9 @@ def test_list_conversion_events_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_conversion_events_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListConversionEventsRequest +def test_list_display_video360_advertiser_link_proposals_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -70930,10 +81271,10 @@ def test_list_conversion_events_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_conversion_events(request) + client.list_display_video360_advertiser_link_proposals(request) -def test_list_conversion_events_rest_flattened(): +def test_list_display_video360_advertiser_link_proposals_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -70942,7 +81283,9 @@ def test_list_conversion_events_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListConversionEventsResponse() + return_value = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() + ) # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1"} @@ -70957,25 +81300,31 @@ def test_list_conversion_events_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListConversionEventsResponse.pb(return_value) + return_value = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_conversion_events(**mock_args) + client.list_display_video360_advertiser_link_proposals(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/conversionEvents" + "%s/v1alpha/{parent=properties/*}/displayVideo360AdvertiserLinkProposals" % client.transport._host, args[1], ) -def test_list_conversion_events_rest_flattened_error(transport: str = "rest"): +def test_list_display_video360_advertiser_link_proposals_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -70984,13 +81333,15 @@ def test_list_conversion_events_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_conversion_events( - analytics_admin.ListConversionEventsRequest(), + client.list_display_video360_advertiser_link_proposals( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest(), parent="parent_value", ) -def test_list_conversion_events_rest_pager(transport: str = "rest"): +def test_list_display_video360_advertiser_link_proposals_rest_pager( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -71002,28 +81353,28 @@ def test_list_conversion_events_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - analytics_admin.ListConversionEventsResponse( - conversion_events=[ - resources.ConversionEvent(), - resources.ConversionEvent(), - resources.ConversionEvent(), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), ], next_page_token="abc", ), - analytics_admin.ListConversionEventsResponse( - conversion_events=[], + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[], next_page_token="def", ), - analytics_admin.ListConversionEventsResponse( - conversion_events=[ - resources.ConversionEvent(), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), ], next_page_token="ghi", ), - analytics_admin.ListConversionEventsResponse( - conversion_events=[ - resources.ConversionEvent(), - resources.ConversionEvent(), + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( + 
display_video_360_advertiser_link_proposals=[ + resources.DisplayVideo360AdvertiserLinkProposal(), + resources.DisplayVideo360AdvertiserLinkProposal(), ], ), ) @@ -71032,7 +81383,10 @@ def test_list_conversion_events_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - analytics_admin.ListConversionEventsResponse.to_json(x) for x in response + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse.to_json( + x + ) + for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -71042,13 +81396,22 @@ def test_list_conversion_events_rest_pager(transport: str = "rest"): sample_request = {"parent": "properties/sample1"} - pager = client.list_conversion_events(request=sample_request) + pager = client.list_display_video360_advertiser_link_proposals( + request=sample_request + ) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, resources.ConversionEvent) for i in results) + assert all( + isinstance(i, resources.DisplayVideo360AdvertiserLinkProposal) + for i in results + ) - pages = list(client.list_conversion_events(request=sample_request).pages) + pages = list( + client.list_display_video360_advertiser_link_proposals( + request=sample_request + ).pages + ) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -71056,48 +81419,148 @@ def test_list_conversion_events_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, + analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, dict, ], ) -def test_get_display_video360_advertiser_link_rest(request_type): +def test_create_display_video360_advertiser_link_proposal_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will 
satisfy transcoding - request_init = {"name": "properties/sample1/displayVideo360AdvertiserLinks/sample2"} + request_init = {"parent": "properties/sample1"} + request_init["display_video_360_advertiser_link_proposal"] = { + "name": "name_value", + "advertiser_id": "advertiser_id_value", + "link_proposal_status_details": { + "link_proposal_initiating_product": 1, + "requestor_email": "requestor_email_value", + "link_proposal_state": 1, + }, + "advertiser_display_name": "advertiser_display_name_value", + "validation_email": "validation_email_value", + "ads_personalization_enabled": {"value": True}, + "campaign_data_sharing_enabled": {}, + "cost_data_sharing_enabled": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest.meta.fields[ + "display_video_360_advertiser_link_proposal" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "display_video_360_advertiser_link_proposal" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["display_video_360_advertiser_link_proposal"][ + 
field + ] + ), + ): + del request_init["display_video_360_advertiser_link_proposal"][ + field + ][i][subfield] + else: + del request_init["display_video_360_advertiser_link_proposal"][field][ + subfield + ] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLink( + return_value = resources.DisplayVideo360AdvertiserLinkProposal( name="name_value", advertiser_id="advertiser_id_value", advertiser_display_name="advertiser_display_name_value", + validation_email="validation_email_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) + return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_display_video360_advertiser_link(request) + response = client.create_display_video360_advertiser_link_proposal(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DisplayVideo360AdvertiserLink) + assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) assert response.name == "name_value" assert response.advertiser_id == "advertiser_id_value" assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.validation_email == "validation_email_value" -def test_get_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): +def test_create_display_video360_advertiser_link_proposal_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -71112,7 +81575,7 @@ def test_get_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_display_video360_advertiser_link + client._transport.create_display_video360_advertiser_link_proposal in client._transport._wrapped_methods ) @@ -71122,29 +81585,29 @@ def test_get_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_display_video360_advertiser_link + client._transport.create_display_video360_advertiser_link_proposal ] = mock_rpc request = {} - client.get_display_video360_advertiser_link(request) + client.create_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_display_video360_advertiser_link(request) + client.create_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_display_video360_advertiser_link_rest_required_fields( - request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, +def test_create_display_video360_advertiser_link_proposal_rest_required_fields( + request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -71155,21 +81618,25 @@ def test_get_display_video360_advertiser_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_display_video360_advertiser_link._get_unset_required_fields(jsonified_request) + ).create_display_video360_advertiser_link_proposal._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_display_video360_advertiser_link._get_unset_required_fields(jsonified_request) + ).create_display_video360_advertiser_link_proposal._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -71178,7 +81645,7 @@ def test_get_display_video360_advertiser_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLink() + return_value = resources.DisplayVideo360AdvertiserLinkProposal() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -71190,41 +81657,54 @@ def test_get_display_video360_advertiser_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) + return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_display_video360_advertiser_link(request) + response = client.create_display_video360_advertiser_link_proposal(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_display_video360_advertiser_link_rest_unset_required_fields(): +def test_create_display_video360_advertiser_link_proposal_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.get_display_video360_advertiser_link._get_unset_required_fields({}) + unset_fields = 
transport.create_display_video360_advertiser_link_proposal._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "displayVideo360AdvertiserLinkProposal", + ) + ) ) - assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_display_video360_advertiser_link_rest_interceptors(null_interceptor): +def test_create_display_video360_advertiser_link_proposal_rest_interceptors( + null_interceptor, +): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -71238,15 +81718,17 @@ def test_get_display_video360_advertiser_link_rest_interceptors(null_interceptor path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_get_display_video360_advertiser_link", + "post_create_display_video360_advertiser_link_proposal", ) as post, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_get_display_video360_advertiser_link", + "pre_create_display_video360_advertiser_link_proposal", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest.pb( - analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() + pb_message = ( + analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest.pb( + analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() + ) ) transcode.return_value = { "method": "post", @@ -71258,19 +81740,21 @@ def test_get_display_video360_advertiser_link_rest_interceptors(null_interceptor req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.DisplayVideo360AdvertiserLink.to_json( - resources.DisplayVideo360AdvertiserLink() + req.return_value._content = ( + resources.DisplayVideo360AdvertiserLinkProposal.to_json( + 
resources.DisplayVideo360AdvertiserLinkProposal() + ) ) - request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.DisplayVideo360AdvertiserLink() + post.return_value = resources.DisplayVideo360AdvertiserLinkProposal() - client.get_display_video360_advertiser_link( + client.create_display_video360_advertiser_link_proposal( request, metadata=[ ("key", "val"), @@ -71282,9 +81766,9 @@ def test_get_display_video360_advertiser_link_rest_interceptors(null_interceptor post.assert_called_once() -def test_get_display_video360_advertiser_link_rest_bad_request( +def test_create_display_video360_advertiser_link_proposal_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, + request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -71292,7 +81776,7 @@ def test_get_display_video360_advertiser_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/displayVideo360AdvertiserLinks/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -71304,10 +81788,10 @@ def test_get_display_video360_advertiser_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_display_video360_advertiser_link(request) + client.create_display_video360_advertiser_link_proposal(request) -def test_get_display_video360_advertiser_link_rest_flattened(): +def test_create_display_video360_advertiser_link_proposal_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -71316,16 +81800,17 @@ def test_get_display_video360_advertiser_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLink() + return_value = resources.DisplayVideo360AdvertiserLinkProposal() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "properties/sample1/displayVideo360AdvertiserLinks/sample2" - } + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal( + name="name_value" + ), ) mock_args.update(sample_request) @@ -71333,25 +81818,25 @@ def test_get_display_video360_advertiser_link_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) + return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - 
client.get_display_video360_advertiser_link(**mock_args) + client.create_display_video360_advertiser_link_proposal(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/displayVideo360AdvertiserLinks/*}" + "%s/v1alpha/{parent=properties/*}/displayVideo360AdvertiserLinkProposals" % client.transport._host, args[1], ) -def test_get_display_video360_advertiser_link_rest_flattened_error( +def test_create_display_video360_advertiser_link_proposal_rest_flattened_error( transport: str = "rest", ): client = AnalyticsAdminServiceClient( @@ -71362,13 +81847,16 @@ def test_get_display_video360_advertiser_link_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_display_video360_advertiser_link( - analytics_admin.GetDisplayVideo360AdvertiserLinkRequest(), - name="name_value", + client.create_display_video360_advertiser_link_proposal( + analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest(), + parent="parent_value", + display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal( + name="name_value" + ), ) -def test_get_display_video360_advertiser_link_rest_error(): +def test_create_display_video360_advertiser_link_proposal_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -71377,46 +81865,41 @@ def test_get_display_video360_advertiser_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, + analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, dict, ], ) -def test_list_display_video360_advertiser_links_rest(request_type): +def test_delete_display_video360_advertiser_link_proposal_rest(request_type): 
client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = { + "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - next_page_token="next_page_token_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_display_video360_advertiser_links(request) + response = client.delete_display_video360_advertiser_link_proposal(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDisplayVideo360AdvertiserLinksPager) - assert response.next_page_token == "next_page_token_value" + assert response is None -def test_list_display_video360_advertiser_links_rest_use_cached_wrapped_rpc(): +def test_delete_display_video360_advertiser_link_proposal_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -71431,7 +81914,7 @@ def test_list_display_video360_advertiser_links_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_display_video360_advertiser_links + client._transport.delete_display_video360_advertiser_link_proposal in client._transport._wrapped_methods ) @@ -71441,29 +81924,29 @@ def test_list_display_video360_advertiser_links_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_display_video360_advertiser_links + client._transport.delete_display_video360_advertiser_link_proposal ] = mock_rpc request = {} - client.list_display_video360_advertiser_links(request) + client.delete_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_display_video360_advertiser_links(request) + client.delete_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_display_video360_advertiser_links_rest_required_fields( - request_type=analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, +def test_delete_display_video360_advertiser_link_proposal_rest_required_fields( + request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -71474,32 +81957,25 @@ def test_list_display_video360_advertiser_links_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_display_video360_advertiser_links._get_unset_required_fields( + ).delete_display_video360_advertiser_link_proposal._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_display_video360_advertiser_links._get_unset_required_fields( + ).delete_display_video360_advertiser_link_proposal._get_unset_required_fields( jsonified_request ) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -71508,7 +81984,7 @@ def test_list_display_video360_advertiser_links_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -71520,53 +81996,40 @@ def test_list_display_video360_advertiser_links_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = ( - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_display_video360_advertiser_links(request) + response = client.delete_display_video360_advertiser_link_proposal(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_display_video360_advertiser_links_rest_unset_required_fields(): +def 
test_delete_display_video360_advertiser_link_proposal_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.list_display_video360_advertiser_links._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) + unset_fields = transport.delete_display_video360_advertiser_link_proposal._get_unset_required_fields( + {} ) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_display_video360_advertiser_links_rest_interceptors(null_interceptor): +def test_delete_display_video360_advertiser_link_proposal_rest_interceptors( + null_interceptor, +): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -71580,15 +82043,13 @@ def test_list_display_video360_advertiser_links_rest_interceptors(null_intercept path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_list_display_video360_advertiser_links", - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_list_display_video360_advertiser_links", + "pre_delete_display_video360_advertiser_link_proposal", ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest.pb( - analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() + pb_message = ( + analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest.pb( + analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() + ) ) transcode.return_value = { "method": "post", @@ -71600,21 +82061,15 @@ def test_list_display_video360_advertiser_links_rest_interceptors(null_intercept req.return_value = Response() req.return_value.status_code = 200 
req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse.to_json( - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() - ) - ) - request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() + request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() - client.list_display_video360_advertiser_links( + client.delete_display_video360_advertiser_link_proposal( request, metadata=[ ("key", "val"), @@ -71623,12 +82078,11 @@ def test_list_display_video360_advertiser_links_rest_interceptors(null_intercept ) pre.assert_called_once() - post.assert_called_once() -def test_list_display_video360_advertiser_links_rest_bad_request( +def test_delete_display_video360_advertiser_link_proposal_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, + request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -71636,7 +82090,9 @@ def test_list_display_video360_advertiser_links_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = { + "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -71648,10 +82104,10 @@ def test_list_display_video360_advertiser_links_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_display_video360_advertiser_links(request) + client.delete_display_video360_advertiser_link_proposal(request) -def test_list_display_video360_advertiser_links_rest_flattened(): +def test_delete_display_video360_advertiser_link_proposal_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -71660,42 +82116,40 @@ def test_list_display_video360_advertiser_links_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = { + "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_display_video360_advertiser_links(**mock_args) + client.delete_display_video360_advertiser_link_proposal(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/displayVideo360AdvertiserLinks" + "%s/v1alpha/{name=properties/*/displayVideo360AdvertiserLinkProposals/*}" % client.transport._host, args[1], ) -def test_list_display_video360_advertiser_links_rest_flattened_error( +def test_delete_display_video360_advertiser_link_proposal_rest_flattened_error( transport: str = "rest", ): client = AnalyticsAdminServiceClient( @@ -71706,208 +82160,66 @@ def test_list_display_video360_advertiser_links_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_display_video360_advertiser_links( - analytics_admin.ListDisplayVideo360AdvertiserLinksRequest(), - parent="parent_value", + client.delete_display_video360_advertiser_link_proposal( + analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest(), + name="name_value", ) -def test_list_display_video360_advertiser_links_rest_pager(transport: str = "rest"): +def test_delete_display_video360_advertiser_link_proposal_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[], - next_page_token="def", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( - display_video_360_advertiser_links=[ - resources.DisplayVideo360AdvertiserLink(), - resources.DisplayVideo360AdvertiserLink(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListDisplayVideo360AdvertiserLinksResponse.to_json(x) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_display_video360_advertiser_links(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, resources.DisplayVideo360AdvertiserLink) for i in results - ) - - pages = list( - client.list_display_video360_advertiser_links(request=sample_request).pages - ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, + 
analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, dict, ], ) -def test_create_display_video360_advertiser_link_rest(request_type): +def test_approve_display_video360_advertiser_link_proposal_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["display_video_360_advertiser_link"] = { - "name": "name_value", - "advertiser_id": "advertiser_id_value", - "advertiser_display_name": "advertiser_display_name_value", - "ads_personalization_enabled": {"value": True}, - "campaign_data_sharing_enabled": {}, - "cost_data_sharing_enabled": {}, + request_init = { + "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest.meta.fields[ - "display_video_360_advertiser_link" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "display_video_360_advertiser_link" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["display_video_360_advertiser_link"][field]) - ): - del 
request_init["display_video_360_advertiser_link"][field][i][ - subfield - ] - else: - del request_init["display_video_360_advertiser_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLink( - name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + return_value = ( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) + return_value = ( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_display_video360_advertiser_link(request) + response = client.approve_display_video360_advertiser_link_proposal(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DisplayVideo360AdvertiserLink) - assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert isinstance( + response, analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse + ) -def test_create_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): +def test_approve_display_video360_advertiser_link_proposal_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -71922,7 +82234,7 @@ def test_create_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_display_video360_advertiser_link + client._transport.approve_display_video360_advertiser_link_proposal in client._transport._wrapped_methods ) @@ -71932,29 +82244,29 @@ def test_create_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_display_video360_advertiser_link + client._transport.approve_display_video360_advertiser_link_proposal ] = mock_rpc request = {} - client.create_display_video360_advertiser_link(request) + client.approve_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_display_video360_advertiser_link(request) + client.approve_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_display_video360_advertiser_link_rest_required_fields( - request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, +def test_approve_display_video360_advertiser_link_proposal_rest_required_fields( + request_type=analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -71965,25 +82277,25 @@ def test_create_display_video360_advertiser_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_display_video360_advertiser_link._get_unset_required_fields( + ).approve_display_video360_advertiser_link_proposal._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_display_video360_advertiser_link._get_unset_required_fields( + ).approve_display_video360_advertiser_link_proposal._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -71992,7 +82304,9 @@ def test_create_display_video360_advertiser_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLink() + return_value = ( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -72014,40 +82328,38 @@ def test_create_display_video360_advertiser_link_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) + return_value = ( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_display_video360_advertiser_link(request) + response = client.approve_display_video360_advertiser_link_proposal(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_display_video360_advertiser_link_rest_unset_required_fields(): +def test_approve_display_video360_advertiser_link_proposal_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.create_display_video360_advertiser_link._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "displayVideo360AdvertiserLink", - ) - ) + unset_fields = transport.approve_display_video360_advertiser_link_proposal._get_unset_required_fields( + {} ) + 
assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_display_video360_advertiser_link_rest_interceptors(null_interceptor): +def test_approve_display_video360_advertiser_link_proposal_rest_interceptors( + null_interceptor, +): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -72061,15 +82373,17 @@ def test_create_display_video360_advertiser_link_rest_interceptors(null_intercep path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_create_display_video360_advertiser_link", + "post_approve_display_video360_advertiser_link_proposal", ) as post, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_create_display_video360_advertiser_link", + "pre_approve_display_video360_advertiser_link_proposal", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest.pb( - analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() + pb_message = ( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest.pb( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() + ) ) transcode.return_value = { "method": "post", @@ -72081,19 +82395,21 @@ def test_create_display_video360_advertiser_link_rest_interceptors(null_intercep req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.DisplayVideo360AdvertiserLink.to_json( - resources.DisplayVideo360AdvertiserLink() + req.return_value._content = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse.to_json( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() ) - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() + request = 
analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.DisplayVideo360AdvertiserLink() + post.return_value = ( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() + ) - client.create_display_video360_advertiser_link( + client.approve_display_video360_advertiser_link_proposal( request, metadata=[ ("key", "val"), @@ -72105,9 +82421,9 @@ def test_create_display_video360_advertiser_link_rest_interceptors(null_intercep post.assert_called_once() -def test_create_display_video360_advertiser_link_rest_bad_request( +def test_approve_display_video360_advertiser_link_proposal_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, + request_type=analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -72115,7 +82431,9 @@ def test_create_display_video360_advertiser_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = { + "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -72127,75 +82445,10 @@ def test_create_display_video360_advertiser_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_display_video360_advertiser_link(request) - - -def test_create_display_video360_advertiser_link_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLink() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( - name="name_value" - ), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_display_video360_advertiser_link(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/displayVideo360AdvertiserLinks" - % client.transport._host, - args[1], - ) - - -def test_create_display_video360_advertiser_link_rest_flattened_error( - transport: str = "rest", -): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_display_video360_advertiser_link( - analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest(), - parent="parent_value", - display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( - name="name_value" - ), - ) + client.approve_display_video360_advertiser_link_proposal(request) -def test_create_display_video360_advertiser_link_rest_error(): +def test_approve_display_video360_advertiser_link_proposal_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -72204,39 +82457,52 @@ def test_create_display_video360_advertiser_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, + analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, dict, ], ) -def test_delete_display_video360_advertiser_link_rest(request_type): +def test_cancel_display_video360_advertiser_link_proposal_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/displayVideo360AdvertiserLinks/sample2"} + request_init = { + "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.DisplayVideo360AdvertiserLinkProposal( + name="name_value", + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", + validation_email="validation_email_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_display_video360_advertiser_link(request) + response = client.cancel_display_video360_advertiser_link_proposal(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) + assert response.name == "name_value" + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.validation_email == "validation_email_value" -def test_delete_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): +def test_cancel_display_video360_advertiser_link_proposal_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -72251,7 +82517,7 @@ def test_delete_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_display_video360_advertiser_link + client._transport.cancel_display_video360_advertiser_link_proposal in client._transport._wrapped_methods ) @@ -72261,24 +82527,24 @@ def test_delete_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): 
"foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_display_video360_advertiser_link + client._transport.cancel_display_video360_advertiser_link_proposal ] = mock_rpc request = {} - client.delete_display_video360_advertiser_link(request) + client.cancel_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_display_video360_advertiser_link(request) + client.cancel_display_video360_advertiser_link_proposal(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_display_video360_advertiser_link_rest_required_fields( - request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, +def test_cancel_display_video360_advertiser_link_proposal_rest_required_fields( + request_type=analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -72294,7 +82560,7 @@ def test_delete_display_video360_advertiser_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_display_video360_advertiser_link._get_unset_required_fields( + ).cancel_display_video360_advertiser_link_proposal._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -72305,7 +82571,7 @@ def test_delete_display_video360_advertiser_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_display_video360_advertiser_link._get_unset_required_fields( + ).cancel_display_video360_advertiser_link_proposal._get_unset_required_fields( jsonified_request ) jsonified_request.update(unset_fields) @@ -72321,7 +82587,7 @@ def test_delete_display_video360_advertiser_link_rest_required_fields( request = 
request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.DisplayVideo360AdvertiserLinkProposal() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -72333,38 +82599,46 @@ def test_delete_display_video360_advertiser_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_display_video360_advertiser_link(request) + response = client.cancel_display_video360_advertiser_link_proposal(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_display_video360_advertiser_link_rest_unset_required_fields(): +def test_cancel_display_video360_advertiser_link_proposal_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.delete_display_video360_advertiser_link._get_unset_required_fields({}) + unset_fields = transport.cancel_display_video360_advertiser_link_proposal._get_unset_required_fields( + {} ) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_delete_display_video360_advertiser_link_rest_interceptors(null_interceptor): +def test_cancel_display_video360_advertiser_link_proposal_rest_interceptors( + null_interceptor, +): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -72378,11 +82652,17 @@ def test_delete_display_video360_advertiser_link_rest_interceptors(null_intercep path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_delete_display_video360_advertiser_link", + "post_cancel_display_video360_advertiser_link_proposal", + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "pre_cancel_display_video360_advertiser_link_proposal", ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest.pb( - analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() + post.assert_not_called() + pb_message = ( + analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest.pb( + analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() + ) ) transcode.return_value = { "method": "post", @@ -72394,15 +82674,21 @@ def test_delete_display_video360_advertiser_link_rest_interceptors(null_intercep req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = ( + resources.DisplayVideo360AdvertiserLinkProposal.to_json( + resources.DisplayVideo360AdvertiserLinkProposal() + ) + ) - request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.DisplayVideo360AdvertiserLinkProposal() - client.delete_display_video360_advertiser_link( + client.cancel_display_video360_advertiser_link_proposal( request, 
metadata=[ ("key", "val"), @@ -72411,11 +82697,12 @@ def test_delete_display_video360_advertiser_link_rest_interceptors(null_intercep ) pre.assert_called_once() + post.assert_called_once() -def test_delete_display_video360_advertiser_link_rest_bad_request( +def test_cancel_display_video360_advertiser_link_proposal_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, + request_type=analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -72423,7 +82710,9 @@ def test_delete_display_video360_advertiser_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/displayVideo360AdvertiserLinks/sample2"} + request_init = { + "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -72435,69 +82724,10 @@ def test_delete_display_video360_advertiser_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_display_video360_advertiser_link(request) - - -def test_delete_display_video360_advertiser_link_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "properties/sample1/displayVideo360AdvertiserLinks/sample2" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_display_video360_advertiser_link(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=properties/*/displayVideo360AdvertiserLinks/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_display_video360_advertiser_link_rest_flattened_error( - transport: str = "rest", -): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_display_video360_advertiser_link( - analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest(), - name="name_value", - ) + client.cancel_display_video360_advertiser_link_proposal(request) -def test_delete_display_video360_advertiser_link_rest_error(): +def test_cancel_display_video360_advertiser_link_proposal_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -72506,37 +82736,33 @@ def test_delete_display_video360_advertiser_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, + analytics_admin.CreateCustomDimensionRequest, dict, ], ) -def test_update_display_video360_advertiser_link_rest(request_type): +def test_create_custom_dimension_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "display_video_360_advertiser_link": { - "name": "properties/sample1/displayVideo360AdvertiserLinks/sample2" - } - } - request_init["display_video_360_advertiser_link"] = { - "name": "properties/sample1/displayVideo360AdvertiserLinks/sample2", - "advertiser_id": "advertiser_id_value", - "advertiser_display_name": "advertiser_display_name_value", - "ads_personalization_enabled": {"value": True}, - "campaign_data_sharing_enabled": {}, - "cost_data_sharing_enabled": {}, + request_init = {"parent": "properties/sample1"} + request_init["custom_dimension"] = { + "name": "name_value", + "parameter_name": "parameter_name_value", + "display_name": "display_name_value", + "description": "description_value", + "scope": 1, + "disallow_ads_personalization": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest.meta.fields[ - "display_video_360_advertiser_link" + test_field = analytics_admin.CreateCustomDimensionRequest.meta.fields[ + "custom_dimension" ] def get_message_fields(field): @@ -72565,9 +82791,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "display_video_360_advertiser_link" - ].items(): # pragma: NO COVER + for field, value in request_init["custom_dimension"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -72597,44 +82821,46 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range( - 0, len(request_init["display_video_360_advertiser_link"][field]) - ): - del request_init["display_video_360_advertiser_link"][field][i][ - subfield - ] + for i in range(0, len(request_init["custom_dimension"][field])): + del request_init["custom_dimension"][field][i][subfield] else: - del request_init["display_video_360_advertiser_link"][field][subfield] + del request_init["custom_dimension"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.DisplayVideo360AdvertiserLink( + return_value = resources.CustomDimension( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + parameter_name="parameter_name_value", + display_name="display_name_value", + description="description_value", + scope=resources.CustomDimension.DimensionScope.EVENT, + disallow_ads_personalization=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) + return_value = resources.CustomDimension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_display_video360_advertiser_link(request) + response = client.create_custom_dimension(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DisplayVideo360AdvertiserLink) + assert isinstance(response, resources.CustomDimension) assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.parameter_name == "parameter_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.scope == resources.CustomDimension.DimensionScope.EVENT + assert response.disallow_ads_personalization is True -def test_update_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): +def test_create_custom_dimension_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -72649,7 +82875,7 @@ def test_update_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_display_video360_advertiser_link + client._transport.create_custom_dimension in client._transport._wrapped_methods ) @@ -72659,28 +82885,29 @@ def test_update_display_video360_advertiser_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_display_video360_advertiser_link + client._transport.create_custom_dimension ] = mock_rpc request = {} - client.update_display_video360_advertiser_link(request) + client.create_custom_dimension(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_display_video360_advertiser_link(request) + client.create_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_display_video360_advertiser_link_rest_required_fields( - request_type=analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, +def test_create_custom_dimension_rest_required_fields( + request_type=analytics_admin.CreateCustomDimensionRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -72691,23 +82918,21 @@ def test_update_display_video360_advertiser_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_display_video360_advertiser_link._get_unset_required_fields( - jsonified_request - ) + ).create_custom_dimension._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_display_video360_advertiser_link._get_unset_required_fields( - jsonified_request - ) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).create_custom_dimension._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -72716,7 +82941,7 @@ def test_update_display_video360_advertiser_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLink() + return_value = resources.CustomDimension() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -72728,7 +82953,7 @@ def test_update_display_video360_advertiser_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -72738,32 +82963,38 @@ def test_update_display_video360_advertiser_link_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) + return_value = resources.CustomDimension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_display_video360_advertiser_link(request) + response = client.create_custom_dimension(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_display_video360_advertiser_link_rest_unset_required_fields(): +def 
test_create_custom_dimension_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.update_display_video360_advertiser_link._get_unset_required_fields({}) + unset_fields = transport.create_custom_dimension._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "customDimension", + ) + ) ) - assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_display_video360_advertiser_link_rest_interceptors(null_interceptor): +def test_create_custom_dimension_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -72776,16 +83007,14 @@ def test_update_display_video360_advertiser_link_rest_interceptors(null_intercep ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_update_display_video360_advertiser_link", + transports.AnalyticsAdminServiceRestInterceptor, "post_create_custom_dimension" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_update_display_video360_advertiser_link", + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_custom_dimension" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest.pb( - analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() + pb_message = analytics_admin.CreateCustomDimensionRequest.pb( + analytics_admin.CreateCustomDimensionRequest() ) transcode.return_value = { "method": "post", @@ -72797,19 +83026,19 @@ def test_update_display_video360_advertiser_link_rest_interceptors(null_intercep req.return_value = Response() req.return_value.status_code = 200 
req.return_value.request = PreparedRequest() - req.return_value._content = resources.DisplayVideo360AdvertiserLink.to_json( - resources.DisplayVideo360AdvertiserLink() + req.return_value._content = resources.CustomDimension.to_json( + resources.CustomDimension() ) - request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() + request = analytics_admin.CreateCustomDimensionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.DisplayVideo360AdvertiserLink() + post.return_value = resources.CustomDimension() - client.update_display_video360_advertiser_link( + client.create_custom_dimension( request, metadata=[ ("key", "val"), @@ -72821,9 +83050,8 @@ def test_update_display_video360_advertiser_link_rest_interceptors(null_intercep post.assert_called_once() -def test_update_display_video360_advertiser_link_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, +def test_create_custom_dimension_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateCustomDimensionRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -72831,11 +83059,7 @@ def test_update_display_video360_advertiser_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "display_video_360_advertiser_link": { - "name": "properties/sample1/displayVideo360AdvertiserLinks/sample2" - } - } + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -72847,10 +83071,10 @@ def test_update_display_video360_advertiser_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_display_video360_advertiser_link(request) + client.create_custom_dimension(request) -def test_update_display_video360_advertiser_link_rest_flattened(): +def test_create_custom_dimension_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -72859,21 +83083,15 @@ def test_update_display_video360_advertiser_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLink() + return_value = resources.CustomDimension() # get arguments that satisfy an http rule for this method - sample_request = { - "display_video_360_advertiser_link": { - "name": "properties/sample1/displayVideo360AdvertiserLinks/sample2" - } - } + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + custom_dimension=resources.CustomDimension(name="name_value"), ) mock_args.update(sample_request) @@ -72881,27 +83099,25 @@ def test_update_display_video360_advertiser_link_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLink.pb(return_value) + return_value = resources.CustomDimension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value - client.update_display_video360_advertiser_link(**mock_args) + client.create_custom_dimension(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{display_video_360_advertiser_link.name=properties/*/displayVideo360AdvertiserLinks/*}" + "%s/v1alpha/{parent=properties/*}/customDimensions" % client.transport._host, args[1], ) -def test_update_display_video360_advertiser_link_rest_flattened_error( - transport: str = "rest", -): +def test_create_custom_dimension_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -72910,70 +83126,149 @@ def test_update_display_video360_advertiser_link_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_display_video360_advertiser_link( - analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest(), - display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_custom_dimension( + analytics_admin.CreateCustomDimensionRequest(), + parent="parent_value", + custom_dimension=resources.CustomDimension(name="name_value"), ) -def test_update_display_video360_advertiser_link_rest_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) +def test_create_custom_dimension_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.UpdateCustomDimensionRequest, + dict, + ], +) +def test_update_custom_dimension_rest(request_type): + client = 
AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "custom_dimension": {"name": "properties/sample1/customDimensions/sample2"} + } + request_init["custom_dimension"] = { + "name": "properties/sample1/customDimensions/sample2", + "parameter_name": "parameter_name_value", + "display_name": "display_name_value", + "description": "description_value", + "scope": 1, + "disallow_ads_personalization": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateCustomDimensionRequest.meta.fields[ + "custom_dimension" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, - dict, - ], -) -def test_get_display_video360_advertiser_link_proposal_rest(request_type): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + subfields_not_in_runtime = [] - # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" - } + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["custom_dimension"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO 
COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["custom_dimension"][field])): + del request_init["custom_dimension"][field][i][subfield] + else: + del request_init["custom_dimension"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLinkProposal( + return_value = resources.CustomDimension( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", - validation_email="validation_email_value", + parameter_name="parameter_name_value", + display_name="display_name_value", + description="description_value", + scope=resources.CustomDimension.DimensionScope.EVENT, + disallow_ads_personalization=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb(return_value) + return_value = resources.CustomDimension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_display_video360_advertiser_link_proposal(request) + response = client.update_custom_dimension(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) + assert isinstance(response, resources.CustomDimension) assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" - assert response.validation_email == "validation_email_value" + assert response.parameter_name == "parameter_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.scope == resources.CustomDimension.DimensionScope.EVENT + assert response.disallow_ads_personalization is True -def test_get_display_video360_advertiser_link_proposal_rest_use_cached_wrapped_rpc(): +def test_update_custom_dimension_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -72988,7 +83283,7 @@ def test_get_display_video360_advertiser_link_proposal_rest_use_cached_wrapped_r # Ensure method has been cached assert ( - client._transport.get_display_video360_advertiser_link_proposal + client._transport.update_custom_dimension in client._transport._wrapped_methods ) @@ -72998,29 +83293,28 @@ def test_get_display_video360_advertiser_link_proposal_rest_use_cached_wrapped_r "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_display_video360_advertiser_link_proposal + client._transport.update_custom_dimension ] = mock_rpc request = {} - client.get_display_video360_advertiser_link_proposal(request) + client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_display_video360_advertiser_link_proposal(request) + client.update_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_display_video360_advertiser_link_proposal_rest_required_fields( - request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, +def test_update_custom_dimension_rest_required_fields( + request_type=analytics_admin.UpdateCustomDimensionRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -73031,25 +83325,19 @@ def test_get_display_video360_advertiser_link_proposal_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_display_video360_advertiser_link_proposal._get_unset_required_fields( - jsonified_request - ) + ).update_custom_dimension._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_display_video360_advertiser_link_proposal._get_unset_required_fields( - jsonified_request - ) + ).update_custom_dimension._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -73058,7 +83346,7 @@ def test_get_display_video360_advertiser_link_proposal_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLinkProposal() + return_value = resources.CustomDimension() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -73070,45 +83358,40 @@ def test_get_display_video360_advertiser_link_proposal_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb( - return_value - ) + return_value = resources.CustomDimension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_display_video360_advertiser_link_proposal(request) + response = client.update_custom_dimension(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_display_video360_advertiser_link_proposal_rest_unset_required_fields(): +def test_update_custom_dimension_rest_unset_required_fields(): 
transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_display_video360_advertiser_link_proposal._get_unset_required_fields( - {} - ) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_custom_dimension._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_display_video360_advertiser_link_proposal_rest_interceptors( - null_interceptor, -): +def test_update_custom_dimension_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -73121,16 +83404,14 @@ def test_get_display_video360_advertiser_link_proposal_rest_interceptors( ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_get_display_video360_advertiser_link_proposal", + transports.AnalyticsAdminServiceRestInterceptor, "post_update_custom_dimension" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_get_display_video360_advertiser_link_proposal", + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_custom_dimension" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest.pb( - analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() + pb_message = analytics_admin.UpdateCustomDimensionRequest.pb( + analytics_admin.UpdateCustomDimensionRequest() ) transcode.return_value = { "method": "post", @@ -73142,21 +83423,19 @@ def test_get_display_video360_advertiser_link_proposal_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( 
- resources.DisplayVideo360AdvertiserLinkProposal.to_json( - resources.DisplayVideo360AdvertiserLinkProposal() - ) + req.return_value._content = resources.CustomDimension.to_json( + resources.CustomDimension() ) - request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.UpdateCustomDimensionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.DisplayVideo360AdvertiserLinkProposal() + post.return_value = resources.CustomDimension() - client.get_display_video360_advertiser_link_proposal( + client.update_custom_dimension( request, metadata=[ ("key", "val"), @@ -73168,9 +83447,8 @@ def test_get_display_video360_advertiser_link_proposal_rest_interceptors( post.assert_called_once() -def test_get_display_video360_advertiser_link_proposal_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, +def test_update_custom_dimension_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateCustomDimensionRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -73179,7 +83457,7 @@ def test_get_display_video360_advertiser_link_proposal_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + "custom_dimension": {"name": "properties/sample1/customDimensions/sample2"} } request = request_type(**request_init) @@ -73192,10 +83470,10 @@ def test_get_display_video360_advertiser_link_proposal_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_display_video360_advertiser_link_proposal(request) + client.update_custom_dimension(request) -def test_get_display_video360_advertiser_link_proposal_rest_flattened(): +def 
test_update_custom_dimension_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -73204,16 +83482,17 @@ def test_get_display_video360_advertiser_link_proposal_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLinkProposal() + return_value = resources.CustomDimension() # get arguments that satisfy an http rule for this method sample_request = { - "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + "custom_dimension": {"name": "properties/sample1/customDimensions/sample2"} } # get truthy value for each flattened field mock_args = dict( - name="name_value", + custom_dimension=resources.CustomDimension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -73221,27 +83500,25 @@ def test_get_display_video360_advertiser_link_proposal_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb(return_value) + return_value = resources.CustomDimension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_display_video360_advertiser_link_proposal(**mock_args) + client.update_custom_dimension(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/displayVideo360AdvertiserLinkProposals/*}" + "%s/v1alpha/{custom_dimension.name=properties/*/customDimensions/*}" % client.transport._host, args[1], ) -def test_get_display_video360_advertiser_link_proposal_rest_flattened_error( - transport: str = "rest", -): +def test_update_custom_dimension_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -73250,13 +83527,14 @@ def test_get_display_video360_advertiser_link_proposal_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_display_video360_advertiser_link_proposal( - analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest(), - name="name_value", + client.update_custom_dimension( + analytics_admin.UpdateCustomDimensionRequest(), + custom_dimension=resources.CustomDimension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_display_video360_advertiser_link_proposal_rest_error(): +def test_update_custom_dimension_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -73265,11 +83543,11 @@ def test_get_display_video360_advertiser_link_proposal_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, + analytics_admin.ListCustomDimensionsRequest, dict, ], ) -def test_list_display_video360_advertiser_link_proposals_rest(request_type): +def test_list_custom_dimensions_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -73282,33 +83560,27 @@ def test_list_display_video360_advertiser_link_proposals_rest(request_type): 
# Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - next_page_token="next_page_token_value", - ) + return_value = analytics_admin.ListCustomDimensionsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse.pb( - return_value - ) - ) + return_value = analytics_admin.ListCustomDimensionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_display_video360_advertiser_link_proposals(request) + response = client.list_custom_dimensions(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDisplayVideo360AdvertiserLinkProposalsPager) + assert isinstance(response, pagers.ListCustomDimensionsPager) assert response.next_page_token == "next_page_token_value" -def test_list_display_video360_advertiser_link_proposals_rest_use_cached_wrapped_rpc(): +def test_list_custom_dimensions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -73323,7 +83595,7 @@ def test_list_display_video360_advertiser_link_proposals_rest_use_cached_wrapped # Ensure method has been cached assert ( - client._transport.list_display_video360_advertiser_link_proposals + client._transport.list_custom_dimensions in client._transport._wrapped_methods ) @@ -73333,24 +83605,24 @@ def test_list_display_video360_advertiser_link_proposals_rest_use_cached_wrapped "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_display_video360_advertiser_link_proposals + client._transport.list_custom_dimensions ] = mock_rpc request = {} - client.list_display_video360_advertiser_link_proposals(request) + client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_display_video360_advertiser_link_proposals(request) + client.list_custom_dimensions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_display_video360_advertiser_link_proposals_rest_required_fields( - request_type=analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, +def test_list_custom_dimensions_rest_required_fields( + request_type=analytics_admin.ListCustomDimensionsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -73366,9 +83638,7 @@ def test_list_display_video360_advertiser_link_proposals_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_display_video360_advertiser_link_proposals._get_unset_required_fields( - jsonified_request - ) + ).list_custom_dimensions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -73377,9 +83647,7 @@ def test_list_display_video360_advertiser_link_proposals_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_display_video360_advertiser_link_proposals._get_unset_required_fields( - jsonified_request - ) + ).list_custom_dimensions._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -73400,7 +83668,7 @@ def test_list_display_video360_advertiser_link_proposals_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() + return_value = analytics_admin.ListCustomDimensionsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -73421,31 +83689,25 @@ def test_list_display_video360_advertiser_link_proposals_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse.pb( - return_value - ) - ) + return_value = analytics_admin.ListCustomDimensionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_display_video360_advertiser_link_proposals(request) + response = client.list_custom_dimensions(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_display_video360_advertiser_link_proposals_rest_unset_required_fields(): +def test_list_custom_dimensions_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_display_video360_advertiser_link_proposals._get_unset_required_fields( - {} - ) + unset_fields = transport.list_custom_dimensions._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -73458,9 +83720,7 @@ def test_list_display_video360_advertiser_link_proposals_rest_unset_required_fie @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_display_video360_advertiser_link_proposals_rest_interceptors( - null_interceptor, -): +def test_list_custom_dimensions_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -73473,18 +83733,14 @@ def test_list_display_video360_advertiser_link_proposals_rest_interceptors( ) as req, mock.patch.object( 
path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_list_display_video360_advertiser_link_proposals", + transports.AnalyticsAdminServiceRestInterceptor, "post_list_custom_dimensions" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_list_display_video360_advertiser_link_proposals", + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_custom_dimensions" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest.pb( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() - ) + pb_message = analytics_admin.ListCustomDimensionsRequest.pb( + analytics_admin.ListCustomDimensionsRequest() ) transcode.return_value = { "method": "post", @@ -73497,22 +83753,20 @@ def test_list_display_video360_advertiser_link_proposals_rest_interceptors( req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse.to_json( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() + analytics_admin.ListCustomDimensionsResponse.to_json( + analytics_admin.ListCustomDimensionsResponse() ) ) - request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() + request = analytics_admin.ListCustomDimensionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() - ) + post.return_value = analytics_admin.ListCustomDimensionsResponse() - client.list_display_video360_advertiser_link_proposals( + client.list_custom_dimensions( request, metadata=[ ("key", "val"), @@ -73524,9 +83778,8 @@ def test_list_display_video360_advertiser_link_proposals_rest_interceptors( post.assert_called_once() -def 
test_list_display_video360_advertiser_link_proposals_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, +def test_list_custom_dimensions_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListCustomDimensionsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -73546,10 +83799,10 @@ def test_list_display_video360_advertiser_link_proposals_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_display_video360_advertiser_link_proposals(request) + client.list_custom_dimensions(request) -def test_list_display_video360_advertiser_link_proposals_rest_flattened(): +def test_list_custom_dimensions_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -73558,9 +83811,7 @@ def test_list_display_video360_advertiser_link_proposals_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() - ) + return_value = analytics_admin.ListCustomDimensionsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1"} @@ -73575,31 +83826,25 @@ def test_list_display_video360_advertiser_link_proposals_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse.pb( - return_value - ) - ) + return_value = analytics_admin.ListCustomDimensionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_display_video360_advertiser_link_proposals(**mock_args) + client.list_custom_dimensions(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/displayVideo360AdvertiserLinkProposals" + "%s/v1alpha/{parent=properties/*}/customDimensions" % client.transport._host, args[1], ) -def test_list_display_video360_advertiser_link_proposals_rest_flattened_error( - transport: str = "rest", -): +def test_list_custom_dimensions_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -73608,15 +83853,13 @@ def test_list_display_video360_advertiser_link_proposals_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_display_video360_advertiser_link_proposals( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest(), + client.list_custom_dimensions( + analytics_admin.ListCustomDimensionsRequest(), parent="parent_value", ) -def test_list_display_video360_advertiser_link_proposals_rest_pager( - transport: str = "rest", -): +def test_list_custom_dimensions_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -73628,28 +83871,28 @@ def test_list_display_video360_advertiser_link_proposals_rest_pager( # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), + resources.CustomDimension(), + resources.CustomDimension(), ], next_page_token="abc", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[], + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[], next_page_token="def", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), + analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), ], next_page_token="ghi", ), - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( - display_video_360_advertiser_link_proposals=[ - resources.DisplayVideo360AdvertiserLinkProposal(), - resources.DisplayVideo360AdvertiserLinkProposal(), + 
analytics_admin.ListCustomDimensionsResponse( + custom_dimensions=[ + resources.CustomDimension(), + resources.CustomDimension(), ], ), ) @@ -73658,10 +83901,7 @@ def test_list_display_video360_advertiser_link_proposals_rest_pager( # Wrap the values into proper Response objs response = tuple( - analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse.to_json( - x - ) - for x in response + analytics_admin.ListCustomDimensionsResponse.to_json(x) for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -73671,22 +83911,13 @@ def test_list_display_video360_advertiser_link_proposals_rest_pager( sample_request = {"parent": "properties/sample1"} - pager = client.list_display_video360_advertiser_link_proposals( - request=sample_request - ) + pager = client.list_custom_dimensions(request=sample_request) results = list(pager) assert len(results) == 6 - assert all( - isinstance(i, resources.DisplayVideo360AdvertiserLinkProposal) - for i in results - ) + assert all(isinstance(i, resources.CustomDimension) for i in results) - pages = list( - client.list_display_video360_advertiser_link_proposals( - request=sample_request - ).pages - ) + pages = list(client.list_custom_dimensions(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -73694,148 +83925,39 @@ def test_list_display_video360_advertiser_link_proposals_rest_pager( @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, + analytics_admin.ArchiveCustomDimensionRequest, dict, ], ) -def test_create_display_video360_advertiser_link_proposal_rest(request_type): +def test_archive_custom_dimension_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": 
"properties/sample1"} - request_init["display_video_360_advertiser_link_proposal"] = { - "name": "name_value", - "advertiser_id": "advertiser_id_value", - "link_proposal_status_details": { - "link_proposal_initiating_product": 1, - "requestor_email": "requestor_email_value", - "link_proposal_state": 1, - }, - "advertiser_display_name": "advertiser_display_name_value", - "validation_email": "validation_email_value", - "ads_personalization_enabled": {"value": True}, - "campaign_data_sharing_enabled": {}, - "cost_data_sharing_enabled": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = ( - analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest.meta.fields[ - "display_video_360_advertiser_link_proposal" - ] - ) - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "display_video_360_advertiser_link_proposal" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, - len( - request_init["display_video_360_advertiser_link_proposal"][ - 
field - ] - ), - ): - del request_init["display_video_360_advertiser_link_proposal"][ - field - ][i][subfield] - else: - del request_init["display_video_360_advertiser_link_proposal"][field][ - subfield - ] + request_init = {"name": "properties/sample1/customDimensions/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLinkProposal( - name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", - validation_email="validation_email_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_display_video360_advertiser_link_proposal(request) + response = client.archive_custom_dimension(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) - assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" - assert response.validation_email == "validation_email_value" + assert response is None -def test_create_display_video360_advertiser_link_proposal_rest_use_cached_wrapped_rpc(): +def test_archive_custom_dimension_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -73850,7 +83972,7 @@ def test_create_display_video360_advertiser_link_proposal_rest_use_cached_wrappe # Ensure method has been cached assert ( - client._transport.create_display_video360_advertiser_link_proposal + client._transport.archive_custom_dimension in client._transport._wrapped_methods ) @@ -73860,29 +83982,29 @@ def test_create_display_video360_advertiser_link_proposal_rest_use_cached_wrappe "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_display_video360_advertiser_link_proposal + client._transport.archive_custom_dimension ] = mock_rpc request = {} - client.create_display_video360_advertiser_link_proposal(request) + client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_display_video360_advertiser_link_proposal(request) + client.archive_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_display_video360_advertiser_link_proposal_rest_required_fields( - request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, +def test_archive_custom_dimension_rest_required_fields( + request_type=analytics_admin.ArchiveCustomDimensionRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -73893,25 +84015,21 @@ def test_create_display_video360_advertiser_link_proposal_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_display_video360_advertiser_link_proposal._get_unset_required_fields( - jsonified_request - ) + ).archive_custom_dimension._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_display_video360_advertiser_link_proposal._get_unset_required_fields( - jsonified_request - ) + ).archive_custom_dimension._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), 
@@ -73920,7 +84038,7 @@ def test_create_display_video360_advertiser_link_proposal_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLinkProposal() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -73940,46 +84058,29 @@ def test_create_display_video360_advertiser_link_proposal_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_display_video360_advertiser_link_proposal(request) + response = client.archive_custom_dimension(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_display_video360_advertiser_link_proposal_rest_unset_required_fields(): +def test_archive_custom_dimension_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_display_video360_advertiser_link_proposal._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "displayVideo360AdvertiserLinkProposal", - ) - ) - ) + unset_fields = transport.archive_custom_dimension._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_create_display_video360_advertiser_link_proposal_rest_interceptors( - null_interceptor, -): +def test_archive_custom_dimension_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -73992,18 +84093,11 @@ def test_create_display_video360_advertiser_link_proposal_rest_interceptors( ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_create_display_video360_advertiser_link_proposal", - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_create_display_video360_advertiser_link_proposal", + transports.AnalyticsAdminServiceRestInterceptor, "pre_archive_custom_dimension" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = ( - analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest.pb( - analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() - ) + pb_message = analytics_admin.ArchiveCustomDimensionRequest.pb( + analytics_admin.ArchiveCustomDimensionRequest() ) transcode.return_value = { "method": "post", @@ -74015,21 +84109,15 @@ def test_create_display_video360_advertiser_link_proposal_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - resources.DisplayVideo360AdvertiserLinkProposal.to_json( - resources.DisplayVideo360AdvertiserLinkProposal() - ) - ) - request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.ArchiveCustomDimensionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.DisplayVideo360AdvertiserLinkProposal() - client.create_display_video360_advertiser_link_proposal( + client.archive_custom_dimension( request, metadata=[ 
("key", "val"), @@ -74038,12 +84126,10 @@ def test_create_display_video360_advertiser_link_proposal_rest_interceptors( ) pre.assert_called_once() - post.assert_called_once() -def test_create_display_video360_advertiser_link_proposal_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, +def test_archive_custom_dimension_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ArchiveCustomDimensionRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -74051,7 +84137,7 @@ def test_create_display_video360_advertiser_link_proposal_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/customDimensions/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -74063,10 +84149,10 @@ def test_create_display_video360_advertiser_link_proposal_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_display_video360_advertiser_link_proposal(request) + client.archive_custom_dimension(request) -def test_create_display_video360_advertiser_link_proposal_rest_flattened(): +def test_archive_custom_dimension_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -74075,45 +84161,38 @@ def test_create_display_video360_advertiser_link_proposal_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.DisplayVideo360AdvertiserLinkProposal() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/customDimensions/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal( - name="name_value" - ), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_display_video360_advertiser_link_proposal(**mock_args) + client.archive_custom_dimension(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/displayVideo360AdvertiserLinkProposals" + "%s/v1alpha/{name=properties/*/customDimensions/*}:archive" % client.transport._host, args[1], ) -def test_create_display_video360_advertiser_link_proposal_rest_flattened_error( - transport: str = "rest", -): +def test_archive_custom_dimension_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -74122,16 +84201,13 @@ def test_create_display_video360_advertiser_link_proposal_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_display_video360_advertiser_link_proposal( - analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest(), - parent="parent_value", - display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal( - name="name_value" - ), + client.archive_custom_dimension( + analytics_admin.ArchiveCustomDimensionRequest(), + name="name_value", ) -def test_create_display_video360_advertiser_link_proposal_rest_error(): +def test_archive_custom_dimension_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -74140,41 +84216,54 @@ def test_create_display_video360_advertiser_link_proposal_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, + analytics_admin.GetCustomDimensionRequest, dict, ], ) -def test_delete_display_video360_advertiser_link_proposal_rest(request_type): +def test_get_custom_dimension_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" - } + request_init = {"name": "properties/sample1/customDimensions/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.CustomDimension( + name="name_value", + parameter_name="parameter_name_value", + display_name="display_name_value", + description="description_value", + scope=resources.CustomDimension.DimensionScope.EVENT, + disallow_ads_personalization=True, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.CustomDimension.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_display_video360_advertiser_link_proposal(request) + response = client.get_custom_dimension(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.CustomDimension) + assert response.name == "name_value" + assert response.parameter_name == "parameter_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.scope == resources.CustomDimension.DimensionScope.EVENT + assert response.disallow_ads_personalization is True -def test_delete_display_video360_advertiser_link_proposal_rest_use_cached_wrapped_rpc(): +def test_get_custom_dimension_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -74189,8 +84278,7 @@ def test_delete_display_video360_advertiser_link_proposal_rest_use_cached_wrappe # Ensure method has been cached assert ( - client._transport.delete_display_video360_advertiser_link_proposal - in client._transport._wrapped_methods + client._transport.get_custom_dimension in client._transport._wrapped_methods ) # Replace cached wrapped 
function with mock @@ -74199,24 +84287,24 @@ def test_delete_display_video360_advertiser_link_proposal_rest_use_cached_wrappe "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_display_video360_advertiser_link_proposal + client._transport.get_custom_dimension ] = mock_rpc request = {} - client.delete_display_video360_advertiser_link_proposal(request) + client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_display_video360_advertiser_link_proposal(request) + client.get_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_display_video360_advertiser_link_proposal_rest_required_fields( - request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, +def test_get_custom_dimension_rest_required_fields( + request_type=analytics_admin.GetCustomDimensionRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -74232,9 +84320,7 @@ def test_delete_display_video360_advertiser_link_proposal_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_display_video360_advertiser_link_proposal._get_unset_required_fields( - jsonified_request - ) + ).get_custom_dimension._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -74243,9 +84329,7 @@ def test_delete_display_video360_advertiser_link_proposal_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_display_video360_advertiser_link_proposal._get_unset_required_fields( - jsonified_request - ) + ).get_custom_dimension._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -74259,7 +84343,7 @@ def test_delete_display_video360_advertiser_link_proposal_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.CustomDimension() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -74271,40 +84355,39 @@ def test_delete_display_video360_advertiser_link_proposal_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.CustomDimension.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_display_video360_advertiser_link_proposal(request) + response = client.get_custom_dimension(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_display_video360_advertiser_link_proposal_rest_unset_required_fields(): +def test_get_custom_dimension_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_display_video360_advertiser_link_proposal._get_unset_required_fields( - {} - ) + unset_fields = transport.get_custom_dimension._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) 
@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_display_video360_advertiser_link_proposal_rest_interceptors( - null_interceptor, -): +def test_get_custom_dimension_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -74317,14 +84400,14 @@ def test_delete_display_video360_advertiser_link_proposal_rest_interceptors( ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_delete_display_video360_advertiser_link_proposal", + transports.AnalyticsAdminServiceRestInterceptor, "post_get_custom_dimension" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_custom_dimension" ) as pre: pre.assert_not_called() - pb_message = ( - analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest.pb( - analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() - ) + post.assert_not_called() + pb_message = analytics_admin.GetCustomDimensionRequest.pb( + analytics_admin.GetCustomDimensionRequest() ) transcode.return_value = { "method": "post", @@ -74336,15 +84419,19 @@ def test_delete_display_video360_advertiser_link_proposal_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.CustomDimension.to_json( + resources.CustomDimension() + ) - request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.GetCustomDimensionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.CustomDimension() - client.delete_display_video360_advertiser_link_proposal( + client.get_custom_dimension( request, metadata=[ ("key", "val"), @@ -74353,11 +84440,11 @@ def 
test_delete_display_video360_advertiser_link_proposal_rest_interceptors( ) pre.assert_called_once() + post.assert_called_once() -def test_delete_display_video360_advertiser_link_proposal_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest, +def test_get_custom_dimension_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetCustomDimensionRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -74365,9 +84452,7 @@ def test_delete_display_video360_advertiser_link_proposal_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" - } + request_init = {"name": "properties/sample1/customDimensions/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -74379,10 +84464,10 @@ def test_delete_display_video360_advertiser_link_proposal_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_display_video360_advertiser_link_proposal(request) + client.get_custom_dimension(request) -def test_delete_display_video360_advertiser_link_proposal_rest_flattened(): +def test_get_custom_dimension_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -74391,12 +84476,10 @@ def test_delete_display_video360_advertiser_link_proposal_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.CustomDimension() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" - } + sample_request = {"name": "properties/sample1/customDimensions/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -74407,26 +84490,26 @@ def test_delete_display_video360_advertiser_link_proposal_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.CustomDimension.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_display_video360_advertiser_link_proposal(**mock_args) + client.get_custom_dimension(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/displayVideo360AdvertiserLinkProposals/*}" + "%s/v1alpha/{name=properties/*/customDimensions/*}" % client.transport._host, args[1], ) -def test_delete_display_video360_advertiser_link_proposal_rest_flattened_error( - transport: str = "rest", -): +def test_get_custom_dimension_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -74435,13 +84518,13 @@ def test_delete_display_video360_advertiser_link_proposal_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_display_video360_advertiser_link_proposal( - analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest(), + client.get_custom_dimension( + analytics_admin.GetCustomDimensionRequest(), name="name_value", ) -def test_delete_display_video360_advertiser_link_proposal_rest_error(): +def test_get_custom_dimension_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -74450,51 +84533,136 @@ def test_delete_display_video360_advertiser_link_proposal_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, + analytics_admin.CreateCustomMetricRequest, dict, ], ) -def test_approve_display_video360_advertiser_link_proposal_rest(request_type): +def test_create_custom_metric_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + request_init = {"parent": "properties/sample1"} + request_init["custom_metric"] = { + "name": "name_value", + "parameter_name": "parameter_name_value", + "display_name": "display_name_value", + "description": "description_value", + "measurement_unit": 1, + "scope": 1, + "restricted_metric_type": [1], } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreateCustomMetricRequest.meta.fields["custom_metric"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["custom_metric"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of 
the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["custom_metric"][field])): + del request_init["custom_metric"][field][i][subfield] + else: + del request_init["custom_metric"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = ( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() + return_value = resources.CustomMetric( + name="name_value", + parameter_name="parameter_name_value", + display_name="display_name_value", + description="description_value", + measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, + scope=resources.CustomMetric.MetricScope.EVENT, + restricted_metric_type=[ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse.pb( - return_value - ) - ) + return_value = resources.CustomMetric.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.approve_display_video360_advertiser_link_proposal(request) + response = client.create_custom_metric(request) # Establish that the response is the type that we expect. 
- assert isinstance( - response, analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse - ) + assert isinstance(response, resources.CustomMetric) + assert response.name == "name_value" + assert response.parameter_name == "parameter_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD + assert response.scope == resources.CustomMetric.MetricScope.EVENT + assert response.restricted_metric_type == [ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ] -def test_approve_display_video360_advertiser_link_proposal_rest_use_cached_wrapped_rpc(): +def test_create_custom_metric_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -74509,8 +84677,7 @@ def test_approve_display_video360_advertiser_link_proposal_rest_use_cached_wrapp # Ensure method has been cached assert ( - client._transport.approve_display_video360_advertiser_link_proposal - in client._transport._wrapped_methods + client._transport.create_custom_metric in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -74519,29 +84686,29 @@ def test_approve_display_video360_advertiser_link_proposal_rest_use_cached_wrapp "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.approve_display_video360_advertiser_link_proposal + client._transport.create_custom_metric ] = mock_rpc request = {} - client.approve_display_video360_advertiser_link_proposal(request) + client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.approve_display_video360_advertiser_link_proposal(request) + client.create_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_approve_display_video360_advertiser_link_proposal_rest_required_fields( - request_type=analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, +def test_create_custom_metric_rest_required_fields( + request_type=analytics_admin.CreateCustomMetricRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -74552,25 +84719,21 @@ def test_approve_display_video360_advertiser_link_proposal_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).approve_display_video360_advertiser_link_proposal._get_unset_required_fields( - jsonified_request - ) + ).create_custom_metric._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).approve_display_video360_advertiser_link_proposal._get_unset_required_fields( - jsonified_request - ) + ).create_custom_metric._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -74579,9 
+84742,7 @@ def test_approve_display_video360_advertiser_link_proposal_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() - ) + return_value = resources.CustomMetric() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -74603,38 +84764,38 @@ def test_approve_display_video360_advertiser_link_proposal_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse.pb( - return_value - ) - ) + return_value = resources.CustomMetric.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.approve_display_video360_advertiser_link_proposal(request) + response = client.create_custom_metric(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_approve_display_video360_advertiser_link_proposal_rest_unset_required_fields(): +def test_create_custom_metric_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.approve_display_video360_advertiser_link_proposal._get_unset_required_fields( - {} + unset_fields = transport.create_custom_metric._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "customMetric", + ) + ) ) - assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_approve_display_video360_advertiser_link_proposal_rest_interceptors( - null_interceptor, -): +def test_create_custom_metric_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -74647,18 +84808,14 @@ def test_approve_display_video360_advertiser_link_proposal_rest_interceptors( ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_approve_display_video360_advertiser_link_proposal", + transports.AnalyticsAdminServiceRestInterceptor, "post_create_custom_metric" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_approve_display_video360_advertiser_link_proposal", + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_custom_metric" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = ( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest.pb( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() - ) + pb_message = analytics_admin.CreateCustomMetricRequest.pb( + analytics_admin.CreateCustomMetricRequest() ) transcode.return_value = { "method": "post", @@ -74670,21 +84827,19 @@ def test_approve_display_video360_advertiser_link_proposal_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse.to_json( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() + req.return_value._content = resources.CustomMetric.to_json( + resources.CustomMetric() ) - request = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.CreateCustomMetricRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - 
post.return_value = ( - analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() - ) + post.return_value = resources.CustomMetric() - client.approve_display_video360_advertiser_link_proposal( + client.create_custom_metric( request, metadata=[ ("key", "val"), @@ -74696,9 +84851,8 @@ def test_approve_display_video360_advertiser_link_proposal_rest_interceptors( post.assert_called_once() -def test_approve_display_video360_advertiser_link_proposal_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, +def test_create_custom_metric_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateCustomMetricRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -74706,9 +84860,7 @@ def test_approve_display_video360_advertiser_link_proposal_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" - } + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -74720,10 +84872,68 @@ def test_approve_display_video360_advertiser_link_proposal_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.approve_display_video360_advertiser_link_proposal(request) + client.create_custom_metric(request) + + +def test_create_custom_metric_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.CustomMetric() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + custom_metric=resources.CustomMetric(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.CustomMetric.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_custom_metric(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=properties/*}/customMetrics" % client.transport._host, + args[1], + ) + + +def test_create_custom_metric_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_custom_metric( + analytics_admin.CreateCustomMetricRequest(), + parent="parent_value", + custom_metric=resources.CustomMetric(name="name_value"), + ) -def test_approve_display_video360_advertiser_link_proposal_rest_error(): +def test_create_custom_metric_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -74732,11 +84942,11 @@ def test_approve_display_video360_advertiser_link_proposal_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, + analytics_admin.UpdateCustomMetricRequest, dict, ], ) -def test_cancel_display_video360_advertiser_link_proposal_rest(request_type): +def test_update_custom_metric_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -74744,40 +84954,126 @@ def test_cancel_display_video360_advertiser_link_proposal_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + "custom_metric": {"name": "properties/sample1/customMetrics/sample2"} } + request_init["custom_metric"] = { + "name": "properties/sample1/customMetrics/sample2", + "parameter_name": "parameter_name_value", + "display_name": "display_name_value", + "description": "description_value", + "measurement_unit": 1, + "scope": 1, + "restricted_metric_type": [1], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateCustomMetricRequest.meta.fields["custom_metric"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["custom_metric"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of 
the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["custom_metric"][field])): + del request_init["custom_metric"][field][i][subfield] + else: + del request_init["custom_metric"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLinkProposal( + return_value = resources.CustomMetric( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", - validation_email="validation_email_value", + parameter_name="parameter_name_value", + display_name="display_name_value", + description="description_value", + measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, + scope=resources.CustomMetric.MetricScope.EVENT, + restricted_metric_type=[ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb(return_value) + return_value = resources.CustomMetric.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.cancel_display_video360_advertiser_link_proposal(request) + response = client.update_custom_metric(request) # Establish that the response is the type 
that we expect. - assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) + assert isinstance(response, resources.CustomMetric) assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" - assert response.validation_email == "validation_email_value" + assert response.parameter_name == "parameter_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD + assert response.scope == resources.CustomMetric.MetricScope.EVENT + assert response.restricted_metric_type == [ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ] -def test_cancel_display_video360_advertiser_link_proposal_rest_use_cached_wrapped_rpc(): +def test_update_custom_metric_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -74792,8 +85088,7 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_use_cached_wrappe # Ensure method has been cached assert ( - client._transport.cancel_display_video360_advertiser_link_proposal - in client._transport._wrapped_methods + client._transport.update_custom_metric in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -74802,29 +85097,28 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_use_cached_wrappe "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.cancel_display_video360_advertiser_link_proposal + client._transport.update_custom_metric ] = mock_rpc request = {} - client.cancel_display_video360_advertiser_link_proposal(request) + client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.cancel_display_video360_advertiser_link_proposal(request) + client.update_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_cancel_display_video360_advertiser_link_proposal_rest_required_fields( - request_type=analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, +def test_update_custom_metric_rest_required_fields( + request_type=analytics_admin.UpdateCustomMetricRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -74835,25 +85129,19 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).cancel_display_video360_advertiser_link_proposal._get_unset_required_fields( - jsonified_request - ) + ).update_custom_metric._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).cancel_display_video360_advertiser_link_proposal._get_unset_required_fields( - jsonified_request - ) + ).update_custom_metric._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -74862,7 +85150,7 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.DisplayVideo360AdvertiserLinkProposal() + return_value = resources.CustomMetric() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -74874,7 +85162,7 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -74884,36 +85172,30 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DisplayVideo360AdvertiserLinkProposal.pb( - return_value - ) + return_value = resources.CustomMetric.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.cancel_display_video360_advertiser_link_proposal(request) + response = client.update_custom_metric(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_cancel_display_video360_advertiser_link_proposal_rest_unset_required_fields(): +def 
test_update_custom_metric_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.cancel_display_video360_advertiser_link_proposal._get_unset_required_fields( - {} - ) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_custom_metric._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_cancel_display_video360_advertiser_link_proposal_rest_interceptors( - null_interceptor, -): +def test_update_custom_metric_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -74926,18 +85208,14 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_interceptors( ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_cancel_display_video360_advertiser_link_proposal", + transports.AnalyticsAdminServiceRestInterceptor, "post_update_custom_metric" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_cancel_display_video360_advertiser_link_proposal", + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_custom_metric" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = ( - analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest.pb( - analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() - ) + pb_message = analytics_admin.UpdateCustomMetricRequest.pb( + analytics_admin.UpdateCustomMetricRequest() ) transcode.return_value = { "method": "post", @@ -74949,21 +85227,19 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_interceptors( req.return_value = Response() req.return_value.status_code = 200 
req.return_value.request = PreparedRequest() - req.return_value._content = ( - resources.DisplayVideo360AdvertiserLinkProposal.to_json( - resources.DisplayVideo360AdvertiserLinkProposal() - ) + req.return_value._content = resources.CustomMetric.to_json( + resources.CustomMetric() ) - request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() + request = analytics_admin.UpdateCustomMetricRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.DisplayVideo360AdvertiserLinkProposal() + post.return_value = resources.CustomMetric() - client.cancel_display_video360_advertiser_link_proposal( + client.update_custom_metric( request, metadata=[ ("key", "val"), @@ -74975,9 +85251,8 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_interceptors( post.assert_called_once() -def test_cancel_display_video360_advertiser_link_proposal_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, +def test_update_custom_metric_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateCustomMetricRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -74986,7 +85261,7 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "properties/sample1/displayVideo360AdvertiserLinkProposals/sample2" + "custom_metric": {"name": "properties/sample1/customMetrics/sample2"} } request = request_type(**request_init) @@ -74999,10 +85274,71 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.cancel_display_video360_advertiser_link_proposal(request) + client.update_custom_metric(request) -def 
test_cancel_display_video360_advertiser_link_proposal_rest_error(): +def test_update_custom_metric_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.CustomMetric() + + # get arguments that satisfy an http rule for this method + sample_request = { + "custom_metric": {"name": "properties/sample1/customMetrics/sample2"} + } + + # get truthy value for each flattened field + mock_args = dict( + custom_metric=resources.CustomMetric(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.CustomMetric.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_custom_metric(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{custom_metric.name=properties/*/customMetrics/*}" + % client.transport._host, + args[1], + ) + + +def test_update_custom_metric_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_custom_metric( + analytics_admin.UpdateCustomMetricRequest(), + custom_metric=resources.CustomMetric(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_custom_metric_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -75011,11 +85347,11 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateCustomDimensionRequest, + analytics_admin.ListCustomMetricsRequest, dict, ], ) -def test_create_custom_dimension_rest(request_type): +def test_list_custom_metrics_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -75023,119 +85359,32 @@ def test_create_custom_dimension_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "properties/sample1"} - request_init["custom_dimension"] = { - "name": "name_value", - "parameter_name": "parameter_name_value", - "display_name": "display_name_value", - "description": "description_value", - "scope": 1, - "disallow_ads_personalization": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateCustomDimensionRequest.meta.fields[ - "custom_dimension" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["custom_dimension"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["custom_dimension"][field])): - del request_init["custom_dimension"][field][i][subfield] - 
else: - del request_init["custom_dimension"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.CustomDimension( - name="name_value", - parameter_name="parameter_name_value", - display_name="display_name_value", - description="description_value", - scope=resources.CustomDimension.DimensionScope.EVENT, - disallow_ads_personalization=True, + return_value = analytics_admin.ListCustomMetricsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CustomDimension.pb(return_value) + return_value = analytics_admin.ListCustomMetricsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_custom_dimension(request) + response = client.list_custom_metrics(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CustomDimension) - assert response.name == "name_value" - assert response.parameter_name == "parameter_name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.scope == resources.CustomDimension.DimensionScope.EVENT - assert response.disallow_ads_personalization is True + assert isinstance(response, pagers.ListCustomMetricsPager) + assert response.next_page_token == "next_page_token_value" -def test_create_custom_dimension_rest_use_cached_wrapped_rpc(): +def test_list_custom_metrics_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -75150,8 +85399,7 @@ def test_create_custom_dimension_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_custom_dimension - in client._transport._wrapped_methods + client._transport.list_custom_metrics in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -75160,24 +85408,24 @@ def test_create_custom_dimension_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_custom_dimension + client._transport.list_custom_metrics ] = mock_rpc request = {} - client.create_custom_dimension(request) + client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_custom_dimension(request) + client.list_custom_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_custom_dimension_rest_required_fields( - request_type=analytics_admin.CreateCustomDimensionRequest, +def test_list_custom_metrics_rest_required_fields( + request_type=analytics_admin.ListCustomMetricsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -75193,7 +85441,7 @@ def test_create_custom_dimension_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_dimension._get_unset_required_fields(jsonified_request) + ).list_custom_metrics._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -75202,7 +85450,14 @@ def test_create_custom_dimension_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_dimension._get_unset_required_fields(jsonified_request) + ).list_custom_metrics._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -75216,7 +85471,7 @@ def test_create_custom_dimension_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.CustomDimension() + return_value = analytics_admin.ListCustomMetricsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -75228,48 +85483,47 @@ def test_create_custom_dimension_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CustomDimension.pb(return_value) + return_value = analytics_admin.ListCustomMetricsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_custom_dimension(request) + response = client.list_custom_metrics(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_custom_dimension_rest_unset_required_fields(): +def test_list_custom_metrics_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_custom_dimension._get_unset_required_fields({}) + unset_fields = transport.list_custom_metrics._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "parent", - "customDimension", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_custom_dimension_rest_interceptors(null_interceptor): +def test_list_custom_metrics_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -75282,14 +85536,14 @@ def 
test_create_custom_dimension_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_custom_dimension" + transports.AnalyticsAdminServiceRestInterceptor, "post_list_custom_metrics" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_custom_dimension" + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_custom_metrics" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateCustomDimensionRequest.pb( - analytics_admin.CreateCustomDimensionRequest() + pb_message = analytics_admin.ListCustomMetricsRequest.pb( + analytics_admin.ListCustomMetricsRequest() ) transcode.return_value = { "method": "post", @@ -75301,19 +85555,19 @@ def test_create_custom_dimension_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.CustomDimension.to_json( - resources.CustomDimension() + req.return_value._content = analytics_admin.ListCustomMetricsResponse.to_json( + analytics_admin.ListCustomMetricsResponse() ) - request = analytics_admin.CreateCustomDimensionRequest() + request = analytics_admin.ListCustomMetricsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.CustomDimension() + post.return_value = analytics_admin.ListCustomMetricsResponse() - client.create_custom_dimension( + client.list_custom_metrics( request, metadata=[ ("key", "val"), @@ -75325,8 +85579,8 @@ def test_create_custom_dimension_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_custom_dimension_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateCustomDimensionRequest +def test_list_custom_metrics_rest_bad_request( + transport: 
str = "rest", request_type=analytics_admin.ListCustomMetricsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -75346,10 +85600,10 @@ def test_create_custom_dimension_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_custom_dimension(request) + client.list_custom_metrics(request) -def test_create_custom_dimension_rest_flattened(): +def test_list_custom_metrics_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -75358,7 +85612,7 @@ def test_create_custom_dimension_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.CustomDimension() + return_value = analytics_admin.ListCustomMetricsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1"} @@ -75366,7 +85620,6 @@ def test_create_custom_dimension_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - custom_dimension=resources.CustomDimension(name="name_value"), ) mock_args.update(sample_request) @@ -75374,176 +85627,137 @@ def test_create_custom_dimension_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CustomDimension.pb(return_value) + return_value = analytics_admin.ListCustomMetricsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_custom_dimension(**mock_args) + client.list_custom_metrics(**mock_args) # Establish that the underlying call was made with the expected 
# request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/customDimensions" - % client.transport._host, + "%s/v1alpha/{parent=properties/*}/customMetrics" % client.transport._host, args[1], ) -def test_create_custom_dimension_rest_flattened_error(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_list_custom_metrics_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_custom_metrics( + analytics_admin.ListCustomMetricsRequest(), + parent="parent_value", + ) + + +def test_list_custom_metrics_rest_pager(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + resources.CustomMetric(), + resources.CustomMetric(), + ], + next_page_token="abc", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[], + next_page_token="def", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + ], + next_page_token="ghi", + ), + analytics_admin.ListCustomMetricsResponse( + custom_metrics=[ + resources.CustomMetric(), + resources.CustomMetric(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListCustomMetricsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_custom_dimension( - analytics_admin.CreateCustomDimensionRequest(), - parent="parent_value", - custom_dimension=resources.CustomDimension(name="name_value"), - ) + pager = client.list_custom_metrics(request=sample_request) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.CustomMetric) for i in results) -def test_create_custom_dimension_rest_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pages = list(client.list_custom_metrics(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateCustomDimensionRequest, + analytics_admin.ArchiveCustomMetricRequest, dict, ], ) -def test_update_custom_dimension_rest(request_type): +def test_archive_custom_metric_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "custom_dimension": {"name": "properties/sample1/customDimensions/sample2"} - } - request_init["custom_dimension"] = { - "name": "properties/sample1/customDimensions/sample2", - "parameter_name": "parameter_name_value", - "display_name": "display_name_value", - "description": "description_value", - "scope": 1, - "disallow_ads_personalization": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateCustomDimensionRequest.meta.fields[ - "custom_dimension" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["custom_dimension"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the 
runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["custom_dimension"][field])): - del request_init["custom_dimension"][field][i][subfield] - else: - del request_init["custom_dimension"][field][subfield] + request_init = {"name": "properties/sample1/customMetrics/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.CustomDimension( - name="name_value", - parameter_name="parameter_name_value", - display_name="display_name_value", - description="description_value", - scope=resources.CustomDimension.DimensionScope.EVENT, - disallow_ads_personalization=True, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.CustomDimension.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_custom_dimension(request) + response = client.archive_custom_metric(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CustomDimension) - assert response.name == "name_value" - assert response.parameter_name == "parameter_name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.scope == resources.CustomDimension.DimensionScope.EVENT - assert response.disallow_ads_personalization is True + assert response is None -def test_update_custom_dimension_rest_use_cached_wrapped_rpc(): +def test_archive_custom_metric_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -75558,7 +85772,7 @@ def test_update_custom_dimension_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_custom_dimension + client._transport.archive_custom_metric in client._transport._wrapped_methods ) @@ -75568,28 +85782,29 @@ def test_update_custom_dimension_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_custom_dimension + client._transport.archive_custom_metric ] = mock_rpc request = {} - client.update_custom_dimension(request) + client.archive_custom_metric(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_custom_dimension(request) + client.archive_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_custom_dimension_rest_required_fields( - request_type=analytics_admin.UpdateCustomDimensionRequest, +def test_archive_custom_metric_rest_required_fields( + request_type=analytics_admin.ArchiveCustomMetricRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -75600,19 +85815,21 @@ def test_update_custom_dimension_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_custom_dimension._get_unset_required_fields(jsonified_request) + ).archive_custom_metric._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_custom_dimension._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).archive_custom_metric._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -75621,7 +85838,7 @@ def test_update_custom_dimension_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.CustomDimension() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -75633,7 +85850,7 @@ def test_update_custom_dimension_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -75641,32 +85858,29 @@ def test_update_custom_dimension_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.CustomDimension.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_custom_dimension(request) + response = client.archive_custom_metric(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_custom_dimension_rest_unset_required_fields(): +def test_archive_custom_metric_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_custom_dimension._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) + unset_fields = transport.archive_custom_metric._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_custom_dimension_rest_interceptors(null_interceptor): +def test_archive_custom_metric_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -75679,14 +85893,11 @@ def test_update_custom_dimension_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_update_custom_dimension" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_custom_dimension" + transports.AnalyticsAdminServiceRestInterceptor, "pre_archive_custom_metric" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.UpdateCustomDimensionRequest.pb( - analytics_admin.UpdateCustomDimensionRequest() + pb_message = analytics_admin.ArchiveCustomMetricRequest.pb( + analytics_admin.ArchiveCustomMetricRequest() ) transcode.return_value = { "method": "post", @@ -75698,19 +85909,15 @@ def test_update_custom_dimension_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.CustomDimension.to_json( - resources.CustomDimension() - ) - request = analytics_admin.UpdateCustomDimensionRequest() + request = analytics_admin.ArchiveCustomMetricRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.CustomDimension() - client.update_custom_dimension( + client.archive_custom_metric( request, metadata=[ ("key", "val"), @@ -75719,11 +85926,10 @@ def test_update_custom_dimension_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_custom_dimension_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.UpdateCustomDimensionRequest +def test_archive_custom_metric_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ArchiveCustomMetricRequest ): client = AnalyticsAdminServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -75731,9 +85937,7 @@ def test_update_custom_dimension_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "custom_dimension": {"name": "properties/sample1/customDimensions/sample2"} - } + request_init = {"name": "properties/sample1/customMetrics/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -75745,10 +85949,10 @@ def test_update_custom_dimension_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_custom_dimension(request) + client.archive_custom_metric(request) -def test_update_custom_dimension_rest_flattened(): +def test_archive_custom_metric_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -75757,43 +85961,38 @@ def test_update_custom_dimension_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.CustomDimension() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = { - "custom_dimension": {"name": "properties/sample1/customDimensions/sample2"} - } + sample_request = {"name": "properties/sample1/customMetrics/sample2"} # get truthy value for each flattened field mock_args = dict( - custom_dimension=resources.CustomDimension(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.CustomDimension.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_custom_dimension(**mock_args) + client.archive_custom_metric(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{custom_dimension.name=properties/*/customDimensions/*}" + "%s/v1alpha/{name=properties/*/customMetrics/*}:archive" % client.transport._host, args[1], ) -def test_update_custom_dimension_rest_flattened_error(transport: str = "rest"): +def test_archive_custom_metric_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -75802,14 +86001,13 @@ def test_update_custom_dimension_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_custom_dimension( - analytics_admin.UpdateCustomDimensionRequest(), - custom_dimension=resources.CustomDimension(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.archive_custom_metric( + analytics_admin.ArchiveCustomMetricRequest(), + name="name_value", ) -def test_update_custom_dimension_rest_error(): +def test_archive_custom_metric_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -75818,44 +86016,60 @@ def test_update_custom_dimension_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListCustomDimensionsRequest, + analytics_admin.GetCustomMetricRequest, dict, ], ) -def test_list_custom_dimensions_rest(request_type): +def test_get_custom_metric_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/customMetrics/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListCustomDimensionsResponse( - next_page_token="next_page_token_value", + return_value = resources.CustomMetric( + name="name_value", + parameter_name="parameter_name_value", + display_name="display_name_value", + description="description_value", + measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, + scope=resources.CustomMetric.MetricScope.EVENT, + restricted_metric_type=[ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListCustomDimensionsResponse.pb(return_value) + return_value = resources.CustomMetric.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_custom_dimensions(request) + response = client.get_custom_metric(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListCustomDimensionsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.CustomMetric) + assert response.name == "name_value" + assert response.parameter_name == "parameter_name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD + assert response.scope == resources.CustomMetric.MetricScope.EVENT + assert response.restricted_metric_type == [ + resources.CustomMetric.RestrictedMetricType.COST_DATA + ] -def test_list_custom_dimensions_rest_use_cached_wrapped_rpc(): +def test_get_custom_metric_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -75869,10 +86083,7 @@ def test_list_custom_dimensions_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_custom_dimensions - in client._transport._wrapped_methods - ) + assert client._transport.get_custom_metric in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -75880,29 +86091,29 @@ def test_list_custom_dimensions_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_custom_dimensions + client._transport.get_custom_metric ] = mock_rpc request = {} - client.list_custom_dimensions(request) + client.get_custom_metric(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_custom_dimensions(request) + client.get_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_custom_dimensions_rest_required_fields( - request_type=analytics_admin.ListCustomDimensionsRequest, +def test_get_custom_metric_rest_required_fields( + request_type=analytics_admin.GetCustomMetricRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -75913,28 +86124,21 @@ def test_list_custom_dimensions_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_custom_dimensions._get_unset_required_fields(jsonified_request) + ).get_custom_metric._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_custom_dimensions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).get_custom_metric._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -75943,7 +86147,7 @@ def test_list_custom_dimensions_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListCustomDimensionsResponse() + return_value = resources.CustomMetric() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -75964,38 +86168,30 @@ def test_list_custom_dimensions_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListCustomDimensionsResponse.pb(return_value) + return_value = resources.CustomMetric.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_custom_dimensions(request) + response = client.get_custom_metric(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_custom_dimensions_rest_unset_required_fields(): +def test_get_custom_metric_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_custom_dimensions._get_unset_required_fields({}) - assert 
set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_custom_metric._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_custom_dimensions_rest_interceptors(null_interceptor): +def test_get_custom_metric_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -76008,14 +86204,14 @@ def test_list_custom_dimensions_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_custom_dimensions" + transports.AnalyticsAdminServiceRestInterceptor, "post_get_custom_metric" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_custom_dimensions" + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_custom_metric" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListCustomDimensionsRequest.pb( - analytics_admin.ListCustomDimensionsRequest() + pb_message = analytics_admin.GetCustomMetricRequest.pb( + analytics_admin.GetCustomMetricRequest() ) transcode.return_value = { "method": "post", @@ -76027,21 +86223,19 @@ def test_list_custom_dimensions_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.ListCustomDimensionsResponse.to_json( - analytics_admin.ListCustomDimensionsResponse() - ) + req.return_value._content = resources.CustomMetric.to_json( + resources.CustomMetric() ) - request = analytics_admin.ListCustomDimensionsRequest() + request = analytics_admin.GetCustomMetricRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), 
] pre.return_value = request, metadata - post.return_value = analytics_admin.ListCustomDimensionsResponse() + post.return_value = resources.CustomMetric() - client.list_custom_dimensions( + client.get_custom_metric( request, metadata=[ ("key", "val"), @@ -76053,8 +86247,8 @@ def test_list_custom_dimensions_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_custom_dimensions_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListCustomDimensionsRequest +def test_get_custom_metric_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetCustomMetricRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -76062,7 +86256,7 @@ def test_list_custom_dimensions_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/customMetrics/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -76074,10 +86268,10 @@ def test_list_custom_dimensions_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_custom_dimensions(request) + client.get_custom_metric(request) -def test_list_custom_dimensions_rest_flattened(): +def test_get_custom_metric_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -76086,14 +86280,14 @@ def test_list_custom_dimensions_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListCustomDimensionsResponse() + return_value = resources.CustomMetric() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/customMetrics/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -76101,25 +86295,24 @@ def test_list_custom_dimensions_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListCustomDimensionsResponse.pb(return_value) + return_value = resources.CustomMetric.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_custom_dimensions(**mock_args) + client.get_custom_metric(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/customDimensions" - % client.transport._host, + "%s/v1alpha/{name=properties/*/customMetrics/*}" % client.transport._host, args[1], ) -def test_list_custom_dimensions_rest_flattened_error(transport: str = "rest"): +def test_get_custom_metric_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -76128,111 +86321,66 @@ def test_list_custom_dimensions_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_custom_dimensions( - analytics_admin.ListCustomDimensionsRequest(), - parent="parent_value", + client.get_custom_metric( + analytics_admin.GetCustomMetricRequest(), + name="name_value", ) -def test_list_custom_dimensions_rest_pager(transport: str = "rest"): +def test_get_custom_metric_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - resources.CustomDimension(), - resources.CustomDimension(), - ], - next_page_token="abc", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[], - next_page_token="def", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - ], - next_page_token="ghi", - ), - analytics_admin.ListCustomDimensionsResponse( - custom_dimensions=[ - resources.CustomDimension(), - resources.CustomDimension(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListCustomDimensionsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = 
client.list_custom_dimensions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.CustomDimension) for i in results) - - pages = list(client.list_custom_dimensions(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - analytics_admin.ArchiveCustomDimensionRequest, + analytics_admin.GetDataRetentionSettingsRequest, dict, ], ) -def test_archive_custom_dimension_rest(request_type): +def test_get_data_retention_settings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/customDimensions/sample2"} + request_init = {"name": "properties/sample1/dataRetentionSettings"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.DataRetentionSettings( + name="name_value", + event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + reset_user_data_on_new_activity=True, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.DataRetentionSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.archive_custom_dimension(request) + response = client.get_data_retention_settings(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.DataRetentionSettings) + assert response.name == "name_value" + assert ( + response.event_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) + assert response.reset_user_data_on_new_activity is True -def test_archive_custom_dimension_rest_use_cached_wrapped_rpc(): +def test_get_data_retention_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -76247,7 +86395,7 @@ def test_archive_custom_dimension_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.archive_custom_dimension + client._transport.get_data_retention_settings in client._transport._wrapped_methods ) @@ -76257,24 +86405,24 @@ def test_archive_custom_dimension_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.archive_custom_dimension + client._transport.get_data_retention_settings ] = mock_rpc request = {} - client.archive_custom_dimension(request) + client.get_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.archive_custom_dimension(request) + client.get_data_retention_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_archive_custom_dimension_rest_required_fields( - request_type=analytics_admin.ArchiveCustomDimensionRequest, +def test_get_data_retention_settings_rest_required_fields( + request_type=analytics_admin.GetDataRetentionSettingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -76290,7 +86438,7 @@ def test_archive_custom_dimension_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).archive_custom_dimension._get_unset_required_fields(jsonified_request) + ).get_data_retention_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -76299,7 +86447,7 @@ def test_archive_custom_dimension_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).archive_custom_dimension._get_unset_required_fields(jsonified_request) + ).get_data_retention_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -76313,7 +86461,7 @@ def test_archive_custom_dimension_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.DataRetentionSettings() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -76325,37 +86473,39 @@ def test_archive_custom_dimension_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.DataRetentionSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.archive_custom_dimension(request) + response = client.get_data_retention_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_archive_custom_dimension_rest_unset_required_fields(): +def test_get_data_retention_settings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.archive_custom_dimension._get_unset_required_fields({}) + unset_fields = transport.get_data_retention_settings._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_archive_custom_dimension_rest_interceptors(null_interceptor): +def test_get_data_retention_settings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -76368,11 +86518,16 @@ def test_archive_custom_dimension_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as 
transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_archive_custom_dimension" + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_data_retention_settings", + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "pre_get_data_retention_settings", ) as pre: pre.assert_not_called() - pb_message = analytics_admin.ArchiveCustomDimensionRequest.pb( - analytics_admin.ArchiveCustomDimensionRequest() + post.assert_not_called() + pb_message = analytics_admin.GetDataRetentionSettingsRequest.pb( + analytics_admin.GetDataRetentionSettingsRequest() ) transcode.return_value = { "method": "post", @@ -76384,15 +86539,19 @@ def test_archive_custom_dimension_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.DataRetentionSettings.to_json( + resources.DataRetentionSettings() + ) - request = analytics_admin.ArchiveCustomDimensionRequest() + request = analytics_admin.GetDataRetentionSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.DataRetentionSettings() - client.archive_custom_dimension( + client.get_data_retention_settings( request, metadata=[ ("key", "val"), @@ -76401,10 +86560,12 @@ def test_archive_custom_dimension_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_archive_custom_dimension_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ArchiveCustomDimensionRequest +def test_get_data_retention_settings_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.GetDataRetentionSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -76412,7 +86573,7 @@ def test_archive_custom_dimension_rest_bad_request( ) # send a request that will 
satisfy transcoding - request_init = {"name": "properties/sample1/customDimensions/sample2"} + request_init = {"name": "properties/sample1/dataRetentionSettings"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -76424,10 +86585,10 @@ def test_archive_custom_dimension_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.archive_custom_dimension(request) + client.get_data_retention_settings(request) -def test_archive_custom_dimension_rest_flattened(): +def test_get_data_retention_settings_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -76436,10 +86597,10 @@ def test_archive_custom_dimension_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.DataRetentionSettings() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/customDimensions/sample2"} + sample_request = {"name": "properties/sample1/dataRetentionSettings"} # get truthy value for each flattened field mock_args = dict( @@ -76450,24 +86611,26 @@ def test_archive_custom_dimension_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.DataRetentionSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.archive_custom_dimension(**mock_args) + client.get_data_retention_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/customDimensions/*}:archive" + "%s/v1alpha/{name=properties/*/dataRetentionSettings}" % client.transport._host, args[1], ) -def test_archive_custom_dimension_rest_flattened_error(transport: str = "rest"): +def test_get_data_retention_settings_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -76476,13 +86639,13 @@ def test_archive_custom_dimension_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.archive_custom_dimension( - analytics_admin.ArchiveCustomDimensionRequest(), + client.get_data_retention_settings( + analytics_admin.GetDataRetentionSettingsRequest(), name="name_value", ) -def test_archive_custom_dimension_rest_error(): +def test_get_data_retention_settings_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -76491,54 +86654,129 @@ def test_archive_custom_dimension_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetCustomDimensionRequest, + analytics_admin.UpdateDataRetentionSettingsRequest, dict, ], ) -def test_get_custom_dimension_rest(request_type): +def test_update_data_retention_settings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/customDimensions/sample2"} + request_init = { + "data_retention_settings": {"name": "properties/sample1/dataRetentionSettings"} + } + request_init["data_retention_settings"] = { + "name": "properties/sample1/dataRetentionSettings", + "event_data_retention": 1, + "reset_user_data_on_new_activity": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateDataRetentionSettingsRequest.meta.fields[ + "data_retention_settings" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "data_retention_settings" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_retention_settings"][field])): + del 
request_init["data_retention_settings"][field][i][subfield] + else: + del request_init["data_retention_settings"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.CustomDimension( + return_value = resources.DataRetentionSettings( name="name_value", - parameter_name="parameter_name_value", - display_name="display_name_value", - description="description_value", - scope=resources.CustomDimension.DimensionScope.EVENT, - disallow_ads_personalization=True, + event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + reset_user_data_on_new_activity=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CustomDimension.pb(return_value) + return_value = resources.DataRetentionSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_custom_dimension(request) + response = client.update_data_retention_settings(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CustomDimension) + assert isinstance(response, resources.DataRetentionSettings) assert response.name == "name_value" - assert response.parameter_name == "parameter_name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.scope == resources.CustomDimension.DimensionScope.EVENT - assert response.disallow_ads_personalization is True + assert ( + response.event_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) + assert response.reset_user_data_on_new_activity is True -def test_get_custom_dimension_rest_use_cached_wrapped_rpc(): +def test_update_data_retention_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -76553,7 +86791,8 @@ def test_get_custom_dimension_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_custom_dimension in client._transport._wrapped_methods + client._transport.update_data_retention_settings + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -76562,29 +86801,28 @@ def test_get_custom_dimension_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_custom_dimension + client._transport.update_data_retention_settings ] = mock_rpc request = {} - client.get_custom_dimension(request) + client.update_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_custom_dimension(request) + client.update_data_retention_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_custom_dimension_rest_required_fields( - request_type=analytics_admin.GetCustomDimensionRequest, +def test_update_data_retention_settings_rest_required_fields( + request_type=analytics_admin.UpdateDataRetentionSettingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -76595,21 +86833,19 @@ def test_get_custom_dimension_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_dimension._get_unset_required_fields(jsonified_request) + ).update_data_retention_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_dimension._get_unset_required_fields(jsonified_request) + ).update_data_retention_settings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -76618,7 +86854,7 @@ def test_get_custom_dimension_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.CustomDimension() + return_value = resources.DataRetentionSettings() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -76630,39 +86866,50 @@ def test_get_custom_dimension_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CustomDimension.pb(return_value) + return_value = resources.DataRetentionSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_custom_dimension(request) + response = client.update_data_retention_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_custom_dimension_rest_unset_required_fields(): +def test_update_data_retention_settings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_custom_dimension._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_data_retention_settings._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "dataRetentionSettings", + "updateMask", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_custom_dimension_rest_interceptors(null_interceptor): +def 
test_update_data_retention_settings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -76675,14 +86922,16 @@ def test_get_custom_dimension_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_custom_dimension" + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_data_retention_settings", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_custom_dimension" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_update_data_retention_settings", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetCustomDimensionRequest.pb( - analytics_admin.GetCustomDimensionRequest() + pb_message = analytics_admin.UpdateDataRetentionSettingsRequest.pb( + analytics_admin.UpdateDataRetentionSettingsRequest() ) transcode.return_value = { "method": "post", @@ -76694,19 +86943,19 @@ def test_get_custom_dimension_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.CustomDimension.to_json( - resources.CustomDimension() + req.return_value._content = resources.DataRetentionSettings.to_json( + resources.DataRetentionSettings() ) - request = analytics_admin.GetCustomDimensionRequest() + request = analytics_admin.UpdateDataRetentionSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.CustomDimension() + post.return_value = resources.DataRetentionSettings() - client.get_custom_dimension( + client.update_data_retention_settings( request, metadata=[ ("key", "val"), @@ -76718,8 +86967,9 @@ def 
test_get_custom_dimension_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_custom_dimension_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetCustomDimensionRequest +def test_update_data_retention_settings_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.UpdateDataRetentionSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -76727,7 +86977,9 @@ def test_get_custom_dimension_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/customDimensions/sample2"} + request_init = { + "data_retention_settings": {"name": "properties/sample1/dataRetentionSettings"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -76739,10 +86991,10 @@ def test_get_custom_dimension_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_custom_dimension(request) + client.update_data_retention_settings(request) -def test_get_custom_dimension_rest_flattened(): +def test_update_data_retention_settings_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -76751,14 +87003,19 @@ def test_get_custom_dimension_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.CustomDimension() + return_value = resources.DataRetentionSettings() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/customDimensions/sample2"} + sample_request = { + "data_retention_settings": { + "name": "properties/sample1/dataRetentionSettings" + } + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + data_retention_settings=resources.DataRetentionSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -76766,25 +87023,25 @@ def test_get_custom_dimension_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CustomDimension.pb(return_value) + return_value = resources.DataRetentionSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_custom_dimension(**mock_args) + client.update_data_retention_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/customDimensions/*}" + "%s/v1alpha/{data_retention_settings.name=properties/*/dataRetentionSettings}" % client.transport._host, args[1], ) -def test_get_custom_dimension_rest_flattened_error(transport: str = "rest"): +def test_update_data_retention_settings_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -76793,13 +87050,14 @@ def test_get_custom_dimension_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_custom_dimension( - analytics_admin.GetCustomDimensionRequest(), - name="name_value", + client.update_data_retention_settings( + analytics_admin.UpdateDataRetentionSettingsRequest(), + data_retention_settings=resources.DataRetentionSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_custom_dimension_rest_error(): +def test_update_data_retention_settings_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -76808,11 +87066,11 @@ def test_get_custom_dimension_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateCustomMetricRequest, + analytics_admin.CreateDataStreamRequest, dict, ], ) -def test_create_custom_metric_rest(request_type): +def test_create_data_stream_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -76820,21 +87078,32 @@ def test_create_custom_metric_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "properties/sample1"} - request_init["custom_metric"] = { + request_init["data_stream"] = { + "web_stream_data": { + "measurement_id": "measurement_id_value", + "firebase_app_id": "firebase_app_id_value", + "default_uri": "default_uri_value", + }, + "android_app_stream_data": { + "firebase_app_id": "firebase_app_id_value", + "package_name": "package_name_value", + }, + "ios_app_stream_data": { + "firebase_app_id": "firebase_app_id_value", + "bundle_id": "bundle_id_value", + }, "name": "name_value", - "parameter_name": "parameter_name_value", + "type_": 1, "display_name": "display_name_value", - "description": "description_value", - "measurement_unit": 1, - "scope": 1, - "restricted_metric_type": [1], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, } # The version of a generated dependency at test runtime may differ 
from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateCustomMetricRequest.meta.fields["custom_metric"] + test_field = analytics_admin.CreateDataStreamRequest.meta.fields["data_stream"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -76862,7 +87131,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["custom_metric"].items(): # pragma: NO COVER + for field, value in request_init["data_stream"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -76892,52 +87161,40 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["custom_metric"][field])): - del request_init["custom_metric"][field][i][subfield] + for i in range(0, len(request_init["data_stream"][field])): + del request_init["data_stream"][field][i][subfield] else: - del request_init["custom_metric"][field][subfield] + del request_init["data_stream"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.CustomMetric( + return_value = resources.DataStream( name="name_value", - parameter_name="parameter_name_value", + type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, display_name="display_name_value", - description="description_value", - measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, - scope=resources.CustomMetric.MetricScope.EVENT, - restricted_metric_type=[ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CustomMetric.pb(return_value) + return_value = resources.DataStream.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_custom_metric(request) + response = client.create_data_stream(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CustomMetric) + assert isinstance(response, resources.DataStream) assert response.name == "name_value" - assert response.parameter_name == "parameter_name_value" + assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD - assert response.scope == resources.CustomMetric.MetricScope.EVENT - assert response.restricted_metric_type == [ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ] -def test_create_custom_metric_rest_use_cached_wrapped_rpc(): +def test_create_data_stream_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -76952,7 +87209,7 @@ def test_create_custom_metric_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_custom_metric in client._transport._wrapped_methods + client._transport.create_data_stream in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -76961,24 +87218,24 @@ def test_create_custom_metric_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_custom_metric + client._transport.create_data_stream ] = mock_rpc request = {} - client.create_custom_metric(request) + client.create_data_stream(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_custom_metric(request) + client.create_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_custom_metric_rest_required_fields( - request_type=analytics_admin.CreateCustomMetricRequest, +def test_create_data_stream_rest_required_fields( + request_type=analytics_admin.CreateDataStreamRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -76994,7 +87251,7 @@ def test_create_custom_metric_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_metric._get_unset_required_fields(jsonified_request) + ).create_data_stream._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -77003,7 +87260,7 @@ def test_create_custom_metric_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_metric._get_unset_required_fields(jsonified_request) + ).create_data_stream._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -77017,7 +87274,7 @@ def test_create_custom_metric_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.CustomMetric() + return_value = resources.DataStream() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -77039,38 +87296,38 @@ def test_create_custom_metric_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CustomMetric.pb(return_value) + return_value = resources.DataStream.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_custom_metric(request) + response = client.create_data_stream(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_custom_metric_rest_unset_required_fields(): +def test_create_data_stream_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_custom_metric._get_unset_required_fields({}) + unset_fields = transport.create_data_stream._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "parent", - "customMetric", + "dataStream", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_custom_metric_rest_interceptors(null_interceptor): +def test_create_data_stream_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -77083,14 +87340,14 @@ def test_create_custom_metric_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_custom_metric" + transports.AnalyticsAdminServiceRestInterceptor, "post_create_data_stream" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, 
"pre_create_custom_metric" + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_data_stream" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateCustomMetricRequest.pb( - analytics_admin.CreateCustomMetricRequest() + pb_message = analytics_admin.CreateDataStreamRequest.pb( + analytics_admin.CreateDataStreamRequest() ) transcode.return_value = { "method": "post", @@ -77102,19 +87359,17 @@ def test_create_custom_metric_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.CustomMetric.to_json( - resources.CustomMetric() - ) + req.return_value._content = resources.DataStream.to_json(resources.DataStream()) - request = analytics_admin.CreateCustomMetricRequest() + request = analytics_admin.CreateDataStreamRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.CustomMetric() + post.return_value = resources.DataStream() - client.create_custom_metric( + client.create_data_stream( request, metadata=[ ("key", "val"), @@ -77126,8 +87381,8 @@ def test_create_custom_metric_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_custom_metric_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateCustomMetricRequest +def test_create_data_stream_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateDataStreamRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -77147,10 +87402,10 @@ def test_create_custom_metric_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_custom_metric(request) + client.create_data_stream(request) -def test_create_custom_metric_rest_flattened(): +def 
test_create_data_stream_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -77159,7 +87414,7 @@ def test_create_custom_metric_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.CustomMetric() + return_value = resources.DataStream() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1"} @@ -77167,7 +87422,11 @@ def test_create_custom_metric_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - custom_metric=resources.CustomMetric(name="name_value"), + data_stream=resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ), ) mock_args.update(sample_request) @@ -77175,24 +87434,24 @@ def test_create_custom_metric_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CustomMetric.pb(return_value) + return_value = resources.DataStream.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_custom_metric(**mock_args) + client.create_data_stream(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/customMetrics" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/dataStreams" % client.transport._host, args[1], ) -def test_create_custom_metric_rest_flattened_error(transport: str = "rest"): +def test_create_data_stream_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -77201,14 +87460,18 @@ def test_create_custom_metric_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_custom_metric( - analytics_admin.CreateCustomMetricRequest(), + client.create_data_stream( + analytics_admin.CreateDataStreamRequest(), parent="parent_value", - custom_metric=resources.CustomMetric(name="name_value"), + data_stream=resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ), ) -def test_create_custom_metric_rest_error(): +def test_create_data_stream_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -77216,139 +87479,40 @@ def test_create_custom_metric_rest_error(): @pytest.mark.parametrize( "request_type", - [ - analytics_admin.UpdateCustomMetricRequest, - dict, - ], -) -def test_update_custom_metric_rest(request_type): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "custom_metric": {"name": "properties/sample1/customMetrics/sample2"} - } - request_init["custom_metric"] = { - "name": "properties/sample1/customMetrics/sample2", - "parameter_name": "parameter_name_value", - "display_name": "display_name_value", - 
"description": "description_value", - "measurement_unit": 1, - "scope": 1, - "restricted_metric_type": [1], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateCustomMetricRequest.meta.fields["custom_metric"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["custom_metric"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in 
runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["custom_metric"][field])): - del request_init["custom_metric"][field][i][subfield] - else: - del request_init["custom_metric"][field][subfield] + [ + analytics_admin.DeleteDataStreamRequest, + dict, + ], +) +def test_delete_data_stream_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.CustomMetric( - name="name_value", - parameter_name="parameter_name_value", - display_name="display_name_value", - description="description_value", - measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, - scope=resources.CustomMetric.MetricScope.EVENT, - restricted_metric_type=[ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ], - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.CustomMetric.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_custom_metric(request) + response = client.delete_data_stream(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.CustomMetric) - assert response.name == "name_value" - assert response.parameter_name == "parameter_name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD - assert response.scope == resources.CustomMetric.MetricScope.EVENT - assert response.restricted_metric_type == [ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ] + assert response is None -def test_update_custom_metric_rest_use_cached_wrapped_rpc(): +def test_delete_data_stream_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -77363,7 +87527,7 @@ def test_update_custom_metric_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_custom_metric in 
client._transport._wrapped_methods + client._transport.delete_data_stream in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -77372,28 +87536,29 @@ def test_update_custom_metric_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_custom_metric + client._transport.delete_data_stream ] = mock_rpc request = {} - client.update_custom_metric(request) + client.delete_data_stream(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_custom_metric(request) + client.delete_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_custom_metric_rest_required_fields( - request_type=analytics_admin.UpdateCustomMetricRequest, +def test_delete_data_stream_rest_required_fields( + request_type=analytics_admin.DeleteDataStreamRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -77404,19 +87569,21 @@ def test_update_custom_metric_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_custom_metric._get_unset_required_fields(jsonified_request) + ).delete_data_stream._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_custom_metric._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).delete_data_stream._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -77425,7 +87592,7 @@ def test_update_custom_metric_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.CustomMetric() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -77437,40 +87604,36 @@ def test_update_custom_metric_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.CustomMetric.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_custom_metric(request) + response = client.delete_data_stream(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_custom_metric_rest_unset_required_fields(): +def test_delete_data_stream_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.update_custom_metric._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) + unset_fields = transport.delete_data_stream._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_custom_metric_rest_interceptors(null_interceptor): +def test_delete_data_stream_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -77483,14 +87646,11 @@ def test_update_custom_metric_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_update_custom_metric" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_custom_metric" + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_data_stream" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.UpdateCustomMetricRequest.pb( - analytics_admin.UpdateCustomMetricRequest() + pb_message = analytics_admin.DeleteDataStreamRequest.pb( + analytics_admin.DeleteDataStreamRequest() ) transcode.return_value = { "method": "post", @@ -77502,19 +87662,15 @@ def test_update_custom_metric_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.CustomMetric.to_json( - resources.CustomMetric() - ) - request = analytics_admin.UpdateCustomMetricRequest() + request = analytics_admin.DeleteDataStreamRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.CustomMetric() - client.update_custom_metric( + client.delete_data_stream( request, metadata=[ ("key", 
"val"), @@ -77523,11 +87679,10 @@ def test_update_custom_metric_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_custom_metric_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.UpdateCustomMetricRequest +def test_delete_data_stream_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteDataStreamRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -77535,9 +87690,7 @@ def test_update_custom_metric_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "custom_metric": {"name": "properties/sample1/customMetrics/sample2"} - } + request_init = {"name": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -77549,10 +87702,10 @@ def test_update_custom_metric_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_custom_metric(request) + client.delete_data_stream(request) -def test_update_custom_metric_rest_flattened(): +def test_delete_data_stream_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -77561,43 +87714,37 @@ def test_update_custom_metric_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.CustomMetric() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = { - "custom_metric": {"name": "properties/sample1/customMetrics/sample2"} - } + sample_request = {"name": "properties/sample1/dataStreams/sample2"} # get truthy value for each flattened field mock_args = dict( - custom_metric=resources.CustomMetric(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.CustomMetric.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_custom_metric(**mock_args) + client.delete_data_stream(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{custom_metric.name=properties/*/customMetrics/*}" - % client.transport._host, + "%s/v1alpha/{name=properties/*/dataStreams/*}" % client.transport._host, args[1], ) -def test_update_custom_metric_rest_flattened_error(transport: str = "rest"): +def test_delete_data_stream_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -77606,14 +87753,13 @@ def test_update_custom_metric_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_custom_metric( - analytics_admin.UpdateCustomMetricRequest(), - custom_metric=resources.CustomMetric(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_data_stream( + analytics_admin.DeleteDataStreamRequest(), + name="name_value", ) -def test_update_custom_metric_rest_error(): +def test_delete_data_stream_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -77622,44 +87768,135 @@ def test_update_custom_metric_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListCustomMetricsRequest, + analytics_admin.UpdateDataStreamRequest, dict, ], ) -def test_list_custom_metrics_rest(request_type): +def test_update_data_stream_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"data_stream": {"name": "properties/sample1/dataStreams/sample2"}} + request_init["data_stream"] = { + "web_stream_data": { + "measurement_id": "measurement_id_value", + "firebase_app_id": "firebase_app_id_value", + "default_uri": "default_uri_value", + }, + "android_app_stream_data": { + "firebase_app_id": "firebase_app_id_value", + "package_name": "package_name_value", + }, + "ios_app_stream_data": { + "firebase_app_id": "firebase_app_id_value", + "bundle_id": "bundle_id_value", + }, + "name": "properties/sample1/dataStreams/sample2", + "type_": 1, + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateDataStreamRequest.meta.fields["data_stream"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_stream"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_stream"][field])): + del request_init["data_stream"][field][i][subfield] + else: + del request_init["data_stream"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListCustomMetricsResponse( - next_page_token="next_page_token_value", + return_value = resources.DataStream( + name="name_value", + type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, + display_name="display_name_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListCustomMetricsResponse.pb(return_value) + return_value = resources.DataStream.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_custom_metrics(request) + response = client.update_data_stream(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListCustomMetricsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.DataStream) + assert response.name == "name_value" + assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM + assert response.display_name == "display_name_value" -def test_list_custom_metrics_rest_use_cached_wrapped_rpc(): +def test_update_data_stream_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -77674,7 +87911,7 @@ def test_list_custom_metrics_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_custom_metrics in client._transport._wrapped_methods + client._transport.update_data_stream in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -77683,29 +87920,28 @@ def test_list_custom_metrics_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_custom_metrics + client._transport.update_data_stream ] = mock_rpc request = {} - client.list_custom_metrics(request) + client.update_data_stream(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_custom_metrics(request) + client.update_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_custom_metrics_rest_required_fields( - request_type=analytics_admin.ListCustomMetricsRequest, +def test_update_data_stream_rest_required_fields( + request_type=analytics_admin.UpdateDataStreamRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -77716,28 +87952,19 @@ def test_list_custom_metrics_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_custom_metrics._get_unset_required_fields(jsonified_request) + ).update_data_stream._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_custom_metrics._get_unset_required_fields(jsonified_request) + ).update_data_stream._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -77746,7 +87973,7 @@ def test_list_custom_metrics_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListCustomMetricsResponse() + return_value = resources.DataStream() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -77758,47 +87985,40 @@ def test_list_custom_metrics_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListCustomMetricsResponse.pb(return_value) + return_value = resources.DataStream.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_custom_metrics(request) + response = client.update_data_stream(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_custom_metrics_rest_unset_required_fields(): +def test_update_data_stream_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_custom_metrics._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.update_data_stream._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_custom_metrics_rest_interceptors(null_interceptor): +def test_update_data_stream_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -77811,14 +88031,14 @@ def test_list_custom_metrics_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_custom_metrics" + transports.AnalyticsAdminServiceRestInterceptor, "post_update_data_stream" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_custom_metrics" + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_data_stream" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListCustomMetricsRequest.pb( - analytics_admin.ListCustomMetricsRequest() + pb_message = analytics_admin.UpdateDataStreamRequest.pb( + analytics_admin.UpdateDataStreamRequest() ) transcode.return_value = { "method": "post", @@ -77830,19 +88050,17 @@ def test_list_custom_metrics_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.ListCustomMetricsResponse.to_json( - analytics_admin.ListCustomMetricsResponse() - ) + req.return_value._content = resources.DataStream.to_json(resources.DataStream()) - request = 
analytics_admin.ListCustomMetricsRequest() + request = analytics_admin.UpdateDataStreamRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListCustomMetricsResponse() + post.return_value = resources.DataStream() - client.list_custom_metrics( + client.update_data_stream( request, metadata=[ ("key", "val"), @@ -77854,8 +88072,8 @@ def test_list_custom_metrics_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_custom_metrics_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListCustomMetricsRequest +def test_update_data_stream_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateDataStreamRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -77863,7 +88081,7 @@ def test_list_custom_metrics_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"data_stream": {"name": "properties/sample1/dataStreams/sample2"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -77875,10 +88093,10 @@ def test_list_custom_metrics_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_custom_metrics(request) + client.update_data_stream(request) -def test_list_custom_metrics_rest_flattened(): +def test_update_data_stream_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -77887,14 +88105,21 @@ def test_list_custom_metrics_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListCustomMetricsResponse() + return_value = resources.DataStream() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = { + "data_stream": {"name": "properties/sample1/dataStreams/sample2"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + data_stream=resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -77902,24 +88127,25 @@ def test_list_custom_metrics_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListCustomMetricsResponse.pb(return_value) + return_value = resources.DataStream.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_custom_metrics(**mock_args) + client.update_data_stream(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/customMetrics" % client.transport._host, + "%s/v1alpha/{data_stream.name=properties/*/dataStreams/*}" + % client.transport._host, args[1], ) -def test_list_custom_metrics_rest_flattened_error(transport: str = "rest"): +def test_update_data_stream_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -77928,111 +88154,64 @@ def test_list_custom_metrics_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_custom_metrics( - analytics_admin.ListCustomMetricsRequest(), - parent="parent_value", + client.update_data_stream( + analytics_admin.UpdateDataStreamRequest(), + data_stream=resources.DataStream( + web_stream_data=resources.DataStream.WebStreamData( + measurement_id="measurement_id_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_custom_metrics_rest_pager(transport: str = "rest"): +def test_update_data_stream_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), - resources.CustomMetric(), - resources.CustomMetric(), - ], - next_page_token="abc", - ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[], - next_page_token="def", - ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), - ], - next_page_token="ghi", - ), - analytics_admin.ListCustomMetricsResponse( - custom_metrics=[ - resources.CustomMetric(), - resources.CustomMetric(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListCustomMetricsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect 
= return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_custom_metrics(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.CustomMetric) for i in results) - - pages = list(client.list_custom_metrics(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - analytics_admin.ArchiveCustomMetricRequest, + analytics_admin.ListDataStreamsRequest, dict, ], ) -def test_archive_custom_metric_rest(request_type): +def test_list_data_streams_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/customMetrics/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = analytics_admin.ListDataStreamsResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = analytics_admin.ListDataStreamsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.archive_custom_metric(request) + response = client.list_data_streams(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, pagers.ListDataStreamsPager) + assert response.next_page_token == "next_page_token_value" -def test_archive_custom_metric_rest_use_cached_wrapped_rpc(): +def test_list_data_streams_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -78046,10 +88225,7 @@ def test_archive_custom_metric_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.archive_custom_metric - in client._transport._wrapped_methods - ) + assert client._transport.list_data_streams in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -78057,29 +88233,29 @@ def test_archive_custom_metric_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.archive_custom_metric + client._transport.list_data_streams ] = mock_rpc request = {} - client.archive_custom_metric(request) + client.list_data_streams(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.archive_custom_metric(request) + client.list_data_streams(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_archive_custom_metric_rest_required_fields( - request_type=analytics_admin.ArchiveCustomMetricRequest, +def test_list_data_streams_rest_required_fields( + request_type=analytics_admin.ListDataStreamsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -78090,21 +88266,28 @@ def test_archive_custom_metric_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).archive_custom_metric._get_unset_required_fields(jsonified_request) + ).list_data_streams._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).archive_custom_metric._get_unset_required_fields(jsonified_request) + ).list_data_streams._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -78113,7 +88296,7 @@ def test_archive_custom_metric_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = analytics_admin.ListDataStreamsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -78125,37 +88308,47 @@ def test_archive_custom_metric_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = analytics_admin.ListDataStreamsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.archive_custom_metric(request) + response = client.list_data_streams(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_archive_custom_metric_rest_unset_required_fields(): +def test_list_data_streams_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.archive_custom_metric._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - + unset_fields = transport.list_data_streams._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_archive_custom_metric_rest_interceptors(null_interceptor): +def test_list_data_streams_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -78168,11 +88361,14 @@ def test_archive_custom_metric_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_archive_custom_metric" + transports.AnalyticsAdminServiceRestInterceptor, "post_list_data_streams" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_data_streams" ) as pre: pre.assert_not_called() - pb_message = analytics_admin.ArchiveCustomMetricRequest.pb( - analytics_admin.ArchiveCustomMetricRequest() + post.assert_not_called() + pb_message = analytics_admin.ListDataStreamsRequest.pb( + analytics_admin.ListDataStreamsRequest() ) transcode.return_value = { "method": "post", @@ -78184,15 +88380,19 @@ def test_archive_custom_metric_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = analytics_admin.ListDataStreamsResponse.to_json( + analytics_admin.ListDataStreamsResponse() + ) - request = analytics_admin.ArchiveCustomMetricRequest() + request = analytics_admin.ListDataStreamsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + 
post.return_value = analytics_admin.ListDataStreamsResponse() - client.archive_custom_metric( + client.list_data_streams( request, metadata=[ ("key", "val"), @@ -78201,10 +88401,11 @@ def test_archive_custom_metric_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_archive_custom_metric_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ArchiveCustomMetricRequest +def test_list_data_streams_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListDataStreamsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -78212,7 +88413,7 @@ def test_archive_custom_metric_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/customMetrics/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -78224,10 +88425,10 @@ def test_archive_custom_metric_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.archive_custom_metric(request) + client.list_data_streams(request) -def test_archive_custom_metric_rest_flattened(): +def test_list_data_streams_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -78236,38 +88437,39 @@ def test_archive_custom_metric_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = analytics_admin.ListDataStreamsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/customMetrics/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = analytics_admin.ListDataStreamsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.archive_custom_metric(**mock_args) + client.list_data_streams(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/customMetrics/*}:archive" - % client.transport._host, + "%s/v1alpha/{parent=properties/*}/dataStreams" % client.transport._host, args[1], ) -def test_archive_custom_metric_rest_flattened_error(transport: str = "rest"): +def test_list_data_streams_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -78276,75 +88478,120 @@ def test_archive_custom_metric_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.archive_custom_metric( - analytics_admin.ArchiveCustomMetricRequest(), - name="name_value", + client.list_data_streams( + analytics_admin.ListDataStreamsRequest(), + parent="parent_value", ) -def test_archive_custom_metric_rest_error(): +def test_list_data_streams_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + resources.DataStream(), + resources.DataStream(), + ], + next_page_token="abc", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[], + next_page_token="def", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + ], + next_page_token="ghi", + ), + analytics_admin.ListDataStreamsResponse( + data_streams=[ + resources.DataStream(), + resources.DataStream(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListDataStreamsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_data_streams(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert 
all(isinstance(i, resources.DataStream) for i in results) + + pages = list(client.list_data_streams(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetCustomMetricRequest, + analytics_admin.GetDataStreamRequest, dict, ], ) -def test_get_custom_metric_rest(request_type): +def test_get_data_stream_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/customMetrics/sample2"} + request_init = {"name": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.CustomMetric( + return_value = resources.DataStream( name="name_value", - parameter_name="parameter_name_value", + type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, display_name="display_name_value", - description="description_value", - measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, - scope=resources.CustomMetric.MetricScope.EVENT, - restricted_metric_type=[ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CustomMetric.pb(return_value) + return_value = resources.DataStream.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_custom_metric(request) + response = client.get_data_stream(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CustomMetric) + assert isinstance(response, resources.DataStream) assert response.name == "name_value" - assert response.parameter_name == "parameter_name_value" + assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD - assert response.scope == resources.CustomMetric.MetricScope.EVENT - assert response.restricted_metric_type == [ - resources.CustomMetric.RestrictedMetricType.COST_DATA - ] -def test_get_custom_metric_rest_use_cached_wrapped_rpc(): +def test_get_data_stream_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -78358,32 +88605,30 @@ def test_get_custom_metric_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_custom_metric in client._transport._wrapped_methods + assert client._transport.get_data_stream in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_custom_metric - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_data_stream] = mock_rpc request = {} - client.get_custom_metric(request) + client.get_data_stream(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_custom_metric(request) + client.get_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_custom_metric_rest_required_fields( - request_type=analytics_admin.GetCustomMetricRequest, +def test_get_data_stream_rest_required_fields( + request_type=analytics_admin.GetDataStreamRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -78399,7 +88644,7 @@ def test_get_custom_metric_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_metric._get_unset_required_fields(jsonified_request) + ).get_data_stream._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -78408,7 +88653,7 @@ def test_get_custom_metric_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_metric._get_unset_required_fields(jsonified_request) + ).get_data_stream._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -78422,7 +88667,7 @@ def test_get_custom_metric_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.CustomMetric() + return_value = resources.DataStream() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -78443,30 +88688,30 @@ def test_get_custom_metric_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CustomMetric.pb(return_value) + return_value = resources.DataStream.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_custom_metric(request) + response = client.get_data_stream(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_custom_metric_rest_unset_required_fields(): +def test_get_data_stream_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_custom_metric._get_unset_required_fields({}) + unset_fields = transport.get_data_stream._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_custom_metric_rest_interceptors(null_interceptor): +def test_get_data_stream_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -78479,14 +88724,14 @@ def test_get_custom_metric_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_custom_metric" + transports.AnalyticsAdminServiceRestInterceptor, "post_get_data_stream" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_custom_metric" + transports.AnalyticsAdminServiceRestInterceptor, 
"pre_get_data_stream" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetCustomMetricRequest.pb( - analytics_admin.GetCustomMetricRequest() + pb_message = analytics_admin.GetDataStreamRequest.pb( + analytics_admin.GetDataStreamRequest() ) transcode.return_value = { "method": "post", @@ -78498,19 +88743,17 @@ def test_get_custom_metric_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.CustomMetric.to_json( - resources.CustomMetric() - ) + req.return_value._content = resources.DataStream.to_json(resources.DataStream()) - request = analytics_admin.GetCustomMetricRequest() + request = analytics_admin.GetDataStreamRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.CustomMetric() + post.return_value = resources.DataStream() - client.get_custom_metric( + client.get_data_stream( request, metadata=[ ("key", "val"), @@ -78522,8 +88765,8 @@ def test_get_custom_metric_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_custom_metric_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetCustomMetricRequest +def test_get_data_stream_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetDataStreamRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -78531,7 +88774,7 @@ def test_get_custom_metric_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/customMetrics/sample2"} + request_init = {"name": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -78543,10 +88786,10 @@ def test_get_custom_metric_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_custom_metric(request) + client.get_data_stream(request) -def test_get_custom_metric_rest_flattened(): +def test_get_data_stream_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -78555,10 +88798,10 @@ def test_get_custom_metric_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.CustomMetric() + return_value = resources.DataStream() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/customMetrics/sample2"} + sample_request = {"name": "properties/sample1/dataStreams/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -78570,24 +88813,24 @@ def test_get_custom_metric_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CustomMetric.pb(return_value) + return_value = resources.DataStream.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_custom_metric(**mock_args) + client.get_data_stream(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/customMetrics/*}" % client.transport._host, + "%s/v1alpha/{name=properties/*/dataStreams/*}" % client.transport._host, args[1], ) -def test_get_custom_metric_rest_flattened_error(transport: str = "rest"): +def test_get_data_stream_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -78596,13 +88839,13 @@ def test_get_custom_metric_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_custom_metric( - analytics_admin.GetCustomMetricRequest(), + client.get_data_stream( + analytics_admin.GetDataStreamRequest(), name="name_value", ) -def test_get_custom_metric_rest_error(): +def test_get_data_stream_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -78611,51 +88854,57 @@ def test_get_custom_metric_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetDataRetentionSettingsRequest, + analytics_admin.GetAudienceRequest, dict, ], ) -def test_get_data_retention_settings_rest(request_type): +def test_get_audience_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/dataRetentionSettings"} + request_init = {"name": "properties/sample1/audiences/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.DataRetentionSettings( + return_value = audience.Audience( name="name_value", - event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, - reset_user_data_on_new_activity=True, + display_name="display_name_value", + description="description_value", + membership_duration_days=2561, + ads_personalization_enabled=True, + exclusion_duration_mode=audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataRetentionSettings.pb(return_value) + return_value = audience.Audience.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_data_retention_settings(request) + response = client.get_audience(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataRetentionSettings) + assert isinstance(response, audience.Audience) assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.membership_duration_days == 2561 + assert response.ads_personalization_enabled is True assert ( - response.event_data_retention - == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + response.exclusion_duration_mode + == audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY ) - assert response.reset_user_data_on_new_activity is True -def test_get_data_retention_settings_rest_use_cached_wrapped_rpc(): +def test_get_audience_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -78669,35 +88918,30 @@ def test_get_data_retention_settings_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_data_retention_settings - in client._transport._wrapped_methods - ) + assert client._transport.get_audience in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_data_retention_settings - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_audience] = mock_rpc request = {} - client.get_data_retention_settings(request) + client.get_audience(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_data_retention_settings(request) + client.get_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_data_retention_settings_rest_required_fields( - request_type=analytics_admin.GetDataRetentionSettingsRequest, +def test_get_audience_rest_required_fields( + request_type=analytics_admin.GetAudienceRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -78713,7 +88957,7 @@ def test_get_data_retention_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_data_retention_settings._get_unset_required_fields(jsonified_request) + ).get_audience._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -78722,7 +88966,7 @@ def test_get_data_retention_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_data_retention_settings._get_unset_required_fields(jsonified_request) + ).get_audience._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -78736,7 +88980,7 @@ def test_get_data_retention_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.DataRetentionSettings() + return_value = audience.Audience() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -78757,30 +89001,30 @@ def test_get_data_retention_settings_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataRetentionSettings.pb(return_value) + return_value = audience.Audience.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_data_retention_settings(request) + response = client.get_audience(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_data_retention_settings_rest_unset_required_fields(): +def test_get_audience_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_data_retention_settings._get_unset_required_fields({}) + unset_fields = transport.get_audience._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_retention_settings_rest_interceptors(null_interceptor): +def test_get_audience_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -78793,16 +89037,14 @@ def test_get_data_retention_settings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_get_data_retention_settings", + transports.AnalyticsAdminServiceRestInterceptor, "post_get_audience" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - 
"pre_get_data_retention_settings", + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_audience" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetDataRetentionSettingsRequest.pb( - analytics_admin.GetDataRetentionSettingsRequest() + pb_message = analytics_admin.GetAudienceRequest.pb( + analytics_admin.GetAudienceRequest() ) transcode.return_value = { "method": "post", @@ -78814,19 +89056,17 @@ def test_get_data_retention_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.DataRetentionSettings.to_json( - resources.DataRetentionSettings() - ) + req.return_value._content = audience.Audience.to_json(audience.Audience()) - request = analytics_admin.GetDataRetentionSettingsRequest() + request = analytics_admin.GetAudienceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.DataRetentionSettings() + post.return_value = audience.Audience() - client.get_data_retention_settings( + client.get_audience( request, metadata=[ ("key", "val"), @@ -78838,9 +89078,8 @@ def test_get_data_retention_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_data_retention_settings_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.GetDataRetentionSettingsRequest, +def test_get_audience_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetAudienceRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -78848,7 +89087,7 @@ def test_get_data_retention_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/dataRetentionSettings"} + request_init = {"name": "properties/sample1/audiences/sample2"} request = 
request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -78860,10 +89099,10 @@ def test_get_data_retention_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_data_retention_settings(request) + client.get_audience(request) -def test_get_data_retention_settings_rest_flattened(): +def test_get_audience_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -78872,10 +89111,10 @@ def test_get_data_retention_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DataRetentionSettings() + return_value = audience.Audience() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/dataRetentionSettings"} + sample_request = {"name": "properties/sample1/audiences/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -78887,25 +89126,24 @@ def test_get_data_retention_settings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataRetentionSettings.pb(return_value) + return_value = audience.Audience.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_data_retention_settings(**mock_args) + client.get_audience(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataRetentionSettings}" - % client.transport._host, + "%s/v1alpha/{name=properties/*/audiences/*}" % client.transport._host, args[1], ) -def test_get_data_retention_settings_rest_flattened_error(transport: str = "rest"): +def test_get_audience_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -78914,13 +89152,13 @@ def test_get_data_retention_settings_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_data_retention_settings( - analytics_admin.GetDataRetentionSettingsRequest(), + client.get_audience( + analytics_admin.GetAudienceRequest(), name="name_value", ) -def test_get_data_retention_settings_rest_error(): +def test_get_audience_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -78929,129 +89167,44 @@ def test_get_data_retention_settings_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateDataRetentionSettingsRequest, + analytics_admin.ListAudiencesRequest, dict, ], ) -def test_update_data_retention_settings_rest(request_type): +def test_list_audiences_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "data_retention_settings": {"name": "properties/sample1/dataRetentionSettings"} - } - request_init["data_retention_settings"] = { - "name": "properties/sample1/dataRetentionSettings", - "event_data_retention": 1, - "reset_user_data_on_new_activity": True, - } - # The version of a generated dependency at test runtime may differ from the version used during 
generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateDataRetentionSettingsRequest.meta.fields[ - "data_retention_settings" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "data_retention_settings" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample 
request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_retention_settings"][field])): - del request_init["data_retention_settings"][field][i][subfield] - else: - del request_init["data_retention_settings"][field][subfield] + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DataRetentionSettings( - name="name_value", - event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, - reset_user_data_on_new_activity=True, + return_value = analytics_admin.ListAudiencesResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataRetentionSettings.pb(return_value) + return_value = analytics_admin.ListAudiencesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_data_retention_settings(request) + response = client.list_audiences(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataRetentionSettings) - assert response.name == "name_value" - assert ( - response.event_data_retention - == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS - ) - assert response.reset_user_data_on_new_activity is True + assert isinstance(response, pagers.ListAudiencesPager) + assert response.next_page_token == "next_page_token_value" -def test_update_data_retention_settings_rest_use_cached_wrapped_rpc(): +def test_list_audiences_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -79065,39 +89218,35 @@ def test_update_data_retention_settings_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_data_retention_settings - in client._transport._wrapped_methods - ) + assert client._transport.list_audiences in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_data_retention_settings - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_audiences] = mock_rpc request = {} - client.update_data_retention_settings(request) + client.list_audiences(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_data_retention_settings(request) + client.list_audiences(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_data_retention_settings_rest_required_fields( - request_type=analytics_admin.UpdateDataRetentionSettingsRequest, +def test_list_audiences_rest_required_fields( + request_type=analytics_admin.ListAudiencesRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -79108,19 +89257,28 @@ def test_update_data_retention_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_retention_settings._get_unset_required_fields(jsonified_request) + ).list_audiences._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_retention_settings._get_unset_required_fields(jsonified_request) + ).list_audiences._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -79129,7 +89287,7 @@ def test_update_data_retention_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.DataRetentionSettings() + return_value = analytics_admin.ListAudiencesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -79141,50 +89299,47 @@ def test_update_data_retention_settings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataRetentionSettings.pb(return_value) + return_value = analytics_admin.ListAudiencesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_data_retention_settings(request) + response = client.list_audiences(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_data_retention_settings_rest_unset_required_fields(): +def test_list_audiences_rest_unset_required_fields(): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_data_retention_settings._get_unset_required_fields( - {} - ) + unset_fields = transport.list_audiences._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("updateMask",)) - & set( + set( ( - "dataRetentionSettings", - "updateMask", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_data_retention_settings_rest_interceptors(null_interceptor): +def test_list_audiences_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -79197,16 +89352,14 @@ def test_update_data_retention_settings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_update_data_retention_settings", + transports.AnalyticsAdminServiceRestInterceptor, "post_list_audiences" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_update_data_retention_settings", + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_audiences" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateDataRetentionSettingsRequest.pb( - analytics_admin.UpdateDataRetentionSettingsRequest() + pb_message = analytics_admin.ListAudiencesRequest.pb( + analytics_admin.ListAudiencesRequest() ) transcode.return_value = { "method": "post", @@ -79218,19 +89371,19 @@ def test_update_data_retention_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.DataRetentionSettings.to_json( - resources.DataRetentionSettings() + req.return_value._content = 
analytics_admin.ListAudiencesResponse.to_json( + analytics_admin.ListAudiencesResponse() ) - request = analytics_admin.UpdateDataRetentionSettingsRequest() + request = analytics_admin.ListAudiencesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.DataRetentionSettings() + post.return_value = analytics_admin.ListAudiencesResponse() - client.update_data_retention_settings( + client.list_audiences( request, metadata=[ ("key", "val"), @@ -79242,9 +89395,8 @@ def test_update_data_retention_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_data_retention_settings_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.UpdateDataRetentionSettingsRequest, +def test_list_audiences_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListAudiencesRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -79252,9 +89404,7 @@ def test_update_data_retention_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "data_retention_settings": {"name": "properties/sample1/dataRetentionSettings"} - } + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -79266,10 +89416,10 @@ def test_update_data_retention_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_data_retention_settings(request) + client.list_audiences(request) -def test_update_data_retention_settings_rest_flattened(): +def test_list_audiences_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -79278,19 +89428,14 @@ def test_update_data_retention_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DataRetentionSettings() + return_value = analytics_admin.ListAudiencesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "data_retention_settings": { - "name": "properties/sample1/dataRetentionSettings" - } - } + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - data_retention_settings=resources.DataRetentionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) @@ -79298,25 +89443,24 @@ def test_update_data_retention_settings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataRetentionSettings.pb(return_value) + return_value = analytics_admin.ListAudiencesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_data_retention_settings(**mock_args) + client.list_audiences(**mock_args) # Establish that the underlying call was made with the expected # request 
object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{data_retention_settings.name=properties/*/dataRetentionSettings}" - % client.transport._host, + "%s/v1alpha/{parent=properties/*}/audiences" % client.transport._host, args[1], ) -def test_update_data_retention_settings_rest_flattened_error(transport: str = "rest"): +def test_list_audiences_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -79325,27 +89469,83 @@ def test_update_data_retention_settings_rest_flattened_error(transport: str = "r # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_data_retention_settings( - analytics_admin.UpdateDataRetentionSettingsRequest(), - data_retention_settings=resources.DataRetentionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_audiences( + analytics_admin.ListAudiencesRequest(), + parent="parent_value", ) -def test_update_data_retention_settings_rest_error(): +def test_list_audiences_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), + audience.Audience(), + audience.Audience(), + ], + next_page_token="abc", + ), + analytics_admin.ListAudiencesResponse( + audiences=[], + next_page_token="def", + ), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), + ], + next_page_token="ghi", + ), + analytics_admin.ListAudiencesResponse( + audiences=[ + audience.Audience(), + audience.Audience(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListAudiencesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_audiences(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, audience.Audience) for i in results) + + pages = list(client.list_audiences(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateDataStreamRequest, + analytics_admin.CreateAudienceRequest, dict, ], ) -def test_create_data_stream_rest(request_type): +def test_create_audience_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -79353,32 +89553,73 @@ def test_create_data_stream_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "properties/sample1"} - request_init["data_stream"] = { - 
"web_stream_data": { - "measurement_id": "measurement_id_value", - "firebase_app_id": "firebase_app_id_value", - "default_uri": "default_uri_value", - }, - "android_app_stream_data": { - "firebase_app_id": "firebase_app_id_value", - "package_name": "package_name_value", - }, - "ios_app_stream_data": { - "firebase_app_id": "firebase_app_id_value", - "bundle_id": "bundle_id_value", - }, + request_init["audience"] = { "name": "name_value", - "type_": 1, "display_name": "display_name_value", + "description": "description_value", + "membership_duration_days": 2561, + "ads_personalization_enabled": True, + "event_trigger": {"event_name": "event_name_value", "log_condition": 1}, + "exclusion_duration_mode": 1, + "filter_clauses": [ + { + "simple_filter": { + "scope": 1, + "filter_expression": { + "and_group": {"filter_expressions": {}}, + "or_group": {}, + "not_expression": {}, + "dimension_or_metric_filter": { + "string_filter": { + "match_type": 1, + "value": "value_value", + "case_sensitive": True, + }, + "in_list_filter": { + "values": ["values_value1", "values_value2"], + "case_sensitive": True, + }, + "numeric_filter": { + "operation": 1, + "value": { + "int64_value": 1073, + "double_value": 0.12710000000000002, + }, + }, + "between_filter": {"from_value": {}, "to_value": {}}, + "field_name": "field_name_value", + "at_any_point_in_time": True, + "in_any_n_day_period": 1994, + }, + "event_filter": { + "event_name": "event_name_value", + "event_parameter_filter_expression": {}, + }, + }, + }, + "sequence_filter": { + "scope": 1, + "sequence_maximum_duration": {"seconds": 751, "nanos": 543}, + "sequence_steps": [ + { + "scope": 1, + "immediately_follows": True, + "constraint_duration": {}, + "filter_expression": {}, + } + ], + }, + "clause_type": 1, + } + ], "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateDataStreamRequest.meta.fields["data_stream"] + test_field = analytics_admin.CreateAudienceRequest.meta.fields["audience"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -79406,7 +89647,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["data_stream"].items(): # pragma: NO COVER + for field, value in request_init["audience"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -79436,40 +89677,49 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["data_stream"][field])): - del request_init["data_stream"][field][i][subfield] + for i in range(0, len(request_init["audience"][field])): + del request_init["audience"][field][i][subfield] else: - del request_init["data_stream"][field][subfield] + del request_init["audience"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.DataStream( + return_value = gaa_audience.Audience( name="name_value", - type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, display_name="display_name_value", + description="description_value", + membership_duration_days=2561, + ads_personalization_enabled=True, + exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataStream.pb(return_value) + return_value = gaa_audience.Audience.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_data_stream(request) + response = client.create_audience(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.DataStream) + assert isinstance(response, gaa_audience.Audience) assert response.name == "name_value" - assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.membership_duration_days == 2561 + assert response.ads_personalization_enabled is True + assert ( + response.exclusion_duration_mode + == gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY + ) -def test_create_data_stream_rest_use_cached_wrapped_rpc(): +def test_create_audience_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -79483,34 +89733,30 @@ def test_create_data_stream_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - 
client._transport.create_data_stream in client._transport._wrapped_methods - ) + assert client._transport.create_audience in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_data_stream - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_audience] = mock_rpc request = {} - client.create_data_stream(request) + client.create_audience(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_data_stream(request) + client.create_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_data_stream_rest_required_fields( - request_type=analytics_admin.CreateDataStreamRequest, +def test_create_audience_rest_required_fields( + request_type=analytics_admin.CreateAudienceRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -79526,7 +89772,7 @@ def test_create_data_stream_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_data_stream._get_unset_required_fields(jsonified_request) + ).create_audience._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -79535,7 +89781,7 @@ def test_create_data_stream_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_data_stream._get_unset_required_fields(jsonified_request) + ).create_audience._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -79549,7 +89795,7 @@ def 
test_create_data_stream_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.DataStream() + return_value = gaa_audience.Audience() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -79571,38 +89817,38 @@ def test_create_data_stream_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataStream.pb(return_value) + return_value = gaa_audience.Audience.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_data_stream(request) + response = client.create_audience(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_data_stream_rest_unset_required_fields(): +def test_create_audience_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_data_stream._get_unset_required_fields({}) + unset_fields = transport.create_audience._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "parent", - "dataStream", + "audience", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_data_stream_rest_interceptors(null_interceptor): +def test_create_audience_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -79615,14 +89861,14 @@ def test_create_data_stream_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as 
transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_data_stream" + transports.AnalyticsAdminServiceRestInterceptor, "post_create_audience" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_data_stream" + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_audience" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateDataStreamRequest.pb( - analytics_admin.CreateDataStreamRequest() + pb_message = analytics_admin.CreateAudienceRequest.pb( + analytics_admin.CreateAudienceRequest() ) transcode.return_value = { "method": "post", @@ -79634,17 +89880,19 @@ def test_create_data_stream_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.DataStream.to_json(resources.DataStream()) + req.return_value._content = gaa_audience.Audience.to_json( + gaa_audience.Audience() + ) - request = analytics_admin.CreateDataStreamRequest() + request = analytics_admin.CreateAudienceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.DataStream() + post.return_value = gaa_audience.Audience() - client.create_data_stream( + client.create_audience( request, metadata=[ ("key", "val"), @@ -79656,8 +89904,8 @@ def test_create_data_stream_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_data_stream_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateDataStreamRequest +def test_create_audience_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateAudienceRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -79677,10 +89925,10 @@ def test_create_data_stream_rest_bad_request( response_value.status_code = 400 
response_value.request = Request() req.return_value = response_value - client.create_data_stream(request) + client.create_audience(request) -def test_create_data_stream_rest_flattened(): +def test_create_audience_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -79689,7 +89937,7 @@ def test_create_data_stream_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DataStream() + return_value = gaa_audience.Audience() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1"} @@ -79697,11 +89945,7 @@ def test_create_data_stream_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - data_stream=resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ), + audience=gaa_audience.Audience(name="name_value"), ) mock_args.update(sample_request) @@ -79709,24 +89953,24 @@ def test_create_data_stream_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataStream.pb(return_value) + return_value = gaa_audience.Audience.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_data_stream(**mock_args) + client.create_audience(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/dataStreams" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/audiences" % client.transport._host, args[1], ) -def test_create_data_stream_rest_flattened_error(transport: str = "rest"): +def test_create_audience_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -79735,18 +89979,14 @@ def test_create_data_stream_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_data_stream( - analytics_admin.CreateDataStreamRequest(), + client.create_audience( + analytics_admin.CreateAudienceRequest(), parent="parent_value", - data_stream=resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ), + audience=gaa_audience.Audience(name="name_value"), ) -def test_create_data_stream_rest_error(): +def test_create_audience_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -79755,39 +89995,185 @@ def test_create_data_stream_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteDataStreamRequest, + analytics_admin.UpdateAudienceRequest, dict, ], ) -def test_delete_data_stream_rest(request_type): +def test_update_audience_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/dataStreams/sample2"} + request_init = {"audience": {"name": "properties/sample1/audiences/sample2"}} + request_init["audience"] = { + "name": "properties/sample1/audiences/sample2", + "display_name": 
"display_name_value", + "description": "description_value", + "membership_duration_days": 2561, + "ads_personalization_enabled": True, + "event_trigger": {"event_name": "event_name_value", "log_condition": 1}, + "exclusion_duration_mode": 1, + "filter_clauses": [ + { + "simple_filter": { + "scope": 1, + "filter_expression": { + "and_group": {"filter_expressions": {}}, + "or_group": {}, + "not_expression": {}, + "dimension_or_metric_filter": { + "string_filter": { + "match_type": 1, + "value": "value_value", + "case_sensitive": True, + }, + "in_list_filter": { + "values": ["values_value1", "values_value2"], + "case_sensitive": True, + }, + "numeric_filter": { + "operation": 1, + "value": { + "int64_value": 1073, + "double_value": 0.12710000000000002, + }, + }, + "between_filter": {"from_value": {}, "to_value": {}}, + "field_name": "field_name_value", + "at_any_point_in_time": True, + "in_any_n_day_period": 1994, + }, + "event_filter": { + "event_name": "event_name_value", + "event_parameter_filter_expression": {}, + }, + }, + }, + "sequence_filter": { + "scope": 1, + "sequence_maximum_duration": {"seconds": 751, "nanos": 543}, + "sequence_steps": [ + { + "scope": 1, + "immediately_follows": True, + "constraint_duration": {}, + "filter_expression": {}, + } + ], + }, + "clause_type": 1, + } + ], + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateAudienceRequest.meta.fields["audience"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["audience"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["audience"][field])): + del request_init["audience"][field][i][subfield] + else: + del 
request_init["audience"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = gaa_audience.Audience( + name="name_value", + display_name="display_name_value", + description="description_value", + membership_duration_days=2561, + ads_personalization_enabled=True, + exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = gaa_audience.Audience.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_data_stream(request) + response = client.update_audience(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, gaa_audience.Audience) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.membership_duration_days == 2561 + assert response.ads_personalization_enabled is True + assert ( + response.exclusion_duration_mode + == gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY + ) -def test_delete_data_stream_rest_use_cached_wrapped_rpc(): +def test_update_audience_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -79801,39 +90187,34 @@ def test_delete_data_stream_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_data_stream in client._transport._wrapped_methods - ) + assert client._transport.update_audience in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_data_stream - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_audience] = mock_rpc request = {} - client.delete_data_stream(request) + client.update_audience(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_data_stream(request) + client.update_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_data_stream_rest_required_fields( - request_type=analytics_admin.DeleteDataStreamRequest, +def test_update_audience_rest_required_fields( + request_type=analytics_admin.UpdateAudienceRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -79844,21 +90225,19 @@ def test_delete_data_stream_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_data_stream._get_unset_required_fields(jsonified_request) + ).update_audience._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_data_stream._get_unset_required_fields(jsonified_request) + ).update_audience._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -79867,7 +90246,7 @@ def test_delete_data_stream_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = None + return_value = gaa_audience.Audience() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -79879,36 +90258,48 @@ def test_delete_data_stream_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = gaa_audience.Audience.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_data_stream(request) + response = client.update_audience(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_data_stream_rest_unset_required_fields(): +def test_update_audience_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_data_stream._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_audience._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "audience", + "updateMask", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_data_stream_rest_interceptors(null_interceptor): +def test_update_audience_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ 
-79921,11 +90312,14 @@ def test_delete_data_stream_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_data_stream" + transports.AnalyticsAdminServiceRestInterceptor, "post_update_audience" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_audience" ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteDataStreamRequest.pb( - analytics_admin.DeleteDataStreamRequest() + post.assert_not_called() + pb_message = analytics_admin.UpdateAudienceRequest.pb( + analytics_admin.UpdateAudienceRequest() ) transcode.return_value = { "method": "post", @@ -79937,15 +90331,19 @@ def test_delete_data_stream_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = gaa_audience.Audience.to_json( + gaa_audience.Audience() + ) - request = analytics_admin.DeleteDataStreamRequest() + request = analytics_admin.UpdateAudienceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = gaa_audience.Audience() - client.delete_data_stream( + client.update_audience( request, metadata=[ ("key", "val"), @@ -79954,10 +90352,11 @@ def test_delete_data_stream_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_data_stream_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeleteDataStreamRequest +def test_update_audience_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateAudienceRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -79965,7 +90364,7 @@ def test_delete_data_stream_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = 
{"name": "properties/sample1/dataStreams/sample2"} + request_init = {"audience": {"name": "properties/sample1/audiences/sample2"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -79977,10 +90376,10 @@ def test_delete_data_stream_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_data_stream(request) + client.update_audience(request) -def test_delete_data_stream_rest_flattened(): +def test_update_audience_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -79989,37 +90388,41 @@ def test_delete_data_stream_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = gaa_audience.Audience() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/dataStreams/sample2"} + sample_request = {"audience": {"name": "properties/sample1/audiences/sample2"}} # get truthy value for each flattened field mock_args = dict( - name="name_value", + audience=gaa_audience.Audience(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = gaa_audience.Audience.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_data_stream(**mock_args) + client.update_audience(**mock_args) # Establish that the underlying call was made with the expected # 
request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataStreams/*}" % client.transport._host, + "%s/v1alpha/{audience.name=properties/*/audiences/*}" + % client.transport._host, args[1], ) -def test_delete_data_stream_rest_flattened_error(transport: str = "rest"): +def test_update_audience_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -80028,13 +90431,14 @@ def test_delete_data_stream_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_data_stream( - analytics_admin.DeleteDataStreamRequest(), - name="name_value", + client.update_audience( + analytics_admin.UpdateAudienceRequest(), + audience=gaa_audience.Audience(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_data_stream_rest_error(): +def test_update_audience_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -80043,135 +90447,39 @@ def test_delete_data_stream_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateDataStreamRequest, + analytics_admin.ArchiveAudienceRequest, dict, ], ) -def test_update_data_stream_rest(request_type): +def test_archive_audience_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"data_stream": {"name": "properties/sample1/dataStreams/sample2"}} - request_init["data_stream"] = { - "web_stream_data": { - "measurement_id": "measurement_id_value", - "firebase_app_id": "firebase_app_id_value", - "default_uri": "default_uri_value", - }, - 
"android_app_stream_data": { - "firebase_app_id": "firebase_app_id_value", - "package_name": "package_name_value", - }, - "ios_app_stream_data": { - "firebase_app_id": "firebase_app_id_value", - "bundle_id": "bundle_id_value", - }, - "name": "properties/sample1/dataStreams/sample2", - "type_": 1, - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateDataStreamRequest.meta.fields["data_stream"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["data_stream"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_stream"][field])): - del request_init["data_stream"][field][i][subfield] - else: - del 
request_init["data_stream"][field][subfield] + request_init = {"name": "properties/sample1/audiences/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DataStream( - name="name_value", - type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, - display_name="display_name_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.DataStream.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_data_stream(request) + response = client.archive_audience(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataStream) - assert response.name == "name_value" - assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM - assert response.display_name == "display_name_value" + assert response is None -def test_update_data_stream_rest_use_cached_wrapped_rpc(): +def test_archive_audience_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -80185,9 +90493,7 @@ def test_update_data_stream_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_data_stream in client._transport._wrapped_methods - ) + assert client._transport.archive_audience in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -80195,28 +90501,29 @@ def test_update_data_stream_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_data_stream + client._transport.archive_audience ] = mock_rpc request = {} - client.update_data_stream(request) + client.archive_audience(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_data_stream(request) + client.archive_audience(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_data_stream_rest_required_fields( - request_type=analytics_admin.UpdateDataStreamRequest, +def test_archive_audience_rest_required_fields( + request_type=analytics_admin.ArchiveAudienceRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -80227,19 +90534,21 @@ def test_update_data_stream_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_stream._get_unset_required_fields(jsonified_request) + ).archive_audience._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_stream._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).archive_audience._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -80248,7 +90557,7 @@ def test_update_data_stream_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.DataStream() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -80260,7 +90569,7 @@ def test_update_data_stream_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -80268,32 +90577,29 @@ def test_update_data_stream_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.DataStream.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_data_stream(request) + response = client.archive_audience(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_data_stream_rest_unset_required_fields(): +def test_archive_audience_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_data_stream._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) + unset_fields = transport.archive_audience._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_data_stream_rest_interceptors(null_interceptor): +def test_archive_audience_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ 
-80306,14 +90612,11 @@ def test_update_data_stream_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_update_data_stream" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_data_stream" + transports.AnalyticsAdminServiceRestInterceptor, "pre_archive_audience" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.UpdateDataStreamRequest.pb( - analytics_admin.UpdateDataStreamRequest() + pb_message = analytics_admin.ArchiveAudienceRequest.pb( + analytics_admin.ArchiveAudienceRequest() ) transcode.return_value = { "method": "post", @@ -80325,17 +90628,15 @@ def test_update_data_stream_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.DataStream.to_json(resources.DataStream()) - request = analytics_admin.UpdateDataStreamRequest() + request = analytics_admin.ArchiveAudienceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.DataStream() - client.update_data_stream( + client.archive_audience( request, metadata=[ ("key", "val"), @@ -80344,11 +90645,10 @@ def test_update_data_stream_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_data_stream_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.UpdateDataStreamRequest +def test_archive_audience_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ArchiveAudienceRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -80356,7 +90656,7 @@ def test_update_data_stream_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = 
{"data_stream": {"name": "properties/sample1/dataStreams/sample2"}} + request_init = {"name": "properties/sample1/audiences/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -80368,79 +90668,10 @@ def test_update_data_stream_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_data_stream(request) - - -def test_update_data_stream_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = resources.DataStream() - - # get arguments that satisfy an http rule for this method - sample_request = { - "data_stream": {"name": "properties/sample1/dataStreams/sample2"} - } - - # get truthy value for each flattened field - mock_args = dict( - data_stream=resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.DataStream.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_data_stream(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{data_stream.name=properties/*/dataStreams/*}" - % client.transport._host, - args[1], - ) - - -def test_update_data_stream_rest_flattened_error(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_data_stream( - analytics_admin.UpdateDataStreamRequest(), - data_stream=resources.DataStream( - web_stream_data=resources.DataStream.WebStreamData( - measurement_id="measurement_id_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + client.archive_audience(request) -def test_update_data_stream_rest_error(): +def test_archive_audience_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -80449,44 +90680,48 @@ def test_update_data_stream_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListDataStreamsRequest, + analytics_admin.GetSearchAds360LinkRequest, dict, ], ) -def test_list_data_streams_rest(request_type): +def test_get_search_ads360_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/searchAds360Links/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListDataStreamsResponse( - next_page_token="next_page_token_value", + return_value = resources.SearchAds360Link( + name="name_value", + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListDataStreamsResponse.pb(return_value) + return_value = resources.SearchAds360Link.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_data_streams(request) + response = client.get_search_ads360_link(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataStreamsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.SearchAds360Link) + assert response.name == "name_value" + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" -def test_list_data_streams_rest_use_cached_wrapped_rpc(): +def test_get_search_ads360_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -80500,7 +90735,10 @@ def test_list_data_streams_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_data_streams in client._transport._wrapped_methods + assert ( + client._transport.get_search_ads360_link + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -80508,29 +90746,29 @@ def test_list_data_streams_rest_use_cached_wrapped_rpc(): 
"foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_data_streams + client._transport.get_search_ads360_link ] = mock_rpc request = {} - client.list_data_streams(request) + client.get_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_data_streams(request) + client.get_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_data_streams_rest_required_fields( - request_type=analytics_admin.ListDataStreamsRequest, +def test_get_search_ads360_link_rest_required_fields( + request_type=analytics_admin.GetSearchAds360LinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -80541,28 +90779,21 @@ def test_list_data_streams_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_data_streams._get_unset_required_fields(jsonified_request) + ).get_search_ads360_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_data_streams._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).get_search_ads360_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -80571,7 +90802,7 @@ def test_list_data_streams_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListDataStreamsResponse() + return_value = resources.SearchAds360Link() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -80592,38 +90823,30 @@ def test_list_data_streams_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListDataStreamsResponse.pb(return_value) + return_value = resources.SearchAds360Link.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_data_streams(request) + response = client.get_search_ads360_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_data_streams_rest_unset_required_fields(): +def test_get_search_ads360_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_data_streams._get_unset_required_fields({}) - assert set(unset_fields) 
== ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_search_ads360_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_data_streams_rest_interceptors(null_interceptor): +def test_get_search_ads360_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -80636,14 +90859,14 @@ def test_list_data_streams_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_data_streams" + transports.AnalyticsAdminServiceRestInterceptor, "post_get_search_ads360_link" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_data_streams" + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_search_ads360_link" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListDataStreamsRequest.pb( - analytics_admin.ListDataStreamsRequest() + pb_message = analytics_admin.GetSearchAds360LinkRequest.pb( + analytics_admin.GetSearchAds360LinkRequest() ) transcode.return_value = { "method": "post", @@ -80655,19 +90878,19 @@ def test_list_data_streams_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.ListDataStreamsResponse.to_json( - analytics_admin.ListDataStreamsResponse() + req.return_value._content = resources.SearchAds360Link.to_json( + resources.SearchAds360Link() ) - request = analytics_admin.ListDataStreamsRequest() + request = analytics_admin.GetSearchAds360LinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, 
metadata - post.return_value = analytics_admin.ListDataStreamsResponse() + post.return_value = resources.SearchAds360Link() - client.list_data_streams( + client.get_search_ads360_link( request, metadata=[ ("key", "val"), @@ -80679,8 +90902,8 @@ def test_list_data_streams_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_data_streams_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListDataStreamsRequest +def test_get_search_ads360_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetSearchAds360LinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -80688,7 +90911,7 @@ def test_list_data_streams_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/searchAds360Links/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -80700,10 +90923,10 @@ def test_list_data_streams_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_data_streams(request) + client.get_search_ads360_link(request) -def test_list_data_streams_rest_flattened(): +def test_get_search_ads360_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -80712,161 +90935,101 @@ def test_list_data_streams_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListDataStreamsResponse() + return_value = resources.SearchAds360Link() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListDataStreamsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_data_streams(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/dataStreams" % client.transport._host, - args[1], - ) - - -def test_list_data_streams_rest_flattened_error(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_data_streams( - analytics_admin.ListDataStreamsRequest(), - parent="parent_value", - ) - - -def test_list_data_streams_rest_pager(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - resources.DataStream(), - resources.DataStream(), - ], - next_page_token="abc", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[], - next_page_token="def", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - ], - next_page_token="ghi", - ), - analytics_admin.ListDataStreamsResponse( - data_streams=[ - resources.DataStream(), - resources.DataStream(), - ], - ), + sample_request = {"name": "properties/sample1/searchAds360Links/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - # Two responses for two calls - response = response + response + mock_args.update(sample_request) - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListDataStreamsResponse.to_json(x) for x in response + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.SearchAds360Link.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_search_ads360_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/searchAds360Links/*}" + % client.transport._host, + args[1], ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "properties/sample1"} - pager = client.list_data_streams(request=sample_request) +def test_get_search_ads360_link_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.DataStream) for i in results) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_search_ads360_link( + analytics_admin.GetSearchAds360LinkRequest(), + name="name_value", + ) - pages = list(client.list_data_streams(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + +def test_get_search_ads360_link_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetDataStreamRequest, + analytics_admin.ListSearchAds360LinksRequest, dict, ], ) -def test_get_data_stream_rest(request_type): +def test_list_search_ads360_links_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/dataStreams/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) 
# Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DataStream( - name="name_value", - type_=resources.DataStream.DataStreamType.WEB_DATA_STREAM, - display_name="display_name_value", + return_value = analytics_admin.ListSearchAds360LinksResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataStream.pb(return_value) + return_value = analytics_admin.ListSearchAds360LinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_data_stream(request) + response = client.list_search_ads360_links(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataStream) - assert response.name == "name_value" - assert response.type_ == resources.DataStream.DataStreamType.WEB_DATA_STREAM - assert response.display_name == "display_name_value" + assert isinstance(response, pagers.ListSearchAds360LinksPager) + assert response.next_page_token == "next_page_token_value" -def test_get_data_stream_rest_use_cached_wrapped_rpc(): +def test_list_search_ads360_links_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -80880,35 +91043,40 @@ def test_get_data_stream_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_data_stream in client._transport._wrapped_methods + assert ( + client._transport.list_search_ads360_links + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_data_stream] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_search_ads360_links + ] = mock_rpc request = {} - client.get_data_stream(request) + client.list_search_ads360_links(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_data_stream(request) + client.list_search_ads360_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_data_stream_rest_required_fields( - request_type=analytics_admin.GetDataStreamRequest, +def test_list_search_ads360_links_rest_required_fields( + request_type=analytics_admin.ListSearchAds360LinksRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -80919,21 +91087,28 @@ def test_get_data_stream_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_data_stream._get_unset_required_fields(jsonified_request) + ).list_search_ads360_links._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_data_stream._get_unset_required_fields(jsonified_request) + ).list_search_ads360_links._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -80942,7 +91117,7 @@ def test_get_data_stream_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.DataStream() + return_value = analytics_admin.ListSearchAds360LinksResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -80963,30 +91138,40 @@ def test_get_data_stream_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataStream.pb(return_value) + return_value = analytics_admin.ListSearchAds360LinksResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_data_stream(request) + response = client.list_search_ads360_links(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_data_stream_rest_unset_required_fields(): +def test_list_search_ads360_links_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_data_stream._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.list_search_ads360_links._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_stream_rest_interceptors(null_interceptor): +def test_list_search_ads360_links_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -80999,14 +91184,14 @@ def test_get_data_stream_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_data_stream" + transports.AnalyticsAdminServiceRestInterceptor, "post_list_search_ads360_links" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_data_stream" + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_search_ads360_links" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetDataStreamRequest.pb( - analytics_admin.GetDataStreamRequest() + pb_message = analytics_admin.ListSearchAds360LinksRequest.pb( + analytics_admin.ListSearchAds360LinksRequest() ) transcode.return_value = { "method": "post", @@ -81018,17 +91203,21 @@ def test_get_data_stream_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.DataStream.to_json(resources.DataStream()) + req.return_value._content = ( + analytics_admin.ListSearchAds360LinksResponse.to_json( + analytics_admin.ListSearchAds360LinksResponse() + ) + ) - request = analytics_admin.GetDataStreamRequest() + request = analytics_admin.ListSearchAds360LinksRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = 
resources.DataStream() + post.return_value = analytics_admin.ListSearchAds360LinksResponse() - client.get_data_stream( + client.list_search_ads360_links( request, metadata=[ ("key", "val"), @@ -81040,8 +91229,8 @@ def test_get_data_stream_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_data_stream_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetDataStreamRequest +def test_list_search_ads360_links_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListSearchAds360LinksRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -81049,7 +91238,7 @@ def test_get_data_stream_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/dataStreams/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -81061,10 +91250,10 @@ def test_get_data_stream_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_data_stream(request) + client.list_search_ads360_links(request) -def test_get_data_stream_rest_flattened(): +def test_list_search_ads360_links_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -81073,14 +91262,14 @@ def test_get_data_stream_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.DataStream() + return_value = analytics_admin.ListSearchAds360LinksResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/dataStreams/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -81088,24 +91277,25 @@ def test_get_data_stream_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataStream.pb(return_value) + return_value = analytics_admin.ListSearchAds360LinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_data_stream(**mock_args) + client.list_search_ads360_links(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataStreams/*}" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/searchAds360Links" + % client.transport._host, args[1], ) -def test_get_data_stream_rest_flattened_error(transport: str = "rest"): +def test_list_search_ads360_links_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -81114,72 +91304,198 @@ def test_get_data_stream_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_data_stream( - analytics_admin.GetDataStreamRequest(), - name="name_value", + client.list_search_ads360_links( + analytics_admin.ListSearchAds360LinksRequest(), + parent="parent_value", ) -def test_get_data_stream_rest_error(): +def test_list_search_ads360_links_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + resources.SearchAds360Link(), + resources.SearchAds360Link(), + ], + next_page_token="abc", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[], + next_page_token="def", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSearchAds360LinksResponse( + search_ads_360_links=[ + resources.SearchAds360Link(), + resources.SearchAds360Link(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListSearchAds360LinksResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = 
client.list_search_ads360_links(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.SearchAds360Link) for i in results) + + pages = list(client.list_search_ads360_links(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetAudienceRequest, + analytics_admin.CreateSearchAds360LinkRequest, dict, ], ) -def test_get_audience_rest(request_type): +def test_create_search_ads360_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audiences/sample2"} + request_init = {"parent": "properties/sample1"} + request_init["search_ads_360_link"] = { + "name": "name_value", + "advertiser_id": "advertiser_id_value", + "campaign_data_sharing_enabled": {"value": True}, + "cost_data_sharing_enabled": {}, + "advertiser_display_name": "advertiser_display_name_value", + "ads_personalization_enabled": {}, + "site_stats_sharing_enabled": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreateSearchAds360LinkRequest.meta.fields[ + "search_ads_360_link" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["search_ads_360_link"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["search_ads_360_link"][field])): + del 
request_init["search_ads_360_link"][field][i][subfield] + else: + del request_init["search_ads_360_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = audience.Audience( + return_value = resources.SearchAds360Link( name="name_value", - display_name="display_name_value", - description="description_value", - membership_duration_days=2561, - ads_personalization_enabled=True, - exclusion_duration_mode=audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = audience.Audience.pb(return_value) + return_value = resources.SearchAds360Link.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_audience(request) + response = client.create_search_ads360_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, audience.Audience) + assert isinstance(response, resources.SearchAds360Link) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.membership_duration_days == 2561 - assert response.ads_personalization_enabled is True - assert ( - response.exclusion_duration_mode - == audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY - ) + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" -def test_get_audience_rest_use_cached_wrapped_rpc(): +def test_create_search_ads360_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -81193,35 +91509,40 @@ def test_get_audience_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_audience in client._transport._wrapped_methods + assert ( + client._transport.create_search_ads360_link + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_audience] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_search_ads360_link + ] = mock_rpc request = {} - client.get_audience(request) + client.create_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_audience(request) + client.create_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_audience_rest_required_fields( - request_type=analytics_admin.GetAudienceRequest, +def test_create_search_ads360_link_rest_required_fields( + request_type=analytics_admin.CreateSearchAds360LinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -81232,21 +91553,21 @@ def test_get_audience_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_audience._get_unset_required_fields(jsonified_request) + ).create_search_ads360_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_audience._get_unset_required_fields(jsonified_request) + ).create_search_ads360_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -81255,7 +91576,7 @@ def test_get_audience_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = audience.Audience() + return_value = resources.SearchAds360Link() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -81267,39 +91588,48 @@ def test_get_audience_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = audience.Audience.pb(return_value) + return_value = resources.SearchAds360Link.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_audience(request) + response = client.create_search_ads360_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_audience_rest_unset_required_fields(): +def test_create_search_ads360_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_audience._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_search_ads360_link._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "searchAds360Link", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_audience_rest_interceptors(null_interceptor): +def test_create_search_ads360_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -81312,14 +91642,15 @@ def test_get_audience_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_audience" + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_search_ads360_link", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_audience" + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_search_ads360_link" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetAudienceRequest.pb( - analytics_admin.GetAudienceRequest() + pb_message = analytics_admin.CreateSearchAds360LinkRequest.pb( + analytics_admin.CreateSearchAds360LinkRequest() ) transcode.return_value = { "method": "post", @@ -81331,17 +91662,19 @@ def test_get_audience_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = audience.Audience.to_json(audience.Audience()) + req.return_value._content = resources.SearchAds360Link.to_json( + resources.SearchAds360Link() + ) - request = analytics_admin.GetAudienceRequest() + request = analytics_admin.CreateSearchAds360LinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = audience.Audience() + post.return_value = resources.SearchAds360Link() - client.get_audience( + client.create_search_ads360_link( request, metadata=[ ("key", "val"), @@ -81353,8 +91686,8 @@ def test_get_audience_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_audience_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetAudienceRequest +def test_create_search_ads360_link_rest_bad_request( + transport: str = "rest", 
request_type=analytics_admin.CreateSearchAds360LinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -81362,7 +91695,7 @@ def test_get_audience_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audiences/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -81374,10 +91707,10 @@ def test_get_audience_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_audience(request) + client.create_search_ads360_link(request) -def test_get_audience_rest_flattened(): +def test_create_search_ads360_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -81386,14 +91719,15 @@ def test_get_audience_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = audience.Audience() + return_value = resources.SearchAds360Link() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/audiences/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + search_ads_360_link=resources.SearchAds360Link(name="name_value"), ) mock_args.update(sample_request) @@ -81401,24 +91735,25 @@ def test_get_audience_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = audience.Audience.pb(return_value) + return_value = resources.SearchAds360Link.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_audience(**mock_args) + client.create_search_ads360_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audiences/*}" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/searchAds360Links" + % client.transport._host, args[1], ) -def test_get_audience_rest_flattened_error(transport: str = "rest"): +def test_create_search_ads360_link_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -81427,13 +91762,14 @@ def test_get_audience_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_audience( - analytics_admin.GetAudienceRequest(), - name="name_value", + client.create_search_ads360_link( + analytics_admin.CreateSearchAds360LinkRequest(), + parent="parent_value", + search_ads_360_link=resources.SearchAds360Link(name="name_value"), ) -def test_get_audience_rest_error(): +def test_create_search_ads360_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -81442,44 +91778,39 @@ def test_get_audience_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListAudiencesRequest, + analytics_admin.DeleteSearchAds360LinkRequest, dict, ], ) -def test_list_audiences_rest(request_type): +def test_delete_search_ads360_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/searchAds360Links/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListAudiencesResponse( - next_page_token="next_page_token_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListAudiencesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_audiences(request) + response = client.delete_search_ads360_link(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAudiencesPager) - assert response.next_page_token == "next_page_token_value" + assert response is None -def test_list_audiences_rest_use_cached_wrapped_rpc(): +def test_delete_search_ads360_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -81493,35 +91824,40 @@ def test_list_audiences_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_audiences in client._transport._wrapped_methods + assert ( + client._transport.delete_search_ads360_link + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_audiences] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_search_ads360_link + ] = mock_rpc request = {} - client.list_audiences(request) + client.delete_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_audiences(request) + client.delete_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_audiences_rest_required_fields( - request_type=analytics_admin.ListAudiencesRequest, +def test_delete_search_ads360_link_rest_required_fields( + request_type=analytics_admin.DeleteSearchAds360LinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -81532,28 +91868,21 @@ def test_list_audiences_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_audiences._get_unset_required_fields(jsonified_request) + ).delete_search_ads360_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_audiences._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).delete_search_ads360_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -81562,7 +91891,7 @@ def test_list_audiences_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListAudiencesResponse() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -81574,47 +91903,36 @@ def test_list_audiences_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = analytics_admin.ListAudiencesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_audiences(request) + response = client.delete_search_ads360_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_audiences_rest_unset_required_fields(): +def test_delete_search_ads360_link_rest_unset_required_fields(): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_audiences._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.delete_search_ads360_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_audiences_rest_interceptors(null_interceptor): +def test_delete_search_ads360_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -81627,14 +91945,11 @@ def test_list_audiences_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_audiences" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_audiences" + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_search_ads360_link" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.ListAudiencesRequest.pb( - analytics_admin.ListAudiencesRequest() + pb_message = analytics_admin.DeleteSearchAds360LinkRequest.pb( + analytics_admin.DeleteSearchAds360LinkRequest() ) transcode.return_value = { "method": "post", @@ -81646,19 +91961,15 @@ def test_list_audiences_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.ListAudiencesResponse.to_json( - analytics_admin.ListAudiencesResponse() - ) - request = analytics_admin.ListAudiencesRequest() + request = analytics_admin.DeleteSearchAds360LinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value 
= request, metadata - post.return_value = analytics_admin.ListAudiencesResponse() - client.list_audiences( + client.delete_search_ads360_link( request, metadata=[ ("key", "val"), @@ -81667,11 +91978,10 @@ def test_list_audiences_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_list_audiences_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListAudiencesRequest +def test_delete_search_ads360_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteSearchAds360LinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -81679,7 +91989,7 @@ def test_list_audiences_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/searchAds360Links/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -81691,10 +92001,10 @@ def test_list_audiences_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_audiences(request) + client.delete_search_ads360_link(request) -def test_list_audiences_rest_flattened(): +def test_delete_search_ads360_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -81703,39 +92013,38 @@ def test_list_audiences_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListAudiencesResponse() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/searchAds360Links/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListAudiencesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_audiences(**mock_args) + client.delete_search_ads360_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/audiences" % client.transport._host, + "%s/v1alpha/{name=properties/*/searchAds360Links/*}" + % client.transport._host, args[1], ) -def test_list_audiences_rest_flattened_error(transport: str = "rest"): +def test_delete_search_ads360_link_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -81744,156 +92053,52 @@ def test_list_audiences_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_audiences( - analytics_admin.ListAudiencesRequest(), - parent="parent_value", + client.delete_search_ads360_link( + analytics_admin.DeleteSearchAds360LinkRequest(), + name="name_value", ) -def test_list_audiences_rest_pager(transport: str = "rest"): +def test_delete_search_ads360_link_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - audience.Audience(), - audience.Audience(), - ], - next_page_token="abc", - ), - analytics_admin.ListAudiencesResponse( - audiences=[], - next_page_token="def", - ), - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAudiencesResponse( - audiences=[ - audience.Audience(), - audience.Audience(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListAudiencesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_audiences(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, audience.Audience) for i in results) 
- - pages = list(client.list_audiences(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateAudienceRequest, + analytics_admin.UpdateSearchAds360LinkRequest, dict, ], ) -def test_create_audience_rest(request_type): +def test_update_search_ads360_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["audience"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "membership_duration_days": 2561, - "ads_personalization_enabled": True, - "event_trigger": {"event_name": "event_name_value", "log_condition": 1}, - "exclusion_duration_mode": 1, - "filter_clauses": [ - { - "simple_filter": { - "scope": 1, - "filter_expression": { - "and_group": {"filter_expressions": {}}, - "or_group": {}, - "not_expression": {}, - "dimension_or_metric_filter": { - "string_filter": { - "match_type": 1, - "value": "value_value", - "case_sensitive": True, - }, - "in_list_filter": { - "values": ["values_value1", "values_value2"], - "case_sensitive": True, - }, - "numeric_filter": { - "operation": 1, - "value": { - "int64_value": 1073, - "double_value": 0.12710000000000002, - }, - }, - "between_filter": {"from_value": {}, "to_value": {}}, - "field_name": "field_name_value", - "at_any_point_in_time": True, - "in_any_n_day_period": 1994, - }, - "event_filter": { - "event_name": "event_name_value", - "event_parameter_filter_expression": {}, - }, - }, - }, - "sequence_filter": { - "scope": 1, - "sequence_maximum_duration": {"seconds": 751, "nanos": 543}, - "sequence_steps": [ - { - "scope": 1, - "immediately_follows": True, - "constraint_duration": {}, - "filter_expression": {}, - } - ], - }, - 
"clause_type": 1, - } - ], + request_init = { + "search_ads_360_link": {"name": "properties/sample1/searchAds360Links/sample2"} + } + request_init["search_ads_360_link"] = { + "name": "properties/sample1/searchAds360Links/sample2", + "advertiser_id": "advertiser_id_value", + "campaign_data_sharing_enabled": {"value": True}, + "cost_data_sharing_enabled": {}, + "advertiser_display_name": "advertiser_display_name_value", + "ads_personalization_enabled": {}, + "site_stats_sharing_enabled": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateAudienceRequest.meta.fields["audience"] + test_field = analytics_admin.UpdateSearchAds360LinkRequest.meta.fields[ + "search_ads_360_link" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -81921,7 +92126,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["audience"].items(): # pragma: NO COVER + for field, value in request_init["search_ads_360_link"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -81951,49 +92156,40 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["audience"][field])): - del request_init["audience"][field][i][subfield] + for i in range(0, len(request_init["search_ads_360_link"][field])): + del request_init["search_ads_360_link"][field][i][subfield] else: - del request_init["audience"][field][subfield] 
+ del request_init["search_ads_360_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gaa_audience.Audience( + return_value = resources.SearchAds360Link( name="name_value", - display_name="display_name_value", - description="description_value", - membership_duration_days=2561, - ads_personalization_enabled=True, - exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, + advertiser_id="advertiser_id_value", + advertiser_display_name="advertiser_display_name_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gaa_audience.Audience.pb(return_value) + return_value = resources.SearchAds360Link.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_audience(request) + response = client.update_search_ads360_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gaa_audience.Audience) + assert isinstance(response, resources.SearchAds360Link) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.membership_duration_days == 2561 - assert response.ads_personalization_enabled is True - assert ( - response.exclusion_duration_mode - == gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY - ) + assert response.advertiser_id == "advertiser_id_value" + assert response.advertiser_display_name == "advertiser_display_name_value" -def test_create_audience_rest_use_cached_wrapped_rpc(): +def test_update_search_ads360_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -82007,35 +92203,39 @@ def test_create_audience_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_audience in client._transport._wrapped_methods + assert ( + client._transport.update_search_ads360_link + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_audience] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_search_ads360_link + ] = mock_rpc request = {} - client.create_audience(request) + client.update_search_ads360_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_audience(request) + client.update_search_ads360_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_audience_rest_required_fields( - request_type=analytics_admin.CreateAudienceRequest, +def test_update_search_ads360_link_rest_required_fields( + request_type=analytics_admin.UpdateSearchAds360LinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -82046,21 +92246,19 @@ def test_create_audience_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_audience._get_unset_required_fields(jsonified_request) + ).update_search_ads360_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_audience._get_unset_required_fields(jsonified_request) + ).update_search_ads360_link._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -82069,7 +92267,7 @@ def test_create_audience_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = gaa_audience.Audience() + return_value = resources.SearchAds360Link() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -82081,7 +92279,7 @@ def test_create_audience_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -82091,38 +92289,30 @@ def test_create_audience_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = gaa_audience.Audience.pb(return_value) + return_value = resources.SearchAds360Link.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_audience(request) + response = client.update_search_ads360_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_audience_rest_unset_required_fields(): +def test_update_search_ads360_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_audience._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "audience", - ) - ) - ) + unset_fields = transport.update_search_ads360_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_audience_rest_interceptors(null_interceptor): +def test_update_search_ads360_link_rest_interceptors(null_interceptor): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -82135,14 +92325,15 @@ def test_create_audience_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_audience" + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_search_ads360_link", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_audience" + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_search_ads360_link" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateAudienceRequest.pb( - analytics_admin.CreateAudienceRequest() + pb_message = analytics_admin.UpdateSearchAds360LinkRequest.pb( + analytics_admin.UpdateSearchAds360LinkRequest() ) transcode.return_value = { "method": "post", @@ -82154,19 +92345,19 @@ def test_create_audience_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gaa_audience.Audience.to_json( - gaa_audience.Audience() + req.return_value._content = resources.SearchAds360Link.to_json( + resources.SearchAds360Link() ) - request = analytics_admin.CreateAudienceRequest() + request = analytics_admin.UpdateSearchAds360LinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gaa_audience.Audience() + post.return_value = resources.SearchAds360Link() - client.create_audience( + client.update_search_ads360_link( request, metadata=[ ("key", "val"), @@ -82178,8 +92369,8 @@ def test_create_audience_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_audience_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateAudienceRequest +def 
test_update_search_ads360_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateSearchAds360LinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -82187,7 +92378,9 @@ def test_create_audience_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = { + "search_ads_360_link": {"name": "properties/sample1/searchAds360Links/sample2"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -82199,10 +92392,10 @@ def test_create_audience_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_audience(request) + client.update_search_ads360_link(request) -def test_create_audience_rest_flattened(): +def test_update_search_ads360_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -82211,15 +92404,19 @@ def test_create_audience_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gaa_audience.Audience() + return_value = resources.SearchAds360Link() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = { + "search_ads_360_link": { + "name": "properties/sample1/searchAds360Links/sample2" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - audience=gaa_audience.Audience(name="name_value"), + search_ads_360_link=resources.SearchAds360Link(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -82227,24 +92424,25 @@ def test_create_audience_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gaa_audience.Audience.pb(return_value) + return_value = resources.SearchAds360Link.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_audience(**mock_args) + client.update_search_ads360_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/audiences" % client.transport._host, + "%s/v1alpha/{search_ads_360_link.name=properties/*/searchAds360Links/*}" + % client.transport._host, args[1], ) -def test_create_audience_rest_flattened_error(transport: str = "rest"): +def test_update_search_ads360_link_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -82253,14 +92451,14 @@ def test_create_audience_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_audience( - analytics_admin.CreateAudienceRequest(), - parent="parent_value", - audience=gaa_audience.Audience(name="name_value"), + client.update_search_ads360_link( + analytics_admin.UpdateSearchAds360LinkRequest(), + search_ads_360_link=resources.SearchAds360Link(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_audience_rest_error(): +def test_update_search_ads360_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -82269,184 +92467,64 @@ def test_create_audience_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateAudienceRequest, - dict, - ], -) -def test_update_audience_rest(request_type): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"audience": {"name": "properties/sample1/audiences/sample2"}} - request_init["audience"] = { - "name": "properties/sample1/audiences/sample2", - "display_name": "display_name_value", - "description": "description_value", - "membership_duration_days": 2561, - "ads_personalization_enabled": True, - "event_trigger": {"event_name": "event_name_value", "log_condition": 1}, - "exclusion_duration_mode": 1, - "filter_clauses": [ - { - "simple_filter": { - "scope": 1, - "filter_expression": { - "and_group": {"filter_expressions": {}}, - "or_group": {}, - "not_expression": {}, - "dimension_or_metric_filter": { - "string_filter": { - "match_type": 1, - "value": "value_value", - "case_sensitive": True, - }, - "in_list_filter": { - "values": ["values_value1", "values_value2"], - "case_sensitive": True, - }, - "numeric_filter": { - "operation": 1, - "value": { - "int64_value": 1073, - "double_value": 0.12710000000000002, - }, - }, - "between_filter": {"from_value": {}, "to_value": {}}, - "field_name": 
"field_name_value", - "at_any_point_in_time": True, - "in_any_n_day_period": 1994, - }, - "event_filter": { - "event_name": "event_name_value", - "event_parameter_filter_expression": {}, - }, - }, - }, - "sequence_filter": { - "scope": 1, - "sequence_maximum_duration": {"seconds": 751, "nanos": 543}, - "sequence_steps": [ - { - "scope": 1, - "immediately_follows": True, - "constraint_duration": {}, - "filter_expression": {}, - } - ], - }, - "clause_type": 1, - } - ], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateAudienceRequest.meta.fields["audience"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["audience"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + analytics_admin.GetAttributionSettingsRequest, + dict, + ], +) +def test_get_attribution_settings_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = 
subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["audience"][field])): - del request_init["audience"][field][i][subfield] - else: - del request_init["audience"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/attributionSettings"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gaa_audience.Audience( + return_value = resources.AttributionSettings( name="name_value", - display_name="display_name_value", - description="description_value", - membership_duration_days=2561, - ads_personalization_enabled=True, - exclusion_duration_mode=gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY, + acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, + other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, + reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, + ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gaa_audience.Audience.pb(return_value) + return_value = resources.AttributionSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = 
client.update_audience(request) + response = client.get_attribution_settings(request) # Establish that the response is the type that we expect. - assert isinstance(response, gaa_audience.Audience) + assert isinstance(response, resources.AttributionSettings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.membership_duration_days == 2561 - assert response.ads_personalization_enabled is True assert ( - response.exclusion_duration_mode - == gaa_audience.Audience.AudienceExclusionDurationMode.EXCLUDE_TEMPORARILY + response.acquisition_conversion_event_lookback_window + == resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS + ) + assert ( + response.other_conversion_event_lookback_window + == resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS + ) + assert ( + response.reporting_attribution_model + == resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN + ) + assert ( + response.ads_web_conversion_data_export_scope + == resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET ) -def test_update_audience_rest_use_cached_wrapped_rpc(): +def test_get_attribution_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -82460,34 +92538,40 @@ def test_update_audience_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_audience in client._transport._wrapped_methods + assert ( + client._transport.get_attribution_settings + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = 
mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_audience] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_attribution_settings + ] = mock_rpc request = {} - client.update_audience(request) + client.get_attribution_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_audience(request) + client.get_attribution_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_audience_rest_required_fields( - request_type=analytics_admin.UpdateAudienceRequest, +def test_get_attribution_settings_rest_required_fields( + request_type=analytics_admin.GetAttributionSettingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -82498,19 +92582,21 @@ def test_update_audience_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_audience._get_unset_required_fields(jsonified_request) + ).get_attribution_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_audience._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).get_attribution_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -82519,7 +92605,7 @@ def test_update_audience_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gaa_audience.Audience() + return_value = resources.AttributionSettings() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -82531,48 +92617,39 @@ def test_update_audience_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gaa_audience.Audience.pb(return_value) + return_value = resources.AttributionSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_audience(request) + response = client.get_attribution_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_audience_rest_unset_required_fields(): +def test_get_attribution_settings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_audience._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "audience", - "updateMask", - ) - ) - ) + unset_fields = transport.get_attribution_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_audience_rest_interceptors(null_interceptor): +def test_get_attribution_settings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -82585,14 +92662,14 @@ def test_update_audience_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_update_audience" + transports.AnalyticsAdminServiceRestInterceptor, "post_get_attribution_settings" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_audience" + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_attribution_settings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateAudienceRequest.pb( - analytics_admin.UpdateAudienceRequest() + pb_message = analytics_admin.GetAttributionSettingsRequest.pb( + analytics_admin.GetAttributionSettingsRequest() ) transcode.return_value = { "method": "post", @@ -82604,19 +92681,19 @@ def test_update_audience_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gaa_audience.Audience.to_json( - gaa_audience.Audience() + req.return_value._content = resources.AttributionSettings.to_json( + resources.AttributionSettings() ) - request = analytics_admin.UpdateAudienceRequest() + request = 
analytics_admin.GetAttributionSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gaa_audience.Audience() + post.return_value = resources.AttributionSettings() - client.update_audience( + client.get_attribution_settings( request, metadata=[ ("key", "val"), @@ -82628,8 +92705,8 @@ def test_update_audience_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_audience_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.UpdateAudienceRequest +def test_get_attribution_settings_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetAttributionSettingsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -82637,7 +92714,7 @@ def test_update_audience_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"audience": {"name": "properties/sample1/audiences/sample2"}} + request_init = {"name": "properties/sample1/attributionSettings"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -82649,10 +92726,10 @@ def test_update_audience_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_audience(request) + client.get_attribution_settings(request) -def test_update_audience_rest_flattened(): +def test_get_attribution_settings_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -82661,15 +92738,14 @@ def test_update_audience_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gaa_audience.Audience() + return_value = resources.AttributionSettings() # get arguments that satisfy an http rule for this method - sample_request = {"audience": {"name": "properties/sample1/audiences/sample2"}} + sample_request = {"name": "properties/sample1/attributionSettings"} # get truthy value for each flattened field mock_args = dict( - audience=gaa_audience.Audience(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -82677,25 +92753,25 @@ def test_update_audience_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gaa_audience.Audience.pb(return_value) + return_value = resources.AttributionSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_audience(**mock_args) + client.get_attribution_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{audience.name=properties/*/audiences/*}" + "%s/v1alpha/{name=properties/*/attributionSettings}" % client.transport._host, args[1], ) -def test_update_audience_rest_flattened_error(transport: str = "rest"): +def test_get_attribution_settings_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -82704,14 +92780,13 @@ def test_update_audience_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_audience( - analytics_admin.UpdateAudienceRequest(), - audience=gaa_audience.Audience(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_attribution_settings( + analytics_admin.GetAttributionSettingsRequest(), + name="name_value", ) -def test_update_audience_rest_error(): +def test_get_attribution_settings_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -82720,39 +92795,144 @@ def test_update_audience_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ArchiveAudienceRequest, + analytics_admin.UpdateAttributionSettingsRequest, dict, ], ) -def test_archive_audience_rest(request_type): +def test_update_attribution_settings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audiences/sample2"} + request_init = { + "attribution_settings": {"name": "properties/sample1/attributionSettings"} + } + request_init["attribution_settings"] = { + "name": "properties/sample1/attributionSettings", + "acquisition_conversion_event_lookback_window": 1, + "other_conversion_event_lookback_window": 1, + "reporting_attribution_model": 1, + "ads_web_conversion_data_export_scope": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateAttributionSettingsRequest.meta.fields[ + "attribution_settings" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "attribution_settings" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not 
present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["attribution_settings"][field])): + del request_init["attribution_settings"][field][i][subfield] + else: + del request_init["attribution_settings"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.AttributionSettings( + name="name_value", + acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, + other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, + reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, + ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.AttributionSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.archive_audience(request) + response = 
client.update_attribution_settings(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.AttributionSettings) + assert response.name == "name_value" + assert ( + response.acquisition_conversion_event_lookback_window + == resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS + ) + assert ( + response.other_conversion_event_lookback_window + == resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS + ) + assert ( + response.reporting_attribution_model + == resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN + ) + assert ( + response.ads_web_conversion_data_export_scope + == resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET + ) -def test_archive_audience_rest_use_cached_wrapped_rpc(): +def test_update_attribution_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -82766,7 +92946,10 @@ def test_archive_audience_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.archive_audience in client._transport._wrapped_methods + assert ( + client._transport.update_attribution_settings + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -82774,29 +92957,28 @@ def test_archive_audience_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.archive_audience + client._transport.update_attribution_settings ] = mock_rpc request = {} - client.archive_audience(request) + client.update_attribution_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.archive_audience(request) + client.update_attribution_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_archive_audience_rest_required_fields( - request_type=analytics_admin.ArchiveAudienceRequest, +def test_update_attribution_settings_rest_required_fields( + request_type=analytics_admin.UpdateAttributionSettingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -82807,21 +92989,19 @@ def test_archive_audience_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).archive_audience._get_unset_required_fields(jsonified_request) + ).update_attribution_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).archive_audience._get_unset_required_fields(jsonified_request) + ).update_attribution_settings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -82830,7 +93010,7 @@ def test_archive_audience_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.AttributionSettings() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -82842,7 +93022,7 @@ def test_archive_audience_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -82850,29 +93030,40 @@ def test_archive_audience_rest_required_fields( response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.AttributionSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.archive_audience(request) + response = client.update_attribution_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_archive_audience_rest_unset_required_fields(): +def test_update_attribution_settings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.archive_audience._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_attribution_settings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "attributionSettings", + "updateMask", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_archive_audience_rest_interceptors(null_interceptor): +def test_update_attribution_settings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -82885,11 +93076,16 @@ def test_archive_audience_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_archive_audience" + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_attribution_settings", + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "pre_update_attribution_settings", ) as pre: pre.assert_not_called() - pb_message = analytics_admin.ArchiveAudienceRequest.pb( - analytics_admin.ArchiveAudienceRequest() + post.assert_not_called() + pb_message = analytics_admin.UpdateAttributionSettingsRequest.pb( + analytics_admin.UpdateAttributionSettingsRequest() ) transcode.return_value = { "method": "post", @@ -82901,15 +93097,19 @@ def test_archive_audience_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.AttributionSettings.to_json( + resources.AttributionSettings() + ) - request = analytics_admin.ArchiveAudienceRequest() + request = analytics_admin.UpdateAttributionSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = 
resources.AttributionSettings() - client.archive_audience( + client.update_attribution_settings( request, metadata=[ ("key", "val"), @@ -82918,10 +93118,12 @@ def test_archive_audience_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_archive_audience_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ArchiveAudienceRequest +def test_update_attribution_settings_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.UpdateAttributionSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -82929,7 +93131,9 @@ def test_archive_audience_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audiences/sample2"} + request_init = { + "attribution_settings": {"name": "properties/sample1/attributionSettings"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -82941,185 +93145,156 @@ def test_archive_audience_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.archive_audience(request) - - -def test_archive_audience_rest_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + client.update_attribution_settings(request) -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.GetSearchAds360LinkRequest, - dict, - ], -) -def test_get_search_ads360_link_rest(request_type): +def test_update_attribution_settings_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/searchAds360Links/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.SearchAds360Link( - name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + return_value = resources.AttributionSettings() + + # get arguments that satisfy an http rule for this method + sample_request = { + "attribution_settings": {"name": "properties/sample1/attributionSettings"} + } + + # get truthy value for each flattened field + mock_args = dict( + attribution_settings=resources.AttributionSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.SearchAds360Link.pb(return_value) + return_value = resources.AttributionSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_search_ads360_link(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.SearchAds360Link) - assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + client.update_attribution_settings(**mock_args) -def test_get_search_ads360_link_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{attribution_settings.name=properties/*/attributionSettings}" + % client.transport._host, + args[1], ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - # Ensure method has been cached - assert ( - client._transport.get_search_ads360_link - in client._transport._wrapped_methods - ) +def test_update_attribution_settings_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_attribution_settings( + analytics_admin.UpdateAttributionSettingsRequest(), + attribution_settings=resources.AttributionSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - client._transport._wrapped_methods[ - client._transport.get_search_ads360_link - ] = mock_rpc - - request = {} - client.get_search_ads360_link(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_search_ads360_link(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 -def test_get_search_ads360_link_rest_required_fields( - request_type=analytics_admin.GetSearchAds360LinkRequest, -): - transport_class = transports.AnalyticsAdminServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +def test_update_attribution_settings_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_search_ads360_link._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_search_ads360_link._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.RunAccessReportRequest, + dict, + ], +) +def test_run_access_report_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) + + # send a request that will satisfy transcoding + request_init = {"entity": "properties/sample1"} request = request_type(**request_init) - # Designate an appropriate value for the returned response. 
- return_value = resources.SearchAds360Link() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_admin.RunAccessReportResponse( + row_count=992, + ) - response_value = Response() - response_value.status_code = 200 + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_admin.RunAccessReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Convert return value to protobuf type - return_value = resources.SearchAds360Link.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_access_report(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. 
+ assert isinstance(response, analytics_admin.RunAccessReportResponse) + assert response.row_count == 992 - response = client.get_search_ads360_link(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +def test_run_access_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -def test_get_search_ads360_link_rest_unset_required_fields(): - transport = transports.AnalyticsAdminServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + # Ensure method has been cached + assert client._transport.run_access_report in client._transport._wrapped_methods - unset_fields = transport.get_search_ads360_link._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.run_access_report + ] = mock_rpc + + request = {} + client.run_access_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.run_access_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_search_ads360_link_rest_interceptors(null_interceptor): +def test_run_access_report_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -83132,14 +93307,14 @@ def test_get_search_ads360_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_search_ads360_link" + transports.AnalyticsAdminServiceRestInterceptor, "post_run_access_report" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_search_ads360_link" + transports.AnalyticsAdminServiceRestInterceptor, "pre_run_access_report" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetSearchAds360LinkRequest.pb( - analytics_admin.GetSearchAds360LinkRequest() + pb_message = analytics_admin.RunAccessReportRequest.pb( + analytics_admin.RunAccessReportRequest() ) transcode.return_value = { "method": "post", @@ -83151,19 +93326,19 @@ def test_get_search_ads360_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.SearchAds360Link.to_json( - resources.SearchAds360Link() + req.return_value._content = analytics_admin.RunAccessReportResponse.to_json( + analytics_admin.RunAccessReportResponse() ) - request = analytics_admin.GetSearchAds360LinkRequest() + request = analytics_admin.RunAccessReportRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, 
metadata - post.return_value = resources.SearchAds360Link() + post.return_value = analytics_admin.RunAccessReportResponse() - client.get_search_ads360_link( + client.run_access_report( request, metadata=[ ("key", "val"), @@ -83175,8 +93350,8 @@ def test_get_search_ads360_link_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_search_ads360_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetSearchAds360LinkRequest +def test_run_access_report_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.RunAccessReportRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -83184,7 +93359,7 @@ def test_get_search_ads360_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/searchAds360Links/sample2"} + request_init = {"entity": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -83196,67 +93371,10 @@ def test_get_search_ads360_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_search_ads360_link(request) - - -def test_get_search_ads360_link_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = resources.SearchAds360Link() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/searchAds360Links/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.SearchAds360Link.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_search_ads360_link(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=properties/*/searchAds360Links/*}" - % client.transport._host, - args[1], - ) - - -def test_get_search_ads360_link_rest_flattened_error(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_search_ads360_link( - analytics_admin.GetSearchAds360LinkRequest(), - name="name_value", - ) + client.run_access_report(request) -def test_get_search_ads360_link_rest_error(): +def test_run_access_report_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -83265,44 +93383,121 @@ def test_get_search_ads360_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListSearchAds360LinksRequest, + analytics_admin.CreateAccessBindingRequest, dict, ], ) -def test_list_search_ads360_links_rest(request_type): +def test_create_access_binding_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"parent": "accounts/sample1"} + request_init["access_binding"] = { + "user": "user_value", + "name": "name_value", + "roles": ["roles_value1", "roles_value2"], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreateAccessBindingRequest.meta.fields[ + "access_binding" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["access_binding"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["access_binding"][field])): + del request_init["access_binding"][field][i][subfield] + else: + 
del request_init["access_binding"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListSearchAds360LinksResponse( - next_page_token="next_page_token_value", + return_value = resources.AccessBinding( + name="name_value", + roles=["roles_value"], + user="user_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListSearchAds360LinksResponse.pb(return_value) + return_value = resources.AccessBinding.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_search_ads360_links(request) + response = client.create_access_binding(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSearchAds360LinksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.AccessBinding) + assert response.name == "name_value" + assert response.roles == ["roles_value"] -def test_list_search_ads360_links_rest_use_cached_wrapped_rpc(): +def test_create_access_binding_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -83317,7 +93512,7 @@ def test_list_search_ads360_links_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_search_ads360_links + client._transport.create_access_binding in client._transport._wrapped_methods ) @@ -83327,24 +93522,24 @@ def test_list_search_ads360_links_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_search_ads360_links + client._transport.create_access_binding ] = mock_rpc request = {} - client.list_search_ads360_links(request) + client.create_access_binding(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_search_ads360_links(request) + client.create_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_search_ads360_links_rest_required_fields( - request_type=analytics_admin.ListSearchAds360LinksRequest, +def test_create_access_binding_rest_required_fields( + request_type=analytics_admin.CreateAccessBindingRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -83360,7 +93555,7 @@ def test_list_search_ads360_links_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_search_ads360_links._get_unset_required_fields(jsonified_request) + ).create_access_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -83369,14 +93564,7 @@ def test_list_search_ads360_links_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_search_ads360_links._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).create_access_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -83390,7 +93578,7 @@ def test_list_search_ads360_links_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListSearchAds360LinksResponse() + return_value = resources.AccessBinding() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -83402,49 +93590,48 @@ def test_list_search_ads360_links_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListSearchAds360LinksResponse.pb( - return_value - ) + return_value = resources.AccessBinding.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_search_ads360_links(request) + response = client.create_access_binding(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_search_ads360_links_rest_unset_required_fields(): +def test_create_access_binding_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_search_ads360_links._get_unset_required_fields({}) + unset_fields = transport.create_access_binding._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "pageSize", - "pageToken", + "parent", + "accessBinding", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_search_ads360_links_rest_interceptors(null_interceptor): +def test_create_access_binding_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -83457,14 +93644,14 @@ def 
test_list_search_ads360_links_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_search_ads360_links" + transports.AnalyticsAdminServiceRestInterceptor, "post_create_access_binding" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_search_ads360_links" + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_access_binding" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListSearchAds360LinksRequest.pb( - analytics_admin.ListSearchAds360LinksRequest() + pb_message = analytics_admin.CreateAccessBindingRequest.pb( + analytics_admin.CreateAccessBindingRequest() ) transcode.return_value = { "method": "post", @@ -83476,21 +93663,19 @@ def test_list_search_ads360_links_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.ListSearchAds360LinksResponse.to_json( - analytics_admin.ListSearchAds360LinksResponse() - ) + req.return_value._content = resources.AccessBinding.to_json( + resources.AccessBinding() ) - request = analytics_admin.ListSearchAds360LinksRequest() + request = analytics_admin.CreateAccessBindingRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListSearchAds360LinksResponse() + post.return_value = resources.AccessBinding() - client.list_search_ads360_links( + client.create_access_binding( request, metadata=[ ("key", "val"), @@ -83502,8 +93687,8 @@ def test_list_search_ads360_links_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_search_ads360_links_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListSearchAds360LinksRequest +def 
test_create_access_binding_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateAccessBindingRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -83511,7 +93696,7 @@ def test_list_search_ads360_links_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"parent": "accounts/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -83523,10 +93708,10 @@ def test_list_search_ads360_links_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_search_ads360_links(request) + client.create_access_binding(request) -def test_list_search_ads360_links_rest_flattened(): +def test_create_access_binding_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -83535,14 +93720,15 @@ def test_list_search_ads360_links_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListSearchAds360LinksResponse() + return_value = resources.AccessBinding() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"parent": "accounts/sample1"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", + access_binding=resources.AccessBinding(user="user_value"), ) mock_args.update(sample_request) @@ -83550,25 +93736,24 @@ def test_list_search_ads360_links_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListSearchAds360LinksResponse.pb(return_value) + return_value = resources.AccessBinding.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_search_ads360_links(**mock_args) + client.create_access_binding(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/searchAds360Links" - % client.transport._host, + "%s/v1alpha/{parent=accounts/*}/accessBindings" % client.transport._host, args[1], ) -def test_list_search_ads360_links_rest_flattened_error(transport: str = "rest"): +def test_create_access_binding_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -83577,198 +93762,63 @@ def test_list_search_ads360_links_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_search_ads360_links( - analytics_admin.ListSearchAds360LinksRequest(), + client.create_access_binding( + analytics_admin.CreateAccessBindingRequest(), parent="parent_value", + access_binding=resources.AccessBinding(user="user_value"), ) -def test_list_search_ads360_links_rest_pager(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), - resources.SearchAds360Link(), - resources.SearchAds360Link(), - ], - next_page_token="abc", - ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[], - next_page_token="def", - ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), - ], - next_page_token="ghi", - ), - analytics_admin.ListSearchAds360LinksResponse( - search_ads_360_links=[ - resources.SearchAds360Link(), - resources.SearchAds360Link(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListSearchAds360LinksResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_search_ads360_links(request=sample_request) - - results = 
list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.SearchAds360Link) for i in results) - - pages = list(client.list_search_ads360_links(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.CreateSearchAds360LinkRequest, - dict, - ], -) -def test_create_search_ads360_link_rest(request_type): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["search_ads_360_link"] = { - "name": "name_value", - "advertiser_id": "advertiser_id_value", - "campaign_data_sharing_enabled": {"value": True}, - "cost_data_sharing_enabled": {}, - "advertiser_display_name": "advertiser_display_name_value", - "ads_personalization_enabled": {}, - "site_stats_sharing_enabled": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateSearchAds360LinkRequest.meta.fields[ - "search_ads_360_link" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["search_ads_360_link"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["search_ads_360_link"][field])): - del 
request_init["search_ads_360_link"][field][i][subfield] - else: - del request_init["search_ads_360_link"][field][subfield] +def test_create_access_binding_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetAccessBindingRequest, + dict, + ], +) +def test_get_access_binding_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "accounts/sample1/accessBindings/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.SearchAds360Link( + return_value = resources.AccessBinding( name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", + roles=["roles_value"], + user="user_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.SearchAds360Link.pb(return_value) + return_value = resources.AccessBinding.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_search_ads360_link(request) + response = client.get_access_binding(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.SearchAds360Link) + assert isinstance(response, resources.AccessBinding) assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert response.roles == ["roles_value"] -def test_create_search_ads360_link_rest_use_cached_wrapped_rpc(): +def test_get_access_binding_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -83783,8 +93833,7 @@ def test_create_search_ads360_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_search_ads360_link - in client._transport._wrapped_methods + client._transport.get_access_binding in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -83793,29 +93842,29 @@ def test_create_search_ads360_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_search_ads360_link + client._transport.get_access_binding ] = mock_rpc request = {} - client.create_search_ads360_link(request) + client.get_access_binding(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_search_ads360_link(request) + client.get_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_search_ads360_link_rest_required_fields( - request_type=analytics_admin.CreateSearchAds360LinkRequest, +def test_get_access_binding_rest_required_fields( + request_type=analytics_admin.GetAccessBindingRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -83826,21 +93875,21 @@ def test_create_search_ads360_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_search_ads360_link._get_unset_required_fields(jsonified_request) + ).get_access_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_search_ads360_link._get_unset_required_fields(jsonified_request) + ).get_access_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -83849,7 +93898,7 @@ def test_create_search_ads360_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.SearchAds360Link() + return_value = resources.AccessBinding() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -83861,48 +93910,39 @@ def test_create_search_ads360_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.SearchAds360Link.pb(return_value) + return_value = resources.AccessBinding.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_search_ads360_link(request) + response = client.get_access_binding(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_search_ads360_link_rest_unset_required_fields(): +def test_get_access_binding_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_search_ads360_link._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "searchAds360Link", - ) - ) - ) + unset_fields = transport.get_access_binding._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_search_ads360_link_rest_interceptors(null_interceptor): +def test_get_access_binding_rest_interceptors(null_interceptor): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -83915,15 +93955,14 @@ def test_create_search_ads360_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_create_search_ads360_link", + transports.AnalyticsAdminServiceRestInterceptor, "post_get_access_binding" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_search_ads360_link" + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_access_binding" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateSearchAds360LinkRequest.pb( - analytics_admin.CreateSearchAds360LinkRequest() + pb_message = analytics_admin.GetAccessBindingRequest.pb( + analytics_admin.GetAccessBindingRequest() ) transcode.return_value = { "method": "post", @@ -83935,19 +93974,19 @@ def test_create_search_ads360_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.SearchAds360Link.to_json( - resources.SearchAds360Link() + req.return_value._content = resources.AccessBinding.to_json( + resources.AccessBinding() ) - request = analytics_admin.CreateSearchAds360LinkRequest() + request = analytics_admin.GetAccessBindingRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.SearchAds360Link() + post.return_value = resources.AccessBinding() - client.create_search_ads360_link( + client.get_access_binding( request, metadata=[ ("key", "val"), @@ -83959,8 +93998,8 @@ def test_create_search_ads360_link_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_search_ads360_link_rest_bad_request( - transport: str = "rest", 
request_type=analytics_admin.CreateSearchAds360LinkRequest +def test_get_access_binding_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetAccessBindingRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -83968,7 +94007,7 @@ def test_create_search_ads360_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "accounts/sample1/accessBindings/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -83980,10 +94019,10 @@ def test_create_search_ads360_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_search_ads360_link(request) + client.get_access_binding(request) -def test_create_search_ads360_link_rest_flattened(): +def test_get_access_binding_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -83992,15 +94031,14 @@ def test_create_search_ads360_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.SearchAds360Link() + return_value = resources.AccessBinding() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "accounts/sample1/accessBindings/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - search_ads_360_link=resources.SearchAds360Link(name="name_value"), + name="name_value", ) mock_args.update(sample_request) @@ -84008,25 +94046,24 @@ def test_create_search_ads360_link_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.SearchAds360Link.pb(return_value) + return_value = resources.AccessBinding.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_search_ads360_link(**mock_args) + client.get_access_binding(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/searchAds360Links" - % client.transport._host, + "%s/v1alpha/{name=accounts/*/accessBindings/*}" % client.transport._host, args[1], ) -def test_create_search_ads360_link_rest_flattened_error(transport: str = "rest"): +def test_get_access_binding_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -84035,14 +94072,13 @@ def test_create_search_ads360_link_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_search_ads360_link( - analytics_admin.CreateSearchAds360LinkRequest(), - parent="parent_value", - search_ads_360_link=resources.SearchAds360Link(name="name_value"), + client.get_access_binding( + analytics_admin.GetAccessBindingRequest(), + name="name_value", ) -def test_create_search_ads360_link_rest_error(): +def test_get_access_binding_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -84051,39 +94087,123 @@ def test_create_search_ads360_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteSearchAds360LinkRequest, + analytics_admin.UpdateAccessBindingRequest, dict, ], ) -def test_delete_search_ads360_link_rest(request_type): +def test_update_access_binding_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/searchAds360Links/sample2"} + request_init = { + "access_binding": {"name": "accounts/sample1/accessBindings/sample2"} + } + request_init["access_binding"] = { + "user": "user_value", + "name": "accounts/sample1/accessBindings/sample2", + "roles": ["roles_value1", "roles_value2"], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateAccessBindingRequest.meta.fields[ + "access_binding" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["access_binding"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["access_binding"][field])): + del request_init["access_binding"][field][i][subfield] + else: + 
del request_init["access_binding"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.AccessBinding( + name="name_value", + roles=["roles_value"], + user="user_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.AccessBinding.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_search_ads360_link(request) + response = client.update_access_binding(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.AccessBinding) + assert response.name == "name_value" + assert response.roles == ["roles_value"] -def test_delete_search_ads360_link_rest_use_cached_wrapped_rpc(): +def test_update_access_binding_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -84098,7 +94218,7 @@ def test_delete_search_ads360_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_search_ads360_link + client._transport.update_access_binding in client._transport._wrapped_methods ) @@ -84108,29 +94228,28 @@ def test_delete_search_ads360_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.delete_search_ads360_link + client._transport.update_access_binding ] = mock_rpc request = {} - client.delete_search_ads360_link(request) + client.update_access_binding(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_search_ads360_link(request) + client.update_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_search_ads360_link_rest_required_fields( - request_type=analytics_admin.DeleteSearchAds360LinkRequest, +def test_update_access_binding_rest_required_fields( + request_type=analytics_admin.UpdateAccessBindingRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -84141,21 +94260,17 @@ def test_delete_search_ads360_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_search_ads360_link._get_unset_required_fields(jsonified_request) + ).update_access_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_search_ads360_link._get_unset_required_fields(jsonified_request) + ).update_access_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -84164,7 +94279,7 @@ def 
test_delete_search_ads360_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.AccessBinding() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -84176,36 +94291,40 @@ def test_delete_search_ads360_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.AccessBinding.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_search_ads360_link(request) + response = client.update_access_binding(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_search_ads360_link_rest_unset_required_fields(): +def test_update_access_binding_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_search_ads360_link._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_access_binding._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("accessBinding",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_search_ads360_link_rest_interceptors(null_interceptor): +def 
test_update_access_binding_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -84218,11 +94337,14 @@ def test_delete_search_ads360_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_search_ads360_link" + transports.AnalyticsAdminServiceRestInterceptor, "post_update_access_binding" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_access_binding" ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteSearchAds360LinkRequest.pb( - analytics_admin.DeleteSearchAds360LinkRequest() + post.assert_not_called() + pb_message = analytics_admin.UpdateAccessBindingRequest.pb( + analytics_admin.UpdateAccessBindingRequest() ) transcode.return_value = { "method": "post", @@ -84234,15 +94356,19 @@ def test_delete_search_ads360_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.AccessBinding.to_json( + resources.AccessBinding() + ) - request = analytics_admin.DeleteSearchAds360LinkRequest() + request = analytics_admin.UpdateAccessBindingRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.AccessBinding() - client.delete_search_ads360_link( + client.update_access_binding( request, metadata=[ ("key", "val"), @@ -84251,10 +94377,11 @@ def test_delete_search_ads360_link_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_search_ads360_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeleteSearchAds360LinkRequest +def test_update_access_binding_rest_bad_request( + 
transport: str = "rest", request_type=analytics_admin.UpdateAccessBindingRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -84262,7 +94389,9 @@ def test_delete_search_ads360_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/searchAds360Links/sample2"} + request_init = { + "access_binding": {"name": "accounts/sample1/accessBindings/sample2"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -84274,10 +94403,10 @@ def test_delete_search_ads360_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_search_ads360_link(request) + client.update_access_binding(request) -def test_delete_search_ads360_link_rest_flattened(): +def test_update_access_binding_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -84286,38 +94415,42 @@ def test_delete_search_ads360_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.AccessBinding() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/searchAds360Links/sample2"} + sample_request = { + "access_binding": {"name": "accounts/sample1/accessBindings/sample2"} + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + access_binding=resources.AccessBinding(user="user_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.AccessBinding.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_search_ads360_link(**mock_args) + client.update_access_binding(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/searchAds360Links/*}" + "%s/v1alpha/{access_binding.name=accounts/*/accessBindings/*}" % client.transport._host, args[1], ) -def test_delete_search_ads360_link_rest_flattened_error(transport: str = "rest"): +def test_update_access_binding_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -84326,13 +94459,13 @@ def test_delete_search_ads360_link_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_search_ads360_link( - analytics_admin.DeleteSearchAds360LinkRequest(), - name="name_value", + client.update_access_binding( + analytics_admin.UpdateAccessBindingRequest(), + access_binding=resources.AccessBinding(user="user_value"), ) -def test_delete_search_ads360_link_rest_error(): +def test_update_access_binding_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -84341,128 +94474,39 @@ def test_delete_search_ads360_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateSearchAds360LinkRequest, + analytics_admin.DeleteAccessBindingRequest, dict, ], ) -def test_update_search_ads360_link_rest(request_type): +def test_delete_access_binding_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "search_ads_360_link": {"name": "properties/sample1/searchAds360Links/sample2"} - } - request_init["search_ads_360_link"] = { - "name": "properties/sample1/searchAds360Links/sample2", - "advertiser_id": "advertiser_id_value", - "campaign_data_sharing_enabled": {"value": True}, - "cost_data_sharing_enabled": {}, - "advertiser_display_name": "advertiser_display_name_value", - "ads_personalization_enabled": {}, - "site_stats_sharing_enabled": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateSearchAds360LinkRequest.meta.fields[ - "search_ads_360_link" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["search_ads_360_link"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the 
runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["search_ads_360_link"][field])): - del request_init["search_ads_360_link"][field][i][subfield] - else: - del request_init["search_ads_360_link"][field][subfield] + request_init = {"name": "accounts/sample1/accessBindings/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.SearchAds360Link( - name="name_value", - advertiser_id="advertiser_id_value", - advertiser_display_name="advertiser_display_name_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.SearchAds360Link.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_search_ads360_link(request) + response = client.delete_access_binding(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.SearchAds360Link) - assert response.name == "name_value" - assert response.advertiser_id == "advertiser_id_value" - assert response.advertiser_display_name == "advertiser_display_name_value" + assert response is None -def test_update_search_ads360_link_rest_use_cached_wrapped_rpc(): +def test_delete_access_binding_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -84477,7 +94521,7 @@ def test_update_search_ads360_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_search_ads360_link + client._transport.delete_access_binding in client._transport._wrapped_methods ) @@ -84487,28 +94531,29 @@ def test_update_search_ads360_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_search_ads360_link + client._transport.delete_access_binding ] = mock_rpc request = {} - client.update_search_ads360_link(request) + client.delete_access_binding(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_search_ads360_link(request) + client.delete_access_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_search_ads360_link_rest_required_fields( - request_type=analytics_admin.UpdateSearchAds360LinkRequest, +def test_delete_access_binding_rest_required_fields( + request_type=analytics_admin.DeleteAccessBindingRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -84519,19 +94564,21 @@ def test_update_search_ads360_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_search_ads360_link._get_unset_required_fields(jsonified_request) + ).delete_access_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_search_ads360_link._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).delete_access_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -84540,7 +94587,7 @@ def test_update_search_ads360_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.SearchAds360Link() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -84552,40 +94599,36 @@ def test_update_search_ads360_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.SearchAds360Link.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_search_ads360_link(request) + response = client.delete_access_binding(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_search_ads360_link_rest_unset_required_fields(): +def test_delete_access_binding_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_search_ads360_link._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) + unset_fields = transport.delete_access_binding._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_search_ads360_link_rest_interceptors(null_interceptor): +def test_delete_access_binding_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -84598,15 +94641,11 @@ def test_update_search_ads360_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_update_search_ads360_link", - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_search_ads360_link" + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_access_binding" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.UpdateSearchAds360LinkRequest.pb( - analytics_admin.UpdateSearchAds360LinkRequest() + pb_message = analytics_admin.DeleteAccessBindingRequest.pb( + analytics_admin.DeleteAccessBindingRequest() ) transcode.return_value = { "method": "post", @@ -84618,19 +94657,15 @@ def test_update_search_ads360_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.SearchAds360Link.to_json( - resources.SearchAds360Link() - ) - request = analytics_admin.UpdateSearchAds360LinkRequest() + request = analytics_admin.DeleteAccessBindingRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.SearchAds360Link() - client.update_search_ads360_link( + client.delete_access_binding( request, metadata=[ ("key", "val"), @@ -84639,11 +94674,10 @@ def test_update_search_ads360_link_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_search_ads360_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.UpdateSearchAds360LinkRequest +def test_delete_access_binding_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteAccessBindingRequest ): client = 
AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -84651,9 +94685,7 @@ def test_update_search_ads360_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "search_ads_360_link": {"name": "properties/sample1/searchAds360Links/sample2"} - } + request_init = {"name": "accounts/sample1/accessBindings/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -84665,10 +94697,10 @@ def test_update_search_ads360_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_search_ads360_link(request) + client.delete_access_binding(request) -def test_update_search_ads360_link_rest_flattened(): +def test_delete_access_binding_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -84677,45 +94709,37 @@ def test_update_search_ads360_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.SearchAds360Link() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = { - "search_ads_360_link": { - "name": "properties/sample1/searchAds360Links/sample2" - } - } + sample_request = {"name": "accounts/sample1/accessBindings/sample2"} # get truthy value for each flattened field mock_args = dict( - search_ads_360_link=resources.SearchAds360Link(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.SearchAds360Link.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_search_ads360_link(**mock_args) + client.delete_access_binding(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{search_ads_360_link.name=properties/*/searchAds360Links/*}" - % client.transport._host, + "%s/v1alpha/{name=accounts/*/accessBindings/*}" % client.transport._host, args[1], ) -def test_update_search_ads360_link_rest_flattened_error(transport: str = "rest"): +def test_delete_access_binding_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -84724,14 +94748,13 @@ def test_update_search_ads360_link_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_search_ads360_link( - analytics_admin.UpdateSearchAds360LinkRequest(), - search_ads_360_link=resources.SearchAds360Link(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_access_binding( + analytics_admin.DeleteAccessBindingRequest(), + name="name_value", ) -def test_update_search_ads360_link_rest_error(): +def test_delete_access_binding_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -84740,64 +94763,44 @@ def test_update_search_ads360_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetAttributionSettingsRequest, + analytics_admin.ListAccessBindingsRequest, dict, ], ) -def test_get_attribution_settings_rest(request_type): +def test_list_access_bindings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/attributionSettings"} + request_init = {"parent": "accounts/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.AttributionSettings( - name="name_value", - acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, - other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, - reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, - ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, + return_value = analytics_admin.ListAccessBindingsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AttributionSettings.pb(return_value) + return_value = analytics_admin.ListAccessBindingsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_attribution_settings(request) + response = client.list_access_bindings(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AttributionSettings) - assert response.name == "name_value" - assert ( - response.acquisition_conversion_event_lookback_window - == resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS - ) - assert ( - response.other_conversion_event_lookback_window - == resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS - ) - assert ( - response.reporting_attribution_model - == resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN - ) - assert ( - response.ads_web_conversion_data_export_scope - == resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET - ) + assert isinstance(response, pagers.ListAccessBindingsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_attribution_settings_rest_use_cached_wrapped_rpc(): +def test_list_access_bindings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -84812,8 +94815,7 @@ def test_get_attribution_settings_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_attribution_settings - in client._transport._wrapped_methods + client._transport.list_access_bindings in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -84822,29 +94824,29 @@ def test_get_attribution_settings_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.get_attribution_settings + client._transport.list_access_bindings ] = mock_rpc request = {} - client.get_attribution_settings(request) + client.list_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_attribution_settings(request) + client.list_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_attribution_settings_rest_required_fields( - request_type=analytics_admin.GetAttributionSettingsRequest, +def test_list_access_bindings_rest_required_fields( + request_type=analytics_admin.ListAccessBindingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -84855,21 +94857,28 @@ def test_get_attribution_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_attribution_settings._get_unset_required_fields(jsonified_request) + ).list_access_bindings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_attribution_settings._get_unset_required_fields(jsonified_request) + ).list_access_bindings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -84878,7 +94887,7 @@ def test_get_attribution_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.AttributionSettings() + return_value = analytics_admin.ListAccessBindingsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -84899,30 +94908,38 @@ def test_get_attribution_settings_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AttributionSettings.pb(return_value) + return_value = analytics_admin.ListAccessBindingsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_attribution_settings(request) + response = client.list_access_bindings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_attribution_settings_rest_unset_required_fields(): +def test_list_access_bindings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_attribution_settings._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + 
unset_fields = transport.list_access_bindings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_attribution_settings_rest_interceptors(null_interceptor): +def test_list_access_bindings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -84935,14 +94952,14 @@ def test_get_attribution_settings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_attribution_settings" + transports.AnalyticsAdminServiceRestInterceptor, "post_list_access_bindings" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_attribution_settings" + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_access_bindings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetAttributionSettingsRequest.pb( - analytics_admin.GetAttributionSettingsRequest() + pb_message = analytics_admin.ListAccessBindingsRequest.pb( + analytics_admin.ListAccessBindingsRequest() ) transcode.return_value = { "method": "post", @@ -84954,19 +94971,19 @@ def test_get_attribution_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.AttributionSettings.to_json( - resources.AttributionSettings() + req.return_value._content = analytics_admin.ListAccessBindingsResponse.to_json( + analytics_admin.ListAccessBindingsResponse() ) - request = analytics_admin.GetAttributionSettingsRequest() + request = analytics_admin.ListAccessBindingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] 
pre.return_value = request, metadata - post.return_value = resources.AttributionSettings() + post.return_value = analytics_admin.ListAccessBindingsResponse() - client.get_attribution_settings( + client.list_access_bindings( request, metadata=[ ("key", "val"), @@ -84978,8 +94995,8 @@ def test_get_attribution_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_attribution_settings_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetAttributionSettingsRequest +def test_list_access_bindings_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListAccessBindingsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -84987,7 +95004,7 @@ def test_get_attribution_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/attributionSettings"} + request_init = {"parent": "accounts/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -84999,10 +95016,10 @@ def test_get_attribution_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_attribution_settings(request) + client.list_access_bindings(request) -def test_get_attribution_settings_rest_flattened(): +def test_list_access_bindings_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -85011,14 +95028,14 @@ def test_get_attribution_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.AttributionSettings() + return_value = analytics_admin.ListAccessBindingsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/attributionSettings"} + sample_request = {"parent": "accounts/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -85026,25 +95043,24 @@ def test_get_attribution_settings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AttributionSettings.pb(return_value) + return_value = analytics_admin.ListAccessBindingsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_attribution_settings(**mock_args) + client.list_access_bindings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/attributionSettings}" - % client.transport._host, + "%s/v1alpha/{parent=accounts/*}/accessBindings" % client.transport._host, args[1], ) -def test_get_attribution_settings_rest_flattened_error(transport: str = "rest"): +def test_list_access_bindings_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -85053,159 +95069,115 @@ def test_get_attribution_settings_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_attribution_settings( - analytics_admin.GetAttributionSettingsRequest(), - name="name_value", + client.list_access_bindings( + analytics_admin.ListAccessBindingsRequest(), + parent="parent_value", ) -def test_get_attribution_settings_rest_error(): +def test_list_access_bindings_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), + resources.AccessBinding(), + resources.AccessBinding(), + ], + next_page_token="abc", + ), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[], + next_page_token="def", + ), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), + ], + next_page_token="ghi", + ), + analytics_admin.ListAccessBindingsResponse( + access_bindings=[ + resources.AccessBinding(), + resources.AccessBinding(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListAccessBindingsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "accounts/sample1"} + + pager = client.list_access_bindings(request=sample_request) + + results = 
list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AccessBinding) for i in results) + + pages = list(client.list_access_bindings(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateAttributionSettingsRequest, + analytics_admin.BatchCreateAccessBindingsRequest, dict, ], ) -def test_update_attribution_settings_rest(request_type): +def test_batch_create_access_bindings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "attribution_settings": {"name": "properties/sample1/attributionSettings"} - } - request_init["attribution_settings"] = { - "name": "properties/sample1/attributionSettings", - "acquisition_conversion_event_lookback_window": 1, - "other_conversion_event_lookback_window": 1, - "reporting_attribution_model": 1, - "ads_web_conversion_data_export_scope": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateAttributionSettingsRequest.meta.fields[ - "attribution_settings" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "attribution_settings" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["attribution_settings"][field])): - del 
request_init["attribution_settings"][field][i][subfield] - else: - del request_init["attribution_settings"][field][subfield] + request_init = {"parent": "accounts/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.AttributionSettings( - name="name_value", - acquisition_conversion_event_lookback_window=resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS, - other_conversion_event_lookback_window=resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS, - reporting_attribution_model=resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN, - ads_web_conversion_data_export_scope=resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET, - ) + return_value = analytics_admin.BatchCreateAccessBindingsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AttributionSettings.pb(return_value) + return_value = analytics_admin.BatchCreateAccessBindingsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_attribution_settings(request) + response = client.batch_create_access_bindings(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AttributionSettings) - assert response.name == "name_value" - assert ( - response.acquisition_conversion_event_lookback_window - == resources.AttributionSettings.AcquisitionConversionEventLookbackWindow.ACQUISITION_CONVERSION_EVENT_LOOKBACK_WINDOW_7_DAYS - ) - assert ( - response.other_conversion_event_lookback_window - == resources.AttributionSettings.OtherConversionEventLookbackWindow.OTHER_CONVERSION_EVENT_LOOKBACK_WINDOW_30_DAYS - ) - assert ( - response.reporting_attribution_model - == resources.AttributionSettings.ReportingAttributionModel.PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN - ) - assert ( - response.ads_web_conversion_data_export_scope - == resources.AttributionSettings.AdsWebConversionDataExportScope.NOT_SELECTED_YET - ) + assert isinstance(response, analytics_admin.BatchCreateAccessBindingsResponse) -def test_update_attribution_settings_rest_use_cached_wrapped_rpc(): +def test_batch_create_access_bindings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -85220,7 +95192,7 @@ def test_update_attribution_settings_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_attribution_settings + client._transport.batch_create_access_bindings in client._transport._wrapped_methods ) @@ -85230,28 +95202,29 @@ def test_update_attribution_settings_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_attribution_settings + client._transport.batch_create_access_bindings ] = mock_rpc request = {} - client.update_attribution_settings(request) + client.batch_create_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_attribution_settings(request) + client.batch_create_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_attribution_settings_rest_required_fields( - request_type=analytics_admin.UpdateAttributionSettingsRequest, +def test_batch_create_access_bindings_rest_required_fields( + request_type=analytics_admin.BatchCreateAccessBindingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -85262,19 +95235,21 @@ def test_update_attribution_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_attribution_settings._get_unset_required_fields(jsonified_request) + ).batch_create_access_bindings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_attribution_settings._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).batch_create_access_bindings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -85283,7 +95258,7 @@ def test_update_attribution_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.AttributionSettings() + return_value = analytics_admin.BatchCreateAccessBindingsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -85295,7 +95270,7 @@ def test_update_attribution_settings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -85305,38 +95280,40 @@ def test_update_attribution_settings_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AttributionSettings.pb(return_value) + return_value = analytics_admin.BatchCreateAccessBindingsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_attribution_settings(request) + response = client.batch_create_access_bindings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_attribution_settings_rest_unset_required_fields(): +def 
test_batch_create_access_bindings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_attribution_settings._get_unset_required_fields({}) + unset_fields = transport.batch_create_access_bindings._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("updateMask",)) + set(()) & set( ( - "attributionSettings", - "updateMask", + "parent", + "requests", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_attribution_settings_rest_interceptors(null_interceptor): +def test_batch_create_access_bindings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -85350,15 +95327,15 @@ def test_update_attribution_settings_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_update_attribution_settings", + "post_batch_create_access_bindings", ) as post, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_update_attribution_settings", + "pre_batch_create_access_bindings", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateAttributionSettingsRequest.pb( - analytics_admin.UpdateAttributionSettingsRequest() + pb_message = analytics_admin.BatchCreateAccessBindingsRequest.pb( + analytics_admin.BatchCreateAccessBindingsRequest() ) transcode.return_value = { "method": "post", @@ -85370,19 +95347,21 @@ def test_update_attribution_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.AttributionSettings.to_json( - resources.AttributionSettings() + req.return_value._content = ( + 
analytics_admin.BatchCreateAccessBindingsResponse.to_json( + analytics_admin.BatchCreateAccessBindingsResponse() + ) ) - request = analytics_admin.UpdateAttributionSettingsRequest() + request = analytics_admin.BatchCreateAccessBindingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.AttributionSettings() + post.return_value = analytics_admin.BatchCreateAccessBindingsResponse() - client.update_attribution_settings( + client.batch_create_access_bindings( request, metadata=[ ("key", "val"), @@ -85394,95 +95373,32 @@ def test_update_attribution_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_attribution_settings_rest_bad_request( +def test_batch_create_access_bindings_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.UpdateAttributionSettingsRequest, -): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "attribution_settings": {"name": "properties/sample1/attributionSettings"} - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_attribution_settings(request) - - -def test_update_attribution_settings_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = resources.AttributionSettings() - - # get arguments that satisfy an http rule for this method - sample_request = { - "attribution_settings": {"name": "properties/sample1/attributionSettings"} - } - - # get truthy value for each flattened field - mock_args = dict( - attribution_settings=resources.AttributionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.AttributionSettings.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_attribution_settings(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{attribution_settings.name=properties/*/attributionSettings}" - % client.transport._host, - args[1], - ) - - -def test_update_attribution_settings_rest_flattened_error(transport: str = "rest"): + request_type=analytics_admin.BatchCreateAccessBindingsRequest, +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_attribution_settings( - analytics_admin.UpdateAttributionSettingsRequest(), - attribution_settings=resources.AttributionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = {"parent": "accounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_access_bindings(request) -def test_update_attribution_settings_rest_error(): +def test_batch_create_access_bindings_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -85491,44 +95407,41 @@ def test_update_attribution_settings_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.RunAccessReportRequest, + analytics_admin.BatchGetAccessBindingsRequest, dict, ], ) -def test_run_access_report_rest(request_type): +def test_batch_get_access_bindings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"entity": "properties/sample1"} + request_init = {"parent": "accounts/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.RunAccessReportResponse( - row_count=992, - ) + return_value = analytics_admin.BatchGetAccessBindingsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.RunAccessReportResponse.pb(return_value) + return_value = analytics_admin.BatchGetAccessBindingsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.run_access_report(request) + response = client.batch_get_access_bindings(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.RunAccessReportResponse) - assert response.row_count == 992 + assert isinstance(response, analytics_admin.BatchGetAccessBindingsResponse) -def test_run_access_report_rest_use_cached_wrapped_rpc(): +def test_batch_get_access_bindings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -85542,7 +95455,10 @@ def test_run_access_report_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.run_access_report in client._transport._wrapped_methods + assert ( + client._transport.batch_get_access_bindings + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -85550,24 +95466,132 @@ def test_run_access_report_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.run_access_report + client._transport.batch_get_access_bindings ] = mock_rpc request = {} - client.run_access_report(request) + client.batch_get_access_bindings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.run_access_report(request) + client.batch_get_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +def test_batch_get_access_bindings_rest_required_fields( + request_type=analytics_admin.BatchGetAccessBindingsRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["names"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "names" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_get_access_bindings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "names" in jsonified_request + assert jsonified_request["names"] == request_init["names"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["names"] = "names_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_get_access_bindings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("names",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "names" in jsonified_request + assert jsonified_request["names"] == "names_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_admin.BatchGetAccessBindingsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_admin.BatchGetAccessBindingsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_get_access_bindings(request) + + expected_params = [ + ( + "names", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_get_access_bindings_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_get_access_bindings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("names",)) + & set( + ( + "parent", + "names", + ) + ) + ) + + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_access_report_rest_interceptors(null_interceptor): +def test_batch_get_access_bindings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -85580,14 +95604,15 @@ def test_run_access_report_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_run_access_report" + transports.AnalyticsAdminServiceRestInterceptor, + "post_batch_get_access_bindings", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_run_access_report" + transports.AnalyticsAdminServiceRestInterceptor, 
"pre_batch_get_access_bindings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.RunAccessReportRequest.pb( - analytics_admin.RunAccessReportRequest() + pb_message = analytics_admin.BatchGetAccessBindingsRequest.pb( + analytics_admin.BatchGetAccessBindingsRequest() ) transcode.return_value = { "method": "post", @@ -85599,19 +95624,21 @@ def test_run_access_report_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.RunAccessReportResponse.to_json( - analytics_admin.RunAccessReportResponse() + req.return_value._content = ( + analytics_admin.BatchGetAccessBindingsResponse.to_json( + analytics_admin.BatchGetAccessBindingsResponse() + ) ) - request = analytics_admin.RunAccessReportRequest() + request = analytics_admin.BatchGetAccessBindingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.RunAccessReportResponse() + post.return_value = analytics_admin.BatchGetAccessBindingsResponse() - client.run_access_report( + client.batch_get_access_bindings( request, metadata=[ ("key", "val"), @@ -85623,8 +95650,8 @@ def test_run_access_report_rest_interceptors(null_interceptor): post.assert_called_once() -def test_run_access_report_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.RunAccessReportRequest +def test_batch_get_access_bindings_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.BatchGetAccessBindingsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -85632,7 +95659,7 @@ def test_run_access_report_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"entity": "properties/sample1"} + request_init = {"parent": "accounts/sample1"} request = request_type(**request_init) # Mock 
the http request call within the method and fake a BadRequest error. @@ -85644,10 +95671,10 @@ def test_run_access_report_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.run_access_report(request) + client.batch_get_access_bindings(request) -def test_run_access_report_rest_error(): +def test_batch_get_access_bindings_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -85656,11 +95683,11 @@ def test_run_access_report_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateAccessBindingRequest, + analytics_admin.BatchUpdateAccessBindingsRequest, dict, ], ) -def test_create_access_binding_rest(request_type): +def test_batch_update_access_bindings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -85668,109 +95695,31 @@ def test_create_access_binding_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "accounts/sample1"} - request_init["access_binding"] = { - "user": "user_value", - "name": "name_value", - "roles": ["roles_value1", "roles_value2"], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateAccessBindingRequest.meta.fields[ - "access_binding" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["access_binding"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["access_binding"][field])): - del request_init["access_binding"][field][i][subfield] - else: - 
del request_init["access_binding"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.AccessBinding( - name="name_value", - roles=["roles_value"], - user="user_value", - ) + return_value = analytics_admin.BatchUpdateAccessBindingsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AccessBinding.pb(return_value) + return_value = analytics_admin.BatchUpdateAccessBindingsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_access_binding(request) + response = client.batch_update_access_bindings(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AccessBinding) - assert response.name == "name_value" - assert response.roles == ["roles_value"] + assert isinstance(response, analytics_admin.BatchUpdateAccessBindingsResponse) -def test_create_access_binding_rest_use_cached_wrapped_rpc(): +def test_batch_update_access_bindings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -85785,7 +95734,7 @@ def test_create_access_binding_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_access_binding + client._transport.batch_update_access_bindings in client._transport._wrapped_methods ) @@ -85795,24 +95744,24 @@ def test_create_access_binding_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_access_binding + client._transport.batch_update_access_bindings ] = mock_rpc request = {} - client.create_access_binding(request) + client.batch_update_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_access_binding(request) + client.batch_update_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_access_binding_rest_required_fields( - request_type=analytics_admin.CreateAccessBindingRequest, +def test_batch_update_access_bindings_rest_required_fields( + request_type=analytics_admin.BatchUpdateAccessBindingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -85828,7 +95777,7 @@ def test_create_access_binding_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_access_binding._get_unset_required_fields(jsonified_request) + ).batch_update_access_bindings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -85837,7 +95786,7 @@ def test_create_access_binding_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_access_binding._get_unset_required_fields(jsonified_request) + ).batch_update_access_bindings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -85851,7 +95800,7 @@ def test_create_access_binding_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.AccessBinding() + return_value = analytics_admin.BatchUpdateAccessBindingsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -85873,38 +95822,40 @@ def test_create_access_binding_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AccessBinding.pb(return_value) + return_value = analytics_admin.BatchUpdateAccessBindingsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_access_binding(request) + response = client.batch_update_access_bindings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_access_binding_rest_unset_required_fields(): +def test_batch_update_access_bindings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_access_binding._get_unset_required_fields({}) + unset_fields = transport.batch_update_access_bindings._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "parent", - "accessBinding", + "requests", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_access_binding_rest_interceptors(null_interceptor): +def test_batch_update_access_bindings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -85917,14 +95868,16 @@ def test_create_access_binding_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_access_binding" + transports.AnalyticsAdminServiceRestInterceptor, + 
"post_batch_update_access_bindings", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_access_binding" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_batch_update_access_bindings", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateAccessBindingRequest.pb( - analytics_admin.CreateAccessBindingRequest() + pb_message = analytics_admin.BatchUpdateAccessBindingsRequest.pb( + analytics_admin.BatchUpdateAccessBindingsRequest() ) transcode.return_value = { "method": "post", @@ -85936,19 +95889,21 @@ def test_create_access_binding_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.AccessBinding.to_json( - resources.AccessBinding() + req.return_value._content = ( + analytics_admin.BatchUpdateAccessBindingsResponse.to_json( + analytics_admin.BatchUpdateAccessBindingsResponse() + ) ) - request = analytics_admin.CreateAccessBindingRequest() + request = analytics_admin.BatchUpdateAccessBindingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.AccessBinding() + post.return_value = analytics_admin.BatchUpdateAccessBindingsResponse() - client.create_access_binding( + client.batch_update_access_bindings( request, metadata=[ ("key", "val"), @@ -85960,8 +95915,9 @@ def test_create_access_binding_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_access_binding_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateAccessBindingRequest +def test_batch_update_access_bindings_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.BatchUpdateAccessBindingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -85981,68 +95937,10 @@ def 
test_create_access_binding_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_access_binding(request) - - -def test_create_access_binding_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = resources.AccessBinding() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "accounts/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - access_binding=resources.AccessBinding(user="user_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.AccessBinding.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_access_binding(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=accounts/*}/accessBindings" % client.transport._host, - args[1], - ) - - -def test_create_access_binding_rest_flattened_error(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_access_binding( - analytics_admin.CreateAccessBindingRequest(), - parent="parent_value", - access_binding=resources.AccessBinding(user="user_value"), - ) + client.batch_update_access_bindings(request) -def test_create_access_binding_rest_error(): +def test_batch_update_access_bindings_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -86051,47 +95949,39 @@ def test_create_access_binding_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetAccessBindingRequest, + analytics_admin.BatchDeleteAccessBindingsRequest, dict, ], ) -def test_get_access_binding_rest(request_type): +def test_batch_delete_access_bindings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "accounts/sample1/accessBindings/sample2"} + request_init = {"parent": "accounts/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.AccessBinding( - name="name_value", - roles=["roles_value"], - user="user_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.AccessBinding.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_access_binding(request) + response = client.batch_delete_access_bindings(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AccessBinding) - assert response.name == "name_value" - assert response.roles == ["roles_value"] + assert response is None -def test_get_access_binding_rest_use_cached_wrapped_rpc(): +def test_batch_delete_access_bindings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -86106,7 +95996,8 @@ def test_get_access_binding_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_access_binding in client._transport._wrapped_methods + client._transport.batch_delete_access_bindings + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -86115,29 +96006,29 @@ def test_get_access_binding_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_access_binding + client._transport.batch_delete_access_bindings ] = mock_rpc request = {} - client.get_access_binding(request) + client.batch_delete_access_bindings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_access_binding(request) + client.batch_delete_access_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_access_binding_rest_required_fields( - request_type=analytics_admin.GetAccessBindingRequest, +def test_batch_delete_access_bindings_rest_required_fields( + request_type=analytics_admin.BatchDeleteAccessBindingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -86148,21 +96039,21 @@ def test_get_access_binding_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_access_binding._get_unset_required_fields(jsonified_request) + ).batch_delete_access_bindings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_access_binding._get_unset_required_fields(jsonified_request) + ).batch_delete_access_bindings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -86171,7 +96062,7 @@ def test_get_access_binding_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned 
response. - return_value = resources.AccessBinding() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -86183,39 +96074,45 @@ def test_get_access_binding_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.AccessBinding.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_access_binding(request) + response = client.batch_delete_access_bindings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_access_binding_rest_unset_required_fields(): +def test_batch_delete_access_bindings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_access_binding._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.batch_delete_access_bindings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_access_binding_rest_interceptors(null_interceptor): +def test_batch_delete_access_bindings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -86228,14 +96125,12 @@ def test_get_access_binding_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_access_binding" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_access_binding" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_batch_delete_access_bindings", ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.GetAccessBindingRequest.pb( - analytics_admin.GetAccessBindingRequest() + pb_message = analytics_admin.BatchDeleteAccessBindingsRequest.pb( + analytics_admin.BatchDeleteAccessBindingsRequest() ) transcode.return_value = { "method": "post", @@ -86247,19 +96142,15 @@ def test_get_access_binding_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.AccessBinding.to_json( - resources.AccessBinding() - ) - request = analytics_admin.GetAccessBindingRequest() + request = analytics_admin.BatchDeleteAccessBindingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.AccessBinding() - client.get_access_binding( + client.batch_delete_access_bindings( request, metadata=[ ("key", "val"), @@ -86268,11 +96159,11 @@ def test_get_access_binding_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_get_access_binding_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetAccessBindingRequest +def test_batch_delete_access_bindings_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.BatchDeleteAccessBindingsRequest, ): client = AnalyticsAdminServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -86280,7 +96171,7 @@ def test_get_access_binding_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "accounts/sample1/accessBindings/sample2"} + request_init = {"parent": "accounts/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -86292,66 +96183,10 @@ def test_get_access_binding_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_access_binding(request) - - -def test_get_access_binding_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = resources.AccessBinding() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "accounts/sample1/accessBindings/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.AccessBinding.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_access_binding(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=accounts/*/accessBindings/*}" % client.transport._host, - args[1], - ) - - -def test_get_access_binding_rest_flattened_error(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_access_binding( - analytics_admin.GetAccessBindingRequest(), - name="name_value", - ) + client.batch_delete_access_bindings(request) -def test_get_access_binding_rest_error(): +def test_batch_delete_access_bindings_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -86360,123 +96195,52 @@ def test_get_access_binding_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateAccessBindingRequest, + analytics_admin.GetExpandedDataSetRequest, dict, ], ) -def test_update_access_binding_rest(request_type): +def test_get_expanded_data_set_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "access_binding": {"name": "accounts/sample1/accessBindings/sample2"} - } - request_init["access_binding"] = { - "user": "user_value", - "name": "accounts/sample1/accessBindings/sample2", - "roles": ["roles_value1", "roles_value2"], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateAccessBindingRequest.meta.fields[ - "access_binding" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["access_binding"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime 
version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["access_binding"][field])): - del request_init["access_binding"][field][i][subfield] - else: - del request_init["access_binding"][field][subfield] + request_init = {"name": "properties/sample1/expandedDataSets/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.AccessBinding( + return_value = expanded_data_set.ExpandedDataSet( name="name_value", - roles=["roles_value"], - user="user_value", + display_name="display_name_value", + description="description_value", + dimension_names=["dimension_names_value"], + metric_names=["metric_names_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AccessBinding.pb(return_value) + return_value = expanded_data_set.ExpandedDataSet.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_access_binding(request) + response = client.get_expanded_data_set(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AccessBinding) + assert isinstance(response, expanded_data_set.ExpandedDataSet) assert response.name == "name_value" - assert response.roles == ["roles_value"] + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.dimension_names == ["dimension_names_value"] + assert response.metric_names == ["metric_names_value"] -def test_update_access_binding_rest_use_cached_wrapped_rpc(): +def test_get_expanded_data_set_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -86491,7 +96255,7 @@ def test_update_access_binding_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_access_binding + client._transport.get_expanded_data_set in client._transport._wrapped_methods ) @@ -86501,28 +96265,29 @@ def test_update_access_binding_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_access_binding + client._transport.get_expanded_data_set ] = mock_rpc request = {} - client.update_access_binding(request) + client.get_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_access_binding(request) + client.get_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_access_binding_rest_required_fields( - request_type=analytics_admin.UpdateAccessBindingRequest, +def test_get_expanded_data_set_rest_required_fields( + request_type=analytics_admin.GetExpandedDataSetRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -86533,17 +96298,21 @@ def test_update_access_binding_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_access_binding._get_unset_required_fields(jsonified_request) + ).get_expanded_data_set._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_access_binding._get_unset_required_fields(jsonified_request) + ).get_expanded_data_set._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -86552,7 +96321,7 @@ def test_update_access_binding_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.AccessBinding() + return_value = expanded_data_set.ExpandedDataSet() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -86564,40 +96333,39 @@ def test_update_access_binding_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AccessBinding.pb(return_value) + return_value = expanded_data_set.ExpandedDataSet.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_access_binding(request) + response = client.get_expanded_data_set(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_access_binding_rest_unset_required_fields(): +def test_get_expanded_data_set_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_access_binding._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("accessBinding",))) + unset_fields = transport.get_expanded_data_set._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_access_binding_rest_interceptors(null_interceptor): +def test_get_expanded_data_set_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -86610,14 +96378,14 @@ def test_update_access_binding_rest_interceptors(null_interceptor): ) as 
req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_update_access_binding" + transports.AnalyticsAdminServiceRestInterceptor, "post_get_expanded_data_set" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_access_binding" + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_expanded_data_set" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateAccessBindingRequest.pb( - analytics_admin.UpdateAccessBindingRequest() + pb_message = analytics_admin.GetExpandedDataSetRequest.pb( + analytics_admin.GetExpandedDataSetRequest() ) transcode.return_value = { "method": "post", @@ -86629,19 +96397,19 @@ def test_update_access_binding_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.AccessBinding.to_json( - resources.AccessBinding() + req.return_value._content = expanded_data_set.ExpandedDataSet.to_json( + expanded_data_set.ExpandedDataSet() ) - request = analytics_admin.UpdateAccessBindingRequest() + request = analytics_admin.GetExpandedDataSetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.AccessBinding() + post.return_value = expanded_data_set.ExpandedDataSet() - client.update_access_binding( + client.get_expanded_data_set( request, metadata=[ ("key", "val"), @@ -86653,8 +96421,8 @@ def test_update_access_binding_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_access_binding_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.UpdateAccessBindingRequest +def test_get_expanded_data_set_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetExpandedDataSetRequest ): client = AnalyticsAdminServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -86662,9 +96430,7 @@ def test_update_access_binding_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "access_binding": {"name": "accounts/sample1/accessBindings/sample2"} - } + request_init = {"name": "properties/sample1/expandedDataSets/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -86676,10 +96442,10 @@ def test_update_access_binding_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_access_binding(request) + client.get_expanded_data_set(request) -def test_update_access_binding_rest_flattened(): +def test_get_expanded_data_set_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -86688,16 +96454,14 @@ def test_update_access_binding_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.AccessBinding() + return_value = expanded_data_set.ExpandedDataSet() # get arguments that satisfy an http rule for this method - sample_request = { - "access_binding": {"name": "accounts/sample1/accessBindings/sample2"} - } + sample_request = {"name": "properties/sample1/expandedDataSets/sample2"} # get truthy value for each flattened field mock_args = dict( - access_binding=resources.AccessBinding(user="user_value"), + name="name_value", ) mock_args.update(sample_request) @@ -86705,25 +96469,25 @@ def test_update_access_binding_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AccessBinding.pb(return_value) + return_value = expanded_data_set.ExpandedDataSet.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_access_binding(**mock_args) + client.get_expanded_data_set(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{access_binding.name=accounts/*/accessBindings/*}" + "%s/v1alpha/{name=properties/*/expandedDataSets/*}" % client.transport._host, args[1], ) -def test_update_access_binding_rest_flattened_error(transport: str = "rest"): +def test_get_expanded_data_set_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -86732,13 +96496,13 @@ def test_update_access_binding_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_access_binding( - analytics_admin.UpdateAccessBindingRequest(), - access_binding=resources.AccessBinding(user="user_value"), + client.get_expanded_data_set( + analytics_admin.GetExpandedDataSetRequest(), + name="name_value", ) -def test_update_access_binding_rest_error(): +def test_get_expanded_data_set_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -86747,39 +96511,44 @@ def test_update_access_binding_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteAccessBindingRequest, + analytics_admin.ListExpandedDataSetsRequest, dict, ], ) -def test_delete_access_binding_rest(request_type): +def test_list_expanded_data_sets_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "accounts/sample1/accessBindings/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = analytics_admin.ListExpandedDataSetsResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = analytics_admin.ListExpandedDataSetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_access_binding(request) + response = client.list_expanded_data_sets(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListExpandedDataSetsPager) + assert response.next_page_token == "next_page_token_value" -def test_delete_access_binding_rest_use_cached_wrapped_rpc(): +def test_list_expanded_data_sets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -86794,7 +96563,7 @@ def test_delete_access_binding_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_access_binding + client._transport.list_expanded_data_sets in client._transport._wrapped_methods ) @@ -86804,29 +96573,29 @@ def test_delete_access_binding_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_access_binding + client._transport.list_expanded_data_sets ] = mock_rpc request = {} - client.delete_access_binding(request) + client.list_expanded_data_sets(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_access_binding(request) + client.list_expanded_data_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_access_binding_rest_required_fields( - request_type=analytics_admin.DeleteAccessBindingRequest, +def test_list_expanded_data_sets_rest_required_fields( + request_type=analytics_admin.ListExpandedDataSetsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -86837,21 +96606,28 @@ def test_delete_access_binding_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_access_binding._get_unset_required_fields(jsonified_request) + ).list_expanded_data_sets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_access_binding._get_unset_required_fields(jsonified_request) + ).list_expanded_data_sets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -86860,7 +96636,7 @@ def test_delete_access_binding_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = analytics_admin.ListExpandedDataSetsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -86872,36 +96648,47 @@ def test_delete_access_binding_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = analytics_admin.ListExpandedDataSetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_access_binding(request) + response = client.list_expanded_data_sets(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_access_binding_rest_unset_required_fields(): +def test_list_expanded_data_sets_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_access_binding._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_expanded_data_sets._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_access_binding_rest_interceptors(null_interceptor): +def test_list_expanded_data_sets_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -86914,11 +96701,14 @@ def test_delete_access_binding_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_access_binding" + transports.AnalyticsAdminServiceRestInterceptor, "post_list_expanded_data_sets" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_expanded_data_sets" ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteAccessBindingRequest.pb( - analytics_admin.DeleteAccessBindingRequest() + post.assert_not_called() + pb_message = analytics_admin.ListExpandedDataSetsRequest.pb( + analytics_admin.ListExpandedDataSetsRequest() ) transcode.return_value = { "method": "post", @@ -86930,15 +96720,21 @@ def test_delete_access_binding_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = ( + analytics_admin.ListExpandedDataSetsResponse.to_json( + analytics_admin.ListExpandedDataSetsResponse() + ) + ) - request = analytics_admin.DeleteAccessBindingRequest() + request = analytics_admin.ListExpandedDataSetsRequest() metadata = [ ("key", "val"), ("cephalopod", 
"squid"), ] pre.return_value = request, metadata + post.return_value = analytics_admin.ListExpandedDataSetsResponse() - client.delete_access_binding( + client.list_expanded_data_sets( request, metadata=[ ("key", "val"), @@ -86947,10 +96743,11 @@ def test_delete_access_binding_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_access_binding_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeleteAccessBindingRequest +def test_list_expanded_data_sets_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListExpandedDataSetsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -86958,7 +96755,7 @@ def test_delete_access_binding_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "accounts/sample1/accessBindings/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -86970,10 +96767,10 @@ def test_delete_access_binding_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_access_binding(request) + client.list_expanded_data_sets(request) -def test_delete_access_binding_rest_flattened(): +def test_list_expanded_data_sets_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -86982,98 +96779,259 @@ def test_delete_access_binding_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = analytics_admin.ListExpandedDataSetsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "accounts/sample1/accessBindings/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = analytics_admin.ListExpandedDataSetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_access_binding(**mock_args) + client.list_expanded_data_sets(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=accounts/*/accessBindings/*}" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/expandedDataSets" + % client.transport._host, args[1], ) -def test_delete_access_binding_rest_flattened_error(transport: str = "rest"): +def test_list_expanded_data_sets_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_expanded_data_sets( + analytics_admin.ListExpandedDataSetsRequest(), + parent="parent_value", + ) + + +def test_list_expanded_data_sets_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_access_binding( - analytics_admin.DeleteAccessBindingRequest(), - name="name_value", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), + ], + next_page_token="abc", + ), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[], + next_page_token="def", + ), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), + ], + next_page_token="ghi", + ), + analytics_admin.ListExpandedDataSetsResponse( + expanded_data_sets=[ + expanded_data_set.ExpandedDataSet(), + expanded_data_set.ExpandedDataSet(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListExpandedDataSetsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = 
{"parent": "properties/sample1"} + + pager = client.list_expanded_data_sets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, expanded_data_set.ExpandedDataSet) for i in results) + + pages = list(client.list_expanded_data_sets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.CreateExpandedDataSetRequest, + dict, + ], +) +def test_create_expanded_data_set_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request_init["expanded_data_set"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "dimension_names": ["dimension_names_value1", "dimension_names_value2"], + "metric_names": ["metric_names_value1", "metric_names_value2"], + "dimension_filter_expression": { + "and_group": {"filter_expressions": {}}, + "not_expression": {}, + "filter": { + "string_filter": { + "match_type": 1, + "value": "value_value", + "case_sensitive": True, + }, + "in_list_filter": { + "values": ["values_value1", "values_value2"], + "case_sensitive": True, + }, + "field_name": "field_name_value", + }, + }, + "data_collection_start_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreateExpandedDataSetRequest.meta.fields[ + "expanded_data_set" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_delete_access_binding_rest_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + subfields_not_in_runtime = [] -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.ListAccessBindingsRequest, - dict, - ], -) -def test_list_access_bindings_rest(request_type): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["expanded_data_set"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = 
value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # send a request that will satisfy transcoding - request_init = {"parent": "accounts/sample1"} + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["expanded_data_set"][field])): + del request_init["expanded_data_set"][field][i][subfield] + else: + del request_init["expanded_data_set"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListAccessBindingsResponse( - next_page_token="next_page_token_value", + return_value = gaa_expanded_data_set.ExpandedDataSet( + name="name_value", + display_name="display_name_value", + description="description_value", + dimension_names=["dimension_names_value"], + metric_names=["metric_names_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListAccessBindingsResponse.pb(return_value) + return_value = gaa_expanded_data_set.ExpandedDataSet.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_access_bindings(request) + response = client.create_expanded_data_set(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAccessBindingsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gaa_expanded_data_set.ExpandedDataSet) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.dimension_names == ["dimension_names_value"] + assert response.metric_names == ["metric_names_value"] -def test_list_access_bindings_rest_use_cached_wrapped_rpc(): +def test_create_expanded_data_set_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -87088,7 +97046,8 @@ def test_list_access_bindings_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_access_bindings in client._transport._wrapped_methods + client._transport.create_expanded_data_set + in 
client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -87097,24 +97056,24 @@ def test_list_access_bindings_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_access_bindings + client._transport.create_expanded_data_set ] = mock_rpc request = {} - client.list_access_bindings(request) + client.create_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_access_bindings(request) + client.create_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_access_bindings_rest_required_fields( - request_type=analytics_admin.ListAccessBindingsRequest, +def test_create_expanded_data_set_rest_required_fields( + request_type=analytics_admin.CreateExpandedDataSetRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -87130,7 +97089,7 @@ def test_list_access_bindings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_access_bindings._get_unset_required_fields(jsonified_request) + ).create_expanded_data_set._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -87139,14 +97098,7 @@ def test_list_access_bindings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_access_bindings._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).create_expanded_data_set._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -87160,7 +97112,7 @@ def test_list_access_bindings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListAccessBindingsResponse() + return_value = gaa_expanded_data_set.ExpandedDataSet() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -87172,47 +97124,48 @@ def test_list_access_bindings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListAccessBindingsResponse.pb(return_value) + return_value = gaa_expanded_data_set.ExpandedDataSet.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_access_bindings(request) + response = client.create_expanded_data_set(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_access_bindings_rest_unset_required_fields(): +def test_create_expanded_data_set_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.list_access_bindings._get_unset_required_fields({}) + unset_fields = transport.create_expanded_data_set._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "pageSize", - "pageToken", + "parent", + "expandedDataSet", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_access_bindings_rest_interceptors(null_interceptor): +def test_create_expanded_data_set_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -87225,14 +97178,14 @@ def test_list_access_bindings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_access_bindings" + transports.AnalyticsAdminServiceRestInterceptor, "post_create_expanded_data_set" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_access_bindings" + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_expanded_data_set" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListAccessBindingsRequest.pb( - analytics_admin.ListAccessBindingsRequest() + pb_message = analytics_admin.CreateExpandedDataSetRequest.pb( + analytics_admin.CreateExpandedDataSetRequest() ) transcode.return_value = { "method": "post", @@ -87244,19 +97197,19 @@ def test_list_access_bindings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.ListAccessBindingsResponse.to_json( - analytics_admin.ListAccessBindingsResponse() + req.return_value._content = gaa_expanded_data_set.ExpandedDataSet.to_json( + gaa_expanded_data_set.ExpandedDataSet() ) - request = analytics_admin.ListAccessBindingsRequest() + request 
= analytics_admin.CreateExpandedDataSetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListAccessBindingsResponse() + post.return_value = gaa_expanded_data_set.ExpandedDataSet() - client.list_access_bindings( + client.create_expanded_data_set( request, metadata=[ ("key", "val"), @@ -87268,8 +97221,8 @@ def test_list_access_bindings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_access_bindings_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListAccessBindingsRequest +def test_create_expanded_data_set_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateExpandedDataSetRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -87277,7 +97230,7 @@ def test_list_access_bindings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "accounts/sample1"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -87289,10 +97242,10 @@ def test_list_access_bindings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_access_bindings(request) + client.create_expanded_data_set(request) -def test_list_access_bindings_rest_flattened(): +def test_create_expanded_data_set_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -87301,14 +97254,15 @@ def test_list_access_bindings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListAccessBindingsResponse() + return_value = gaa_expanded_data_set.ExpandedDataSet() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "accounts/sample1"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", + expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), ) mock_args.update(sample_request) @@ -87316,24 +97270,25 @@ def test_list_access_bindings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListAccessBindingsResponse.pb(return_value) + return_value = gaa_expanded_data_set.ExpandedDataSet.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_access_bindings(**mock_args) + client.create_expanded_data_set(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=accounts/*}/accessBindings" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/expandedDataSets" + % client.transport._host, args[1], ) -def test_list_access_bindings_rest_flattened_error(transport: str = "rest"): +def test_create_expanded_data_set_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -87342,115 +97297,163 @@ def test_list_access_bindings_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_access_bindings( - analytics_admin.ListAccessBindingsRequest(), + client.create_expanded_data_set( + analytics_admin.CreateExpandedDataSetRequest(), parent="parent_value", + expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), ) -def test_list_access_bindings_rest_pager(transport: str = "rest"): +def test_create_expanded_data_set_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - resources.AccessBinding(), - resources.AccessBinding(), - ], - next_page_token="abc", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[], - next_page_token="def", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAccessBindingsResponse( - access_bindings=[ - resources.AccessBinding(), - resources.AccessBinding(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListAccessBindingsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "accounts/sample1"} - - pager = 
client.list_access_bindings(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.AccessBinding) for i in results) - - pages = list(client.list_access_bindings(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - analytics_admin.BatchCreateAccessBindingsRequest, + analytics_admin.UpdateExpandedDataSetRequest, dict, ], ) -def test_batch_create_access_bindings_rest(request_type): +def test_update_expanded_data_set_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "accounts/sample1"} + request_init = { + "expanded_data_set": {"name": "properties/sample1/expandedDataSets/sample2"} + } + request_init["expanded_data_set"] = { + "name": "properties/sample1/expandedDataSets/sample2", + "display_name": "display_name_value", + "description": "description_value", + "dimension_names": ["dimension_names_value1", "dimension_names_value2"], + "metric_names": ["metric_names_value1", "metric_names_value2"], + "dimension_filter_expression": { + "and_group": {"filter_expressions": {}}, + "not_expression": {}, + "filter": { + "string_filter": { + "match_type": 1, + "value": "value_value", + "case_sensitive": True, + }, + "in_list_filter": { + "values": ["values_value1", "values_value2"], + "case_sensitive": True, + }, + "field_name": "field_name_value", + }, + }, + "data_collection_start_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateExpandedDataSetRequest.meta.fields[ + "expanded_data_set" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["expanded_data_set"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the 
runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["expanded_data_set"][field])): + del request_init["expanded_data_set"][field][i][subfield] + else: + del request_init["expanded_data_set"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.BatchCreateAccessBindingsResponse() + return_value = gaa_expanded_data_set.ExpandedDataSet( + name="name_value", + display_name="display_name_value", + description="description_value", + dimension_names=["dimension_names_value"], + metric_names=["metric_names_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.BatchCreateAccessBindingsResponse.pb( - return_value - ) + return_value = gaa_expanded_data_set.ExpandedDataSet.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_create_access_bindings(request) + response = client.update_expanded_data_set(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.BatchCreateAccessBindingsResponse) + assert isinstance(response, gaa_expanded_data_set.ExpandedDataSet) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.dimension_names == ["dimension_names_value"] + assert response.metric_names == ["metric_names_value"] -def test_batch_create_access_bindings_rest_use_cached_wrapped_rpc(): +def test_update_expanded_data_set_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -87465,7 +97468,7 @@ def test_batch_create_access_bindings_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.batch_create_access_bindings + client._transport.update_expanded_data_set in client._transport._wrapped_methods ) @@ -87475,29 +97478,28 @@ def test_batch_create_access_bindings_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.batch_create_access_bindings + client._transport.update_expanded_data_set ] = mock_rpc request = {} - client.batch_create_access_bindings(request) + client.update_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.batch_create_access_bindings(request) + client.update_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_batch_create_access_bindings_rest_required_fields( - request_type=analytics_admin.BatchCreateAccessBindingsRequest, +def test_update_expanded_data_set_rest_required_fields( + request_type=analytics_admin.UpdateExpandedDataSetRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -87508,21 +97510,19 @@ def test_batch_create_access_bindings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_create_access_bindings._get_unset_required_fields(jsonified_request) + ).update_expanded_data_set._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_create_access_bindings._get_unset_required_fields(jsonified_request) + ).update_expanded_data_set._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -87531,7 +97531,7 @@ def test_batch_create_access_bindings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.BatchCreateAccessBindingsResponse() + return_value = gaa_expanded_data_set.ExpandedDataSet() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -87543,7 +97543,7 @@ def test_batch_create_access_bindings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -87553,40 +97553,38 @@ def test_batch_create_access_bindings_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.BatchCreateAccessBindingsResponse.pb( - return_value - ) + return_value = gaa_expanded_data_set.ExpandedDataSet.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_create_access_bindings(request) + response = client.update_expanded_data_set(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_batch_create_access_bindings_rest_unset_required_fields(): +def test_update_expanded_data_set_rest_unset_required_fields(): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.batch_create_access_bindings._get_unset_required_fields({}) + unset_fields = transport.update_expanded_data_set._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("updateMask",)) & set( ( - "parent", - "requests", + "expandedDataSet", + "updateMask", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_create_access_bindings_rest_interceptors(null_interceptor): +def test_update_expanded_data_set_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -87599,16 +97597,14 @@ def test_batch_create_access_bindings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_batch_create_access_bindings", + transports.AnalyticsAdminServiceRestInterceptor, "post_update_expanded_data_set" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_batch_create_access_bindings", + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_expanded_data_set" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.BatchCreateAccessBindingsRequest.pb( - analytics_admin.BatchCreateAccessBindingsRequest() + pb_message = analytics_admin.UpdateExpandedDataSetRequest.pb( + analytics_admin.UpdateExpandedDataSetRequest() ) transcode.return_value = { "method": "post", @@ -87620,21 +97616,19 @@ def test_batch_create_access_bindings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.BatchCreateAccessBindingsResponse.to_json( - 
analytics_admin.BatchCreateAccessBindingsResponse() - ) + req.return_value._content = gaa_expanded_data_set.ExpandedDataSet.to_json( + gaa_expanded_data_set.ExpandedDataSet() ) - request = analytics_admin.BatchCreateAccessBindingsRequest() + request = analytics_admin.UpdateExpandedDataSetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.BatchCreateAccessBindingsResponse() + post.return_value = gaa_expanded_data_set.ExpandedDataSet() - client.batch_create_access_bindings( + client.update_expanded_data_set( request, metadata=[ ("key", "val"), @@ -87646,9 +97640,8 @@ def test_batch_create_access_bindings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_batch_create_access_bindings_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.BatchCreateAccessBindingsRequest, +def test_update_expanded_data_set_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateExpandedDataSetRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -87656,22 +97649,85 @@ def test_batch_create_access_bindings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "accounts/sample1"} + request_init = { + "expanded_data_set": {"name": "properties/sample1/expandedDataSets/sample2"} + } request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_create_access_bindings(request) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_expanded_data_set(request) + + +def test_update_expanded_data_set_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gaa_expanded_data_set.ExpandedDataSet() + + # get arguments that satisfy an http rule for this method + sample_request = { + "expanded_data_set": {"name": "properties/sample1/expandedDataSets/sample2"} + } + + # get truthy value for each flattened field + mock_args = dict( + expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gaa_expanded_data_set.ExpandedDataSet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_expanded_data_set(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{expanded_data_set.name=properties/*/expandedDataSets/*}" + % client.transport._host, + args[1], + ) + + +def test_update_expanded_data_set_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_expanded_data_set( + analytics_admin.UpdateExpandedDataSetRequest(), + expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) -def test_batch_create_access_bindings_rest_error(): +def test_update_expanded_data_set_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -87680,41 +97736,39 @@ def test_batch_create_access_bindings_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.BatchGetAccessBindingsRequest, + analytics_admin.DeleteExpandedDataSetRequest, dict, ], ) -def test_batch_get_access_bindings_rest(request_type): +def test_delete_expanded_data_set_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "accounts/sample1"} + request_init = {"name": "properties/sample1/expandedDataSets/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.BatchGetAccessBindingsResponse() + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.BatchGetAccessBindingsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_get_access_bindings(request) + response = client.delete_expanded_data_set(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.BatchGetAccessBindingsResponse) + assert response is None -def test_batch_get_access_bindings_rest_use_cached_wrapped_rpc(): +def test_delete_expanded_data_set_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -87729,7 +97783,7 @@ def test_batch_get_access_bindings_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.batch_get_access_bindings + client._transport.delete_expanded_data_set in client._transport._wrapped_methods ) @@ -87739,30 +97793,29 @@ def test_batch_get_access_bindings_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.batch_get_access_bindings + client._transport.delete_expanded_data_set ] = mock_rpc request = {} - client.batch_get_access_bindings(request) + client.delete_expanded_data_set(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.batch_get_access_bindings(request) + client.delete_expanded_data_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_batch_get_access_bindings_rest_required_fields( - request_type=analytics_admin.BatchGetAccessBindingsRequest, +def test_delete_expanded_data_set_rest_required_fields( + request_type=analytics_admin.DeleteExpandedDataSetRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["names"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -87770,32 +97823,24 @@ def test_batch_get_access_bindings_rest_required_fields( ) # verify fields with default values are dropped - assert "names" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_get_access_bindings._get_unset_required_fields(jsonified_request) + ).delete_expanded_data_set._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "names" in jsonified_request - assert jsonified_request["names"] == request_init["names"] - jsonified_request["parent"] = "parent_value" - jsonified_request["names"] = "names_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_get_access_bindings._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("names",)) + ).delete_expanded_data_set._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "names" in jsonified_request - assert jsonified_request["names"] == "names_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -87804,7 +97849,7 @@ def test_batch_get_access_bindings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.BatchGetAccessBindingsResponse() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -87816,55 +97861,36 @@ def test_batch_get_access_bindings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = analytics_admin.BatchGetAccessBindingsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_get_access_bindings(request) + response = client.delete_expanded_data_set(request) - expected_params = [ - ( - "names", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] 
assert expected_params == actual_params -def test_batch_get_access_bindings_rest_unset_required_fields(): +def test_delete_expanded_data_set_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.batch_get_access_bindings._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("names",)) - & set( - ( - "parent", - "names", - ) - ) - ) + unset_fields = transport.delete_expanded_data_set._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_get_access_bindings_rest_interceptors(null_interceptor): +def test_delete_expanded_data_set_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -87877,15 +97903,11 @@ def test_batch_get_access_bindings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_batch_get_access_bindings", - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_batch_get_access_bindings" + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_expanded_data_set" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.BatchGetAccessBindingsRequest.pb( - analytics_admin.BatchGetAccessBindingsRequest() + pb_message = analytics_admin.DeleteExpandedDataSetRequest.pb( + analytics_admin.DeleteExpandedDataSetRequest() ) transcode.return_value = { "method": "post", @@ -87897,21 +97919,15 @@ def test_batch_get_access_bindings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - 
analytics_admin.BatchGetAccessBindingsResponse.to_json( - analytics_admin.BatchGetAccessBindingsResponse() - ) - ) - request = analytics_admin.BatchGetAccessBindingsRequest() + request = analytics_admin.DeleteExpandedDataSetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.BatchGetAccessBindingsResponse() - client.batch_get_access_bindings( + client.delete_expanded_data_set( request, metadata=[ ("key", "val"), @@ -87920,11 +97936,10 @@ def test_batch_get_access_bindings_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_batch_get_access_bindings_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.BatchGetAccessBindingsRequest +def test_delete_expanded_data_set_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteExpandedDataSetRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -87932,7 +97947,7 @@ def test_batch_get_access_bindings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "accounts/sample1"} + request_init = {"name": "properties/sample1/expandedDataSets/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -87944,10 +97959,65 @@ def test_batch_get_access_bindings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.batch_get_access_bindings(request) + client.delete_expanded_data_set(request) -def test_batch_get_access_bindings_rest_error(): +def test_delete_expanded_data_set_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1/expandedDataSets/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_expanded_data_set(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/expandedDataSets/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_expanded_data_set_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_expanded_data_set( + analytics_admin.DeleteExpandedDataSetRequest(), + name="name_value", + ) + + +def test_delete_expanded_data_set_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -87956,43 +98026,52 @@ def test_batch_get_access_bindings_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.BatchUpdateAccessBindingsRequest, + analytics_admin.GetChannelGroupRequest, dict, ], ) -def test_batch_update_access_bindings_rest(request_type): +def test_get_channel_group_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "accounts/sample1"} + request_init = {"name": "properties/sample1/channelGroups/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.BatchUpdateAccessBindingsResponse() + return_value = channel_group.ChannelGroup( + name="name_value", + display_name="display_name_value", + description="description_value", + system_defined=True, + primary=True, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.BatchUpdateAccessBindingsResponse.pb( - return_value - ) + return_value = channel_group.ChannelGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_update_access_bindings(request) + response = client.get_channel_group(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.BatchUpdateAccessBindingsResponse) + assert isinstance(response, channel_group.ChannelGroup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.system_defined is True + assert response.primary is True -def test_batch_update_access_bindings_rest_use_cached_wrapped_rpc(): +def test_get_channel_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -88006,10 +98085,7 @@ def test_batch_update_access_bindings_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.batch_update_access_bindings - in client._transport._wrapped_methods - ) + assert client._transport.get_channel_group in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -88017,29 +98093,29 @@ def 
test_batch_update_access_bindings_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.batch_update_access_bindings + client._transport.get_channel_group ] = mock_rpc request = {} - client.batch_update_access_bindings(request) + client.get_channel_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.batch_update_access_bindings(request) + client.get_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_batch_update_access_bindings_rest_required_fields( - request_type=analytics_admin.BatchUpdateAccessBindingsRequest, +def test_get_channel_group_rest_required_fields( + request_type=analytics_admin.GetChannelGroupRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -88050,21 +98126,21 @@ def test_batch_update_access_bindings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_update_access_bindings._get_unset_required_fields(jsonified_request) + ).get_channel_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_update_access_bindings._get_unset_required_fields(jsonified_request) + ).get_channel_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - 
assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -88073,7 +98149,7 @@ def test_batch_update_access_bindings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.BatchUpdateAccessBindingsResponse() + return_value = channel_group.ChannelGroup() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -88085,50 +98161,39 @@ def test_batch_update_access_bindings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.BatchUpdateAccessBindingsResponse.pb( - return_value - ) + return_value = channel_group.ChannelGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_update_access_bindings(request) + response = client.get_channel_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_batch_update_access_bindings_rest_unset_required_fields(): +def test_get_channel_group_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.batch_update_access_bindings._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "requests", - ) - ) - ) + unset_fields = transport.get_channel_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_update_access_bindings_rest_interceptors(null_interceptor): +def test_get_channel_group_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -88141,16 +98206,14 @@ def test_batch_update_access_bindings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_batch_update_access_bindings", + transports.AnalyticsAdminServiceRestInterceptor, "post_get_channel_group" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_batch_update_access_bindings", + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_channel_group" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.BatchUpdateAccessBindingsRequest.pb( - analytics_admin.BatchUpdateAccessBindingsRequest() + pb_message = analytics_admin.GetChannelGroupRequest.pb( + analytics_admin.GetChannelGroupRequest() ) transcode.return_value = { "method": "post", @@ -88162,21 +98225,19 @@ def test_batch_update_access_bindings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.BatchUpdateAccessBindingsResponse.to_json( - analytics_admin.BatchUpdateAccessBindingsResponse() - ) + req.return_value._content = channel_group.ChannelGroup.to_json( + channel_group.ChannelGroup() ) - request = 
analytics_admin.BatchUpdateAccessBindingsRequest() + request = analytics_admin.GetChannelGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.BatchUpdateAccessBindingsResponse() + post.return_value = channel_group.ChannelGroup() - client.batch_update_access_bindings( + client.get_channel_group( request, metadata=[ ("key", "val"), @@ -88188,9 +98249,8 @@ def test_batch_update_access_bindings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_batch_update_access_bindings_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.BatchUpdateAccessBindingsRequest, +def test_get_channel_group_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetChannelGroupRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -88198,7 +98258,7 @@ def test_batch_update_access_bindings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "accounts/sample1"} + request_init = {"name": "properties/sample1/channelGroups/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -88210,10 +98270,66 @@ def test_batch_update_access_bindings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.batch_update_access_bindings(request) + client.get_channel_group(request) -def test_batch_update_access_bindings_rest_error(): +def test_get_channel_group_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = channel_group.ChannelGroup() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1/channelGroups/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = channel_group.ChannelGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_channel_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/channelGroups/*}" % client.transport._host, + args[1], + ) + + +def test_get_channel_group_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_channel_group( + analytics_admin.GetChannelGroupRequest(), + name="name_value", + ) + + +def test_get_channel_group_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -88222,39 +98338,44 @@ def test_batch_update_access_bindings_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.BatchDeleteAccessBindingsRequest, + analytics_admin.ListChannelGroupsRequest, dict, ], ) -def test_batch_delete_access_bindings_rest(request_type): +def test_list_channel_groups_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "accounts/sample1"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = analytics_admin.ListChannelGroupsResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = analytics_admin.ListChannelGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_delete_access_bindings(request) + response = client.list_channel_groups(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, pagers.ListChannelGroupsPager) + assert response.next_page_token == "next_page_token_value" -def test_batch_delete_access_bindings_rest_use_cached_wrapped_rpc(): +def test_list_channel_groups_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -88269,8 +98390,7 @@ def test_batch_delete_access_bindings_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.batch_delete_access_bindings - in client._transport._wrapped_methods + client._transport.list_channel_groups in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -88279,24 +98399,24 @@ def test_batch_delete_access_bindings_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.batch_delete_access_bindings + client._transport.list_channel_groups ] = mock_rpc request = {} - client.batch_delete_access_bindings(request) + client.list_channel_groups(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.batch_delete_access_bindings(request) + client.list_channel_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_batch_delete_access_bindings_rest_required_fields( - request_type=analytics_admin.BatchDeleteAccessBindingsRequest, +def test_list_channel_groups_rest_required_fields( + request_type=analytics_admin.ListChannelGroupsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -88312,7 +98432,7 @@ def test_batch_delete_access_bindings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_delete_access_bindings._get_unset_required_fields(jsonified_request) + ).list_channel_groups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -88321,7 +98441,14 @@ def test_batch_delete_access_bindings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_delete_access_bindings._get_unset_required_fields(jsonified_request) + ).list_channel_groups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -88335,7 +98462,7 @@ def test_batch_delete_access_bindings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = analytics_admin.ListChannelGroupsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -88347,45 +98474,47 @@ def test_batch_delete_access_bindings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = analytics_admin.ListChannelGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_delete_access_bindings(request) + response = client.list_channel_groups(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_batch_delete_access_bindings_rest_unset_required_fields(): +def test_list_channel_groups_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.batch_delete_access_bindings._get_unset_required_fields({}) + unset_fields = transport.list_channel_groups._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "parent", - "requests", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_delete_access_bindings_rest_interceptors(null_interceptor): +def test_list_channel_groups_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -88398,12 +98527,14 @@ def 
test_batch_delete_access_bindings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_batch_delete_access_bindings", + transports.AnalyticsAdminServiceRestInterceptor, "post_list_channel_groups" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_channel_groups" ) as pre: pre.assert_not_called() - pb_message = analytics_admin.BatchDeleteAccessBindingsRequest.pb( - analytics_admin.BatchDeleteAccessBindingsRequest() + post.assert_not_called() + pb_message = analytics_admin.ListChannelGroupsRequest.pb( + analytics_admin.ListChannelGroupsRequest() ) transcode.return_value = { "method": "post", @@ -88415,15 +98546,19 @@ def test_batch_delete_access_bindings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = analytics_admin.ListChannelGroupsResponse.to_json( + analytics_admin.ListChannelGroupsResponse() + ) - request = analytics_admin.BatchDeleteAccessBindingsRequest() + request = analytics_admin.ListChannelGroupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = analytics_admin.ListChannelGroupsResponse() - client.batch_delete_access_bindings( + client.list_channel_groups( request, metadata=[ ("key", "val"), @@ -88431,89 +98566,293 @@ def test_batch_delete_access_bindings_rest_interceptors(null_interceptor): ], ) - pre.assert_called_once() + pre.assert_called_once() + post.assert_called_once() + + +def test_list_channel_groups_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListChannelGroupsRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + 
request_init = {"parent": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_channel_groups(request) + + +def test_list_channel_groups_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_admin.ListChannelGroupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_admin.ListChannelGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_channel_groups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=properties/*}/channelGroups" % client.transport._host, + args[1], + ) -def test_batch_delete_access_bindings_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.BatchDeleteAccessBindingsRequest, -): +def test_list_channel_groups_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "accounts/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_delete_access_bindings(request) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_channel_groups( + analytics_admin.ListChannelGroupsRequest(), + parent="parent_value", + ) -def test_batch_delete_access_bindings_rest_error(): +def test_list_channel_groups_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + ], + next_page_token="abc", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[], + next_page_token="def", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + ], + next_page_token="ghi", + ), + analytics_admin.ListChannelGroupsResponse( + channel_groups=[ + channel_group.ChannelGroup(), + channel_group.ChannelGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListChannelGroupsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_channel_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, channel_group.ChannelGroup) for i in results) + + pages = list(client.list_channel_groups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetExpandedDataSetRequest, + analytics_admin.CreateChannelGroupRequest, dict, ], ) -def test_get_expanded_data_set_rest(request_type): +def test_create_channel_group_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": 
"properties/sample1/expandedDataSets/sample2"} + request_init = {"parent": "properties/sample1"} + request_init["channel_group"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "grouping_rule": [ + { + "display_name": "display_name_value", + "expression": { + "and_group": {"filter_expressions": {}}, + "or_group": {}, + "not_expression": {}, + "filter": { + "string_filter": {"match_type": 1, "value": "value_value"}, + "in_list_filter": { + "values": ["values_value1", "values_value2"] + }, + "field_name": "field_name_value", + }, + }, + } + ], + "system_defined": True, + "primary": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreateChannelGroupRequest.meta.fields["channel_group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["channel_group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["channel_group"][field])): + del request_init["channel_group"][field][i][subfield] + else: + del 
request_init["channel_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = expanded_data_set.ExpandedDataSet( + return_value = gaa_channel_group.ChannelGroup( name="name_value", display_name="display_name_value", description="description_value", - dimension_names=["dimension_names_value"], - metric_names=["metric_names_value"], + system_defined=True, + primary=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = expanded_data_set.ExpandedDataSet.pb(return_value) + return_value = gaa_channel_group.ChannelGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_expanded_data_set(request) + response = client.create_channel_group(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, expanded_data_set.ExpandedDataSet) + assert isinstance(response, gaa_channel_group.ChannelGroup) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" - assert response.dimension_names == ["dimension_names_value"] - assert response.metric_names == ["metric_names_value"] + assert response.system_defined is True + assert response.primary is True -def test_get_expanded_data_set_rest_use_cached_wrapped_rpc(): +def test_create_channel_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -88528,8 +98867,7 @@ def test_get_expanded_data_set_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_expanded_data_set - in client._transport._wrapped_methods + client._transport.create_channel_group in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -88538,29 +98876,29 @@ def test_get_expanded_data_set_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_expanded_data_set + client._transport.create_channel_group ] = mock_rpc request = {} - client.get_expanded_data_set(request) + client.create_channel_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_expanded_data_set(request) + client.create_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_expanded_data_set_rest_required_fields( - request_type=analytics_admin.GetExpandedDataSetRequest, +def test_create_channel_group_rest_required_fields( + request_type=analytics_admin.CreateChannelGroupRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -88571,21 +98909,21 @@ def test_get_expanded_data_set_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_expanded_data_set._get_unset_required_fields(jsonified_request) + ).create_channel_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_expanded_data_set._get_unset_required_fields(jsonified_request) + ).create_channel_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -88594,7 +98932,7 @@ def test_get_expanded_data_set_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = expanded_data_set.ExpandedDataSet() + return_value = gaa_channel_group.ChannelGroup() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -88606,39 +98944,48 @@ def test_get_expanded_data_set_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = expanded_data_set.ExpandedDataSet.pb(return_value) + return_value = gaa_channel_group.ChannelGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_expanded_data_set(request) + response = client.create_channel_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_expanded_data_set_rest_unset_required_fields(): +def test_create_channel_group_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_expanded_data_set._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_channel_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "channelGroup", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_expanded_data_set_rest_interceptors(null_interceptor): +def test_create_channel_group_rest_interceptors(null_interceptor): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -88651,14 +98998,14 @@ def test_get_expanded_data_set_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_expanded_data_set" + transports.AnalyticsAdminServiceRestInterceptor, "post_create_channel_group" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_expanded_data_set" + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_channel_group" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetExpandedDataSetRequest.pb( - analytics_admin.GetExpandedDataSetRequest() + pb_message = analytics_admin.CreateChannelGroupRequest.pb( + analytics_admin.CreateChannelGroupRequest() ) transcode.return_value = { "method": "post", @@ -88670,19 +99017,19 @@ def test_get_expanded_data_set_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = expanded_data_set.ExpandedDataSet.to_json( - expanded_data_set.ExpandedDataSet() + req.return_value._content = gaa_channel_group.ChannelGroup.to_json( + gaa_channel_group.ChannelGroup() ) - request = analytics_admin.GetExpandedDataSetRequest() + request = analytics_admin.CreateChannelGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = expanded_data_set.ExpandedDataSet() + post.return_value = gaa_channel_group.ChannelGroup() - client.get_expanded_data_set( + client.create_channel_group( request, metadata=[ ("key", "val"), @@ -88694,8 +99041,8 @@ def test_get_expanded_data_set_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_expanded_data_set_rest_bad_request( - transport: str = "rest", 
request_type=analytics_admin.GetExpandedDataSetRequest +def test_create_channel_group_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateChannelGroupRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -88703,7 +99050,7 @@ def test_get_expanded_data_set_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/expandedDataSets/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -88715,10 +99062,10 @@ def test_get_expanded_data_set_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_expanded_data_set(request) + client.create_channel_group(request) -def test_get_expanded_data_set_rest_flattened(): +def test_create_channel_group_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -88727,14 +99074,15 @@ def test_get_expanded_data_set_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = expanded_data_set.ExpandedDataSet() + return_value = gaa_channel_group.ChannelGroup() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/expandedDataSets/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + channel_group=gaa_channel_group.ChannelGroup(name="name_value"), ) mock_args.update(sample_request) @@ -88742,25 +99090,24 @@ def test_get_expanded_data_set_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = expanded_data_set.ExpandedDataSet.pb(return_value) + return_value = gaa_channel_group.ChannelGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_expanded_data_set(**mock_args) + client.create_channel_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/expandedDataSets/*}" - % client.transport._host, + "%s/v1alpha/{parent=properties/*}/channelGroups" % client.transport._host, args[1], ) -def test_get_expanded_data_set_rest_flattened_error(transport: str = "rest"): +def test_create_channel_group_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -88769,13 +99116,14 @@ def test_get_expanded_data_set_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_expanded_data_set( - analytics_admin.GetExpandedDataSetRequest(), - name="name_value", + client.create_channel_group( + analytics_admin.CreateChannelGroupRequest(), + parent="parent_value", + channel_group=gaa_channel_group.ChannelGroup(name="name_value"), ) -def test_get_expanded_data_set_rest_error(): +def test_create_channel_group_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -88784,44 +99132,145 @@ def test_get_expanded_data_set_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListExpandedDataSetsRequest, + analytics_admin.UpdateChannelGroupRequest, dict, ], ) -def test_list_expanded_data_sets_rest(request_type): +def test_update_channel_group_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = { + "channel_group": {"name": "properties/sample1/channelGroups/sample2"} + } + request_init["channel_group"] = { + "name": "properties/sample1/channelGroups/sample2", + "display_name": "display_name_value", + "description": "description_value", + "grouping_rule": [ + { + "display_name": "display_name_value", + "expression": { + "and_group": {"filter_expressions": {}}, + "or_group": {}, + "not_expression": {}, + "filter": { + "string_filter": {"match_type": 1, "value": "value_value"}, + "in_list_filter": { + "values": ["values_value1", "values_value2"] + }, + "field_name": "field_name_value", + }, + }, + } + ], + "system_defined": True, + "primary": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateChannelGroupRequest.meta.fields["channel_group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["channel_group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of 
the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["channel_group"][field])): + del request_init["channel_group"][field][i][subfield] + else: + del request_init["channel_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListExpandedDataSetsResponse( - next_page_token="next_page_token_value", + return_value = gaa_channel_group.ChannelGroup( + name="name_value", + display_name="display_name_value", + description="description_value", + system_defined=True, + primary=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListExpandedDataSetsResponse.pb(return_value) + return_value = gaa_channel_group.ChannelGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_expanded_data_sets(request) + response = client.update_channel_group(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListExpandedDataSetsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gaa_channel_group.ChannelGroup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.system_defined is True + assert response.primary is True -def test_list_expanded_data_sets_rest_use_cached_wrapped_rpc(): +def test_update_channel_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -88836,8 +99285,7 @@ def test_list_expanded_data_sets_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_expanded_data_sets - in client._transport._wrapped_methods + client._transport.update_channel_group in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -88846,29 +99294,28 @@ def test_list_expanded_data_sets_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_expanded_data_sets + client._transport.update_channel_group ] = mock_rpc request = {} - client.list_expanded_data_sets(request) + client.update_channel_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_expanded_data_sets(request) + client.update_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_expanded_data_sets_rest_required_fields( - request_type=analytics_admin.ListExpandedDataSetsRequest, +def test_update_channel_group_rest_required_fields( + request_type=analytics_admin.UpdateChannelGroupRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -88879,28 +99326,19 @@ def test_list_expanded_data_sets_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_expanded_data_sets._get_unset_required_fields(jsonified_request) + ).update_channel_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_expanded_data_sets._get_unset_required_fields(jsonified_request) + ).update_channel_group._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -88909,7 +99347,7 @@ def test_list_expanded_data_sets_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListExpandedDataSetsResponse() + return_value = gaa_channel_group.ChannelGroup() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -88921,47 +99359,48 @@ def test_list_expanded_data_sets_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListExpandedDataSetsResponse.pb(return_value) + return_value = gaa_channel_group.ChannelGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_expanded_data_sets(request) + response = client.update_channel_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_expanded_data_sets_rest_unset_required_fields(): +def test_update_channel_group_rest_unset_required_fields(): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_expanded_data_sets._get_unset_required_fields({}) + unset_fields = transport.update_channel_group._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("updateMask",)) + & set( ( - "pageSize", - "pageToken", + "channelGroup", + "updateMask", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_expanded_data_sets_rest_interceptors(null_interceptor): +def test_update_channel_group_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -88974,14 +99413,14 @@ def test_list_expanded_data_sets_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_expanded_data_sets" + transports.AnalyticsAdminServiceRestInterceptor, "post_update_channel_group" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_expanded_data_sets" + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_channel_group" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListExpandedDataSetsRequest.pb( - analytics_admin.ListExpandedDataSetsRequest() + pb_message = analytics_admin.UpdateChannelGroupRequest.pb( + analytics_admin.UpdateChannelGroupRequest() ) transcode.return_value = { "method": "post", @@ -88993,21 +99432,19 @@ def test_list_expanded_data_sets_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.ListExpandedDataSetsResponse.to_json( - analytics_admin.ListExpandedDataSetsResponse() - ) + req.return_value._content = 
gaa_channel_group.ChannelGroup.to_json( + gaa_channel_group.ChannelGroup() ) - request = analytics_admin.ListExpandedDataSetsRequest() + request = analytics_admin.UpdateChannelGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListExpandedDataSetsResponse() + post.return_value = gaa_channel_group.ChannelGroup() - client.list_expanded_data_sets( + client.update_channel_group( request, metadata=[ ("key", "val"), @@ -89019,8 +99456,8 @@ def test_list_expanded_data_sets_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_expanded_data_sets_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListExpandedDataSetsRequest +def test_update_channel_group_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateChannelGroupRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -89028,7 +99465,9 @@ def test_list_expanded_data_sets_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = { + "channel_group": {"name": "properties/sample1/channelGroups/sample2"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -89040,10 +99479,10 @@ def test_list_expanded_data_sets_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_expanded_data_sets(request) + client.update_channel_group(request) -def test_list_expanded_data_sets_rest_flattened(): +def test_update_channel_group_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -89052,14 +99491,17 @@ def test_list_expanded_data_sets_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListExpandedDataSetsResponse() + return_value = gaa_channel_group.ChannelGroup() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = { + "channel_group": {"name": "properties/sample1/channelGroups/sample2"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -89067,25 +99509,25 @@ def test_list_expanded_data_sets_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListExpandedDataSetsResponse.pb(return_value) + return_value = gaa_channel_group.ChannelGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_expanded_data_sets(**mock_args) + client.update_channel_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/expandedDataSets" + "%s/v1alpha/{channel_group.name=properties/*/channelGroups/*}" % client.transport._host, args[1], ) -def test_list_expanded_data_sets_rest_flattened_error(transport: str = "rest"): +def test_update_channel_group_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -89094,217 +99536,55 @@ def test_list_expanded_data_sets_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_expanded_data_sets( - analytics_admin.ListExpandedDataSetsRequest(), - parent="parent_value", + client.update_channel_group( + analytics_admin.UpdateChannelGroupRequest(), + channel_group=gaa_channel_group.ChannelGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_expanded_data_sets_rest_pager(transport: str = "rest"): +def test_update_channel_group_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), - ], - next_page_token="abc", - ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[], - next_page_token="def", - ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), - ], - next_page_token="ghi", - ), - analytics_admin.ListExpandedDataSetsResponse( - expanded_data_sets=[ - expanded_data_set.ExpandedDataSet(), - expanded_data_set.ExpandedDataSet(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListExpandedDataSetsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_expanded_data_sets(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, expanded_data_set.ExpandedDataSet) for i in results) - - pages = list(client.list_expanded_data_sets(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateExpandedDataSetRequest, + analytics_admin.DeleteChannelGroupRequest, dict, ], ) -def test_create_expanded_data_set_rest(request_type): +def test_delete_channel_group_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), 
transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["expanded_data_set"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "dimension_names": ["dimension_names_value1", "dimension_names_value2"], - "metric_names": ["metric_names_value1", "metric_names_value2"], - "dimension_filter_expression": { - "and_group": {"filter_expressions": {}}, - "not_expression": {}, - "filter": { - "string_filter": { - "match_type": 1, - "value": "value_value", - "case_sensitive": True, - }, - "in_list_filter": { - "values": ["values_value1", "values_value2"], - "case_sensitive": True, - }, - "field_name": "field_name_value", - }, - }, - "data_collection_start_time": {"seconds": 751, "nanos": 543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateExpandedDataSetRequest.meta.fields[ - "expanded_data_set" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["expanded_data_set"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["expanded_data_set"][field])): - del request_init["expanded_data_set"][field][i][subfield] - 
else: - del request_init["expanded_data_set"][field][subfield] + request_init = {"name": "properties/sample1/channelGroups/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gaa_expanded_data_set.ExpandedDataSet( - name="name_value", - display_name="display_name_value", - description="description_value", - dimension_names=["dimension_names_value"], - metric_names=["metric_names_value"], - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gaa_expanded_data_set.ExpandedDataSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_expanded_data_set(request) + response = client.delete_channel_group(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gaa_expanded_data_set.ExpandedDataSet) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.dimension_names == ["dimension_names_value"] - assert response.metric_names == ["metric_names_value"] + assert response is None -def test_create_expanded_data_set_rest_use_cached_wrapped_rpc(): +def test_delete_channel_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -89319,8 +99599,7 @@ def test_create_expanded_data_set_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_expanded_data_set - in client._transport._wrapped_methods + client._transport.delete_channel_group in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -89329,29 +99608,29 @@ def test_create_expanded_data_set_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_expanded_data_set + client._transport.delete_channel_group ] = mock_rpc request = {} - client.create_expanded_data_set(request) + client.delete_channel_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_expanded_data_set(request) + client.delete_channel_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_expanded_data_set_rest_required_fields( - request_type=analytics_admin.CreateExpandedDataSetRequest, +def test_delete_channel_group_rest_required_fields( + request_type=analytics_admin.DeleteChannelGroupRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -89362,21 +99641,21 @@ def test_create_expanded_data_set_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_expanded_data_set._get_unset_required_fields(jsonified_request) + ).delete_channel_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_expanded_data_set._get_unset_required_fields(jsonified_request) + ).delete_channel_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -89385,7 +99664,7 @@ def test_create_expanded_data_set_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned 
response. - return_value = gaa_expanded_data_set.ExpandedDataSet() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -89397,48 +99676,36 @@ def test_create_expanded_data_set_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gaa_expanded_data_set.ExpandedDataSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_expanded_data_set(request) + response = client.delete_channel_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_expanded_data_set_rest_unset_required_fields(): +def test_delete_channel_group_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_expanded_data_set._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "expandedDataSet", - ) - ) - ) + unset_fields = transport.delete_channel_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_expanded_data_set_rest_interceptors(null_interceptor): +def test_delete_channel_group_rest_interceptors(null_interceptor): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -89451,14 +99718,11 @@ def test_create_expanded_data_set_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_expanded_data_set" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_expanded_data_set" + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_channel_group" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.CreateExpandedDataSetRequest.pb( - analytics_admin.CreateExpandedDataSetRequest() + pb_message = analytics_admin.DeleteChannelGroupRequest.pb( + analytics_admin.DeleteChannelGroupRequest() ) transcode.return_value = { "method": "post", @@ -89470,19 +99734,15 @@ def test_create_expanded_data_set_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gaa_expanded_data_set.ExpandedDataSet.to_json( - gaa_expanded_data_set.ExpandedDataSet() - ) - request = analytics_admin.CreateExpandedDataSetRequest() + request = analytics_admin.DeleteChannelGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gaa_expanded_data_set.ExpandedDataSet() - client.create_expanded_data_set( + client.delete_channel_group( request, metadata=[ ("key", "val"), @@ -89491,11 +99751,10 @@ def test_create_expanded_data_set_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_create_expanded_data_set_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateExpandedDataSetRequest +def test_delete_channel_group_rest_bad_request( + transport: str = "rest", 
request_type=analytics_admin.DeleteChannelGroupRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -89503,7 +99762,7 @@ def test_create_expanded_data_set_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/channelGroups/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -89515,10 +99774,10 @@ def test_create_expanded_data_set_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_expanded_data_set(request) + client.delete_channel_group(request) -def test_create_expanded_data_set_rest_flattened(): +def test_delete_channel_group_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -89527,41 +99786,37 @@ def test_create_expanded_data_set_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gaa_expanded_data_set.ExpandedDataSet() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/channelGroups/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gaa_expanded_data_set.ExpandedDataSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_expanded_data_set(**mock_args) + client.delete_channel_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/expandedDataSets" - % client.transport._host, + "%s/v1alpha/{name=properties/*/channelGroups/*}" % client.transport._host, args[1], ) -def test_create_expanded_data_set_rest_flattened_error(transport: str = "rest"): +def test_delete_channel_group_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -89570,14 +99825,13 @@ def test_create_expanded_data_set_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_expanded_data_set( - analytics_admin.CreateExpandedDataSetRequest(), - parent="parent_value", - expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), + client.delete_channel_group( + analytics_admin.DeleteChannelGroupRequest(), + name="name_value", ) -def test_create_expanded_data_set_rest_error(): +def test_delete_channel_group_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -89586,147 +99840,45 @@ def test_create_expanded_data_set_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateExpandedDataSetRequest, + analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, dict, ], ) -def test_update_expanded_data_set_rest(request_type): +def test_set_automated_ga4_configuration_opt_out_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "expanded_data_set": {"name": "properties/sample1/expandedDataSets/sample2"} - } - request_init["expanded_data_set"] = { - "name": "properties/sample1/expandedDataSets/sample2", - "display_name": "display_name_value", - "description": "description_value", - "dimension_names": ["dimension_names_value1", "dimension_names_value2"], - "metric_names": ["metric_names_value1", "metric_names_value2"], - "dimension_filter_expression": { - "and_group": {"filter_expressions": {}}, - "not_expression": {}, - "filter": { - "string_filter": { - "match_type": 1, - "value": "value_value", - "case_sensitive": True, - }, - "in_list_filter": { - "values": ["values_value1", "values_value2"], - "case_sensitive": True, - }, - "field_name": "field_name_value", - }, - }, - "data_collection_start_time": {"seconds": 751, "nanos": 543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateExpandedDataSetRequest.meta.fields[ - "expanded_data_set" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["expanded_data_set"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the 
runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["expanded_data_set"][field])): - del request_init["expanded_data_set"][field][i][subfield] - else: - del request_init["expanded_data_set"][field][subfield] + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gaa_expanded_data_set.ExpandedDataSet( - name="name_value", - display_name="display_name_value", - description="description_value", - dimension_names=["dimension_names_value"], - metric_names=["metric_names_value"], - ) + return_value = analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gaa_expanded_data_set.ExpandedDataSet.pb(return_value) + return_value = analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_expanded_data_set(request) + response = client.set_automated_ga4_configuration_opt_out(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gaa_expanded_data_set.ExpandedDataSet) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.dimension_names == ["dimension_names_value"] - assert response.metric_names == ["metric_names_value"] + assert isinstance( + response, analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse + ) -def test_update_expanded_data_set_rest_use_cached_wrapped_rpc(): +def test_set_automated_ga4_configuration_opt_out_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -89741,7 +99893,7 @@ def test_update_expanded_data_set_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_expanded_data_set + client._transport.set_automated_ga4_configuration_opt_out in client._transport._wrapped_methods ) @@ -89751,28 +99903,29 @@ def test_update_expanded_data_set_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_expanded_data_set + client._transport.set_automated_ga4_configuration_opt_out ] = mock_rpc request = {} - client.update_expanded_data_set(request) + client.set_automated_ga4_configuration_opt_out(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_expanded_data_set(request) + client.set_automated_ga4_configuration_opt_out(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_expanded_data_set_rest_required_fields( - request_type=analytics_admin.UpdateExpandedDataSetRequest, +def test_set_automated_ga4_configuration_opt_out_rest_required_fields( + request_type=analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["property"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -89783,19 +99936,25 @@ def test_update_expanded_data_set_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_expanded_data_set._get_unset_required_fields(jsonified_request) + ).set_automated_ga4_configuration_opt_out._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["property"] = "property_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_expanded_data_set._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).set_automated_ga4_configuration_opt_out._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "property" in jsonified_request + assert jsonified_request["property"] == "property_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -89804,7 +99963,7 @@ def test_update_expanded_data_set_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gaa_expanded_data_set.ExpandedDataSet() + return_value = analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -89816,7 +99975,7 @@ def test_update_expanded_data_set_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -89826,38 +99985,36 @@ def test_update_expanded_data_set_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = gaa_expanded_data_set.ExpandedDataSet.pb(return_value) + return_value = ( + analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_expanded_data_set(request) + response = client.set_automated_ga4_configuration_opt_out(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_update_expanded_data_set_rest_unset_required_fields(): +def test_set_automated_ga4_configuration_opt_out_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_expanded_data_set._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "expandedDataSet", - "updateMask", - ) - ) + unset_fields = ( + transport.set_automated_ga4_configuration_opt_out._get_unset_required_fields({}) ) + assert set(unset_fields) == (set(()) & set(("property",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_expanded_data_set_rest_interceptors(null_interceptor): +def test_set_automated_ga4_configuration_opt_out_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -89870,14 +100027,16 @@ def test_update_expanded_data_set_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_update_expanded_data_set" + transports.AnalyticsAdminServiceRestInterceptor, + "post_set_automated_ga4_configuration_opt_out", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_expanded_data_set" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_set_automated_ga4_configuration_opt_out", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateExpandedDataSetRequest.pb( - analytics_admin.UpdateExpandedDataSetRequest() + pb_message = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest.pb( + analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest() ) transcode.return_value = { "method": "post", @@ -89889,19 +100048,21 @@ def test_update_expanded_data_set_rest_interceptors(null_interceptor): 
req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gaa_expanded_data_set.ExpandedDataSet.to_json( - gaa_expanded_data_set.ExpandedDataSet() + req.return_value._content = ( + analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse.to_json( + analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() + ) ) - request = analytics_admin.UpdateExpandedDataSetRequest() + request = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gaa_expanded_data_set.ExpandedDataSet() + post.return_value = analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() - client.update_expanded_data_set( + client.set_automated_ga4_configuration_opt_out( request, metadata=[ ("key", "val"), @@ -89913,8 +100074,9 @@ def test_update_expanded_data_set_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_expanded_data_set_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.UpdateExpandedDataSetRequest +def test_set_automated_ga4_configuration_opt_out_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -89922,9 +100084,7 @@ def test_update_expanded_data_set_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "expanded_data_set": {"name": "properties/sample1/expandedDataSets/sample2"} - } + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -89936,71 +100096,10 @@ def test_update_expanded_data_set_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_expanded_data_set(request) - - -def test_update_expanded_data_set_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gaa_expanded_data_set.ExpandedDataSet() - - # get arguments that satisfy an http rule for this method - sample_request = { - "expanded_data_set": {"name": "properties/sample1/expandedDataSets/sample2"} - } - - # get truthy value for each flattened field - mock_args = dict( - expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gaa_expanded_data_set.ExpandedDataSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_expanded_data_set(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{expanded_data_set.name=properties/*/expandedDataSets/*}" - % client.transport._host, - args[1], - ) - - -def test_update_expanded_data_set_rest_flattened_error(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_expanded_data_set( - analytics_admin.UpdateExpandedDataSetRequest(), - expanded_data_set=gaa_expanded_data_set.ExpandedDataSet(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + client.set_automated_ga4_configuration_opt_out(request) -def test_update_expanded_data_set_rest_error(): +def test_set_automated_ga4_configuration_opt_out_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -90009,39 +100108,48 @@ def test_update_expanded_data_set_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteExpandedDataSetRequest, + analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, dict, ], ) -def test_delete_expanded_data_set_rest(request_type): +def test_fetch_automated_ga4_configuration_opt_out_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/expandedDataSets/sample2"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse( + opt_out=True, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_expanded_data_set(request) + response = client.fetch_automated_ga4_configuration_opt_out(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance( + response, analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse + ) + assert response.opt_out is True -def test_delete_expanded_data_set_rest_use_cached_wrapped_rpc(): +def test_fetch_automated_ga4_configuration_opt_out_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -90056,7 +100164,7 @@ def test_delete_expanded_data_set_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_expanded_data_set + client._transport.fetch_automated_ga4_configuration_opt_out in client._transport._wrapped_methods ) @@ -90066,29 +100174,29 @@ def test_delete_expanded_data_set_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.delete_expanded_data_set + client._transport.fetch_automated_ga4_configuration_opt_out ] = mock_rpc request = {} - client.delete_expanded_data_set(request) + client.fetch_automated_ga4_configuration_opt_out(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_expanded_data_set(request) + client.fetch_automated_ga4_configuration_opt_out(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_expanded_data_set_rest_required_fields( - request_type=analytics_admin.DeleteExpandedDataSetRequest, +def test_fetch_automated_ga4_configuration_opt_out_rest_required_fields( + request_type=analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["property"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -90099,21 +100207,25 @@ def test_delete_expanded_data_set_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_expanded_data_set._get_unset_required_fields(jsonified_request) + ).fetch_automated_ga4_configuration_opt_out._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["property"] = "property_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_expanded_data_set._get_unset_required_fields(jsonified_request) + ).fetch_automated_ga4_configuration_opt_out._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are 
left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "property" in jsonified_request + assert jsonified_request["property"] == "property_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -90122,7 +100234,7 @@ def test_delete_expanded_data_set_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -90134,36 +100246,48 @@ def test_delete_expanded_data_set_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = ( + analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_expanded_data_set(request) + response = client.fetch_automated_ga4_configuration_opt_out(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_expanded_data_set_rest_unset_required_fields(): +def test_fetch_automated_ga4_configuration_opt_out_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.delete_expanded_data_set._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = ( + transport.fetch_automated_ga4_configuration_opt_out._get_unset_required_fields( + {} + ) + ) + assert set(unset_fields) == (set(()) & set(("property",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_expanded_data_set_rest_interceptors(null_interceptor): +def test_fetch_automated_ga4_configuration_opt_out_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -90176,11 +100300,16 @@ def test_delete_expanded_data_set_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_expanded_data_set" + transports.AnalyticsAdminServiceRestInterceptor, + "post_fetch_automated_ga4_configuration_opt_out", + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "pre_fetch_automated_ga4_configuration_opt_out", ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteExpandedDataSetRequest.pb( - analytics_admin.DeleteExpandedDataSetRequest() + post.assert_not_called() + pb_message = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest.pb( + analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest() ) transcode.return_value = { "method": "post", @@ -90192,15 +100321,23 @@ def test_delete_expanded_data_set_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = ( + analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse.to_json( + analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse() + ) + ) - request = analytics_admin.DeleteExpandedDataSetRequest() + request = 
analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = ( + analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse() + ) - client.delete_expanded_data_set( + client.fetch_automated_ga4_configuration_opt_out( request, metadata=[ ("key", "val"), @@ -90209,10 +100346,12 @@ def test_delete_expanded_data_set_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_expanded_data_set_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeleteExpandedDataSetRequest +def test_fetch_automated_ga4_configuration_opt_out_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -90220,7 +100359,7 @@ def test_delete_expanded_data_set_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/expandedDataSets/sample2"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -90232,65 +100371,10 @@ def test_delete_expanded_data_set_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_expanded_data_set(request) - - -def test_delete_expanded_data_set_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/expandedDataSets/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_expanded_data_set(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=properties/*/expandedDataSets/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_expanded_data_set_rest_flattened_error(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_expanded_data_set( - analytics_admin.DeleteExpandedDataSetRequest(), - name="name_value", - ) + client.fetch_automated_ga4_configuration_opt_out(request) -def test_delete_expanded_data_set_rest_error(): +def test_fetch_automated_ga4_configuration_opt_out_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -90299,50 +100383,139 @@ def test_delete_expanded_data_set_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetChannelGroupRequest, + analytics_admin.CreateBigQueryLinkRequest, dict, ], ) -def test_get_channel_group_rest(request_type): +def test_create_big_query_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/channelGroups/sample2"} + request_init = {"parent": "properties/sample1"} + request_init["bigquery_link"] = { + "name": "name_value", + "project": "project_value", + "create_time": {"seconds": 751, "nanos": 543}, + "daily_export_enabled": True, + "streaming_export_enabled": True, + "fresh_daily_export_enabled": True, + "include_advertising_id": True, + "export_streams": ["export_streams_value1", "export_streams_value2"], + "excluded_events": ["excluded_events_value1", "excluded_events_value2"], + "dataset_location": "dataset_location_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreateBigQueryLinkRequest.meta.fields["bigquery_link"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["bigquery_link"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of 
the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["bigquery_link"][field])): + del request_init["bigquery_link"][field][i][subfield] + else: + del request_init["bigquery_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = channel_group.ChannelGroup( + return_value = resources.BigQueryLink( name="name_value", - display_name="display_name_value", - description="description_value", - system_defined=True, + project="project_value", + daily_export_enabled=True, + streaming_export_enabled=True, + fresh_daily_export_enabled=True, + include_advertising_id=True, + export_streams=["export_streams_value"], + excluded_events=["excluded_events_value"], + dataset_location="dataset_location_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = channel_group.ChannelGroup.pb(return_value) + return_value = resources.BigQueryLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_channel_group(request) + response = client.create_big_query_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, channel_group.ChannelGroup) + assert isinstance(response, resources.BigQueryLink) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.system_defined is True + assert response.project == "project_value" + assert response.daily_export_enabled is True + assert response.streaming_export_enabled is True + assert response.fresh_daily_export_enabled is True + assert response.include_advertising_id is True + assert response.export_streams == ["export_streams_value"] + assert response.excluded_events == ["excluded_events_value"] + assert response.dataset_location == "dataset_location_value" -def test_get_channel_group_rest_use_cached_wrapped_rpc(): +def test_create_big_query_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -90356,7 +100529,10 @@ def test_get_channel_group_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_channel_group in client._transport._wrapped_methods + assert ( + client._transport.create_big_query_link + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -90364,29 +100540,29 @@ def test_get_channel_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_channel_group + client._transport.create_big_query_link ] = mock_rpc request = {} - client.get_channel_group(request) + client.create_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_channel_group(request) + client.create_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_channel_group_rest_required_fields( - request_type=analytics_admin.GetChannelGroupRequest, +def test_create_big_query_link_rest_required_fields( + request_type=analytics_admin.CreateBigQueryLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -90397,21 +100573,21 @@ def test_get_channel_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_channel_group._get_unset_required_fields(jsonified_request) + ).create_big_query_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_channel_group._get_unset_required_fields(jsonified_request) + ).create_big_query_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -90420,7 +100596,7 @@ def test_get_channel_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = channel_group.ChannelGroup() + return_value = resources.BigQueryLink() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -90432,39 +100608,48 @@ def test_get_channel_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = channel_group.ChannelGroup.pb(return_value) + return_value = resources.BigQueryLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_channel_group(request) + response = client.create_big_query_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_channel_group_rest_unset_required_fields(): +def test_create_big_query_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_channel_group._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_big_query_link._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "bigqueryLink", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_channel_group_rest_interceptors(null_interceptor): +def test_create_big_query_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -90477,14 +100662,14 @@ def test_get_channel_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_channel_group" + transports.AnalyticsAdminServiceRestInterceptor, "post_create_big_query_link" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_channel_group" + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_big_query_link" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetChannelGroupRequest.pb( - analytics_admin.GetChannelGroupRequest() + pb_message = analytics_admin.CreateBigQueryLinkRequest.pb( + analytics_admin.CreateBigQueryLinkRequest() ) transcode.return_value = { "method": "post", @@ -90496,19 +100681,19 @@ def test_get_channel_group_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = channel_group.ChannelGroup.to_json( - channel_group.ChannelGroup() + req.return_value._content = resources.BigQueryLink.to_json( + resources.BigQueryLink() ) - request = analytics_admin.GetChannelGroupRequest() + request = analytics_admin.CreateBigQueryLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = channel_group.ChannelGroup() + post.return_value = resources.BigQueryLink() - client.get_channel_group( + client.create_big_query_link( request, metadata=[ ("key", "val"), @@ -90520,8 +100705,8 @@ def test_get_channel_group_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_channel_group_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetChannelGroupRequest +def test_create_big_query_link_rest_bad_request( + transport: str = 
"rest", request_type=analytics_admin.CreateBigQueryLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -90529,7 +100714,7 @@ def test_get_channel_group_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/channelGroups/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -90541,10 +100726,10 @@ def test_get_channel_group_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_channel_group(request) + client.create_big_query_link(request) -def test_get_channel_group_rest_flattened(): +def test_create_big_query_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -90553,14 +100738,15 @@ def test_get_channel_group_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = channel_group.ChannelGroup() + return_value = resources.BigQueryLink() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/channelGroups/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + bigquery_link=resources.BigQueryLink(name="name_value"), ) mock_args.update(sample_request) @@ -90568,24 +100754,24 @@ def test_get_channel_group_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = channel_group.ChannelGroup.pb(return_value) + return_value = resources.BigQueryLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_channel_group(**mock_args) + client.create_big_query_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/channelGroups/*}" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/bigQueryLinks" % client.transport._host, args[1], ) -def test_get_channel_group_rest_flattened_error(transport: str = "rest"): +def test_create_big_query_link_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -90594,13 +100780,14 @@ def test_get_channel_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_channel_group( - analytics_admin.GetChannelGroupRequest(), - name="name_value", + client.create_big_query_link( + analytics_admin.CreateBigQueryLinkRequest(), + parent="parent_value", + bigquery_link=resources.BigQueryLink(name="name_value"), ) -def test_get_channel_group_rest_error(): +def test_create_big_query_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -90609,44 +100796,60 @@ def test_get_channel_group_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListChannelGroupsRequest, + analytics_admin.GetBigQueryLinkRequest, dict, ], ) -def test_list_channel_groups_rest(request_type): +def test_get_big_query_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/bigQueryLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListChannelGroupsResponse( - next_page_token="next_page_token_value", + return_value = resources.BigQueryLink( + name="name_value", + project="project_value", + daily_export_enabled=True, + streaming_export_enabled=True, + fresh_daily_export_enabled=True, + include_advertising_id=True, + export_streams=["export_streams_value"], + excluded_events=["excluded_events_value"], + dataset_location="dataset_location_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListChannelGroupsResponse.pb(return_value) + return_value = resources.BigQueryLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_channel_groups(request) + response = client.get_big_query_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListChannelGroupsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.BigQueryLink) + assert response.name == "name_value" + assert response.project == "project_value" + assert response.daily_export_enabled is True + assert response.streaming_export_enabled is True + assert response.fresh_daily_export_enabled is True + assert response.include_advertising_id is True + assert response.export_streams == ["export_streams_value"] + assert response.excluded_events == ["excluded_events_value"] + assert response.dataset_location == "dataset_location_value" -def test_list_channel_groups_rest_use_cached_wrapped_rpc(): +def test_get_big_query_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -90661,7 +100864,7 @@ def test_list_channel_groups_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_channel_groups in client._transport._wrapped_methods + client._transport.get_big_query_link in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -90670,29 +100873,29 @@ def test_list_channel_groups_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_channel_groups + client._transport.get_big_query_link ] = mock_rpc request = {} - client.list_channel_groups(request) + client.get_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_channel_groups(request) + client.get_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_channel_groups_rest_required_fields( - request_type=analytics_admin.ListChannelGroupsRequest, +def test_get_big_query_link_rest_required_fields( + request_type=analytics_admin.GetBigQueryLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -90703,28 +100906,21 @@ def test_list_channel_groups_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_channel_groups._get_unset_required_fields(jsonified_request) + ).get_big_query_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_channel_groups._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).get_big_query_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -90733,7 +100929,7 @@ def test_list_channel_groups_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListChannelGroupsResponse() + return_value = resources.BigQueryLink() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -90754,38 +100950,30 @@ def test_list_channel_groups_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListChannelGroupsResponse.pb(return_value) + return_value = resources.BigQueryLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_channel_groups(request) + response = client.get_big_query_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params - -def test_list_channel_groups_rest_unset_required_fields(): - transport = transports.AnalyticsAdminServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_channel_groups._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - 
& set(("parent",)) + +def test_get_big_query_link_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) + unset_fields = transport.get_big_query_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_channel_groups_rest_interceptors(null_interceptor): +def test_get_big_query_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -90798,14 +100986,14 @@ def test_list_channel_groups_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_channel_groups" + transports.AnalyticsAdminServiceRestInterceptor, "post_get_big_query_link" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_channel_groups" + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_big_query_link" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListChannelGroupsRequest.pb( - analytics_admin.ListChannelGroupsRequest() + pb_message = analytics_admin.GetBigQueryLinkRequest.pb( + analytics_admin.GetBigQueryLinkRequest() ) transcode.return_value = { "method": "post", @@ -90817,19 +101005,19 @@ def test_list_channel_groups_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.ListChannelGroupsResponse.to_json( - analytics_admin.ListChannelGroupsResponse() + req.return_value._content = resources.BigQueryLink.to_json( + resources.BigQueryLink() ) - request = analytics_admin.ListChannelGroupsRequest() + request = 
analytics_admin.GetBigQueryLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListChannelGroupsResponse() + post.return_value = resources.BigQueryLink() - client.list_channel_groups( + client.get_big_query_link( request, metadata=[ ("key", "val"), @@ -90841,8 +101029,8 @@ def test_list_channel_groups_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_channel_groups_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListChannelGroupsRequest +def test_get_big_query_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetBigQueryLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -90850,7 +101038,7 @@ def test_list_channel_groups_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/bigQueryLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -90862,10 +101050,10 @@ def test_list_channel_groups_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_channel_groups(request) + client.get_big_query_link(request) -def test_list_channel_groups_rest_flattened(): +def test_get_big_query_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -90874,14 +101062,14 @@ def test_list_channel_groups_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListChannelGroupsResponse() + return_value = resources.BigQueryLink() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/bigQueryLinks/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -90889,24 +101077,24 @@ def test_list_channel_groups_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListChannelGroupsResponse.pb(return_value) + return_value = resources.BigQueryLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_channel_groups(**mock_args) + client.get_big_query_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/channelGroups" % client.transport._host, + "%s/v1alpha/{name=properties/*/bigQueryLinks/*}" % client.transport._host, args[1], ) -def test_list_channel_groups_rest_flattened_error(transport: str = "rest"): +def test_get_big_query_link_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -90915,83 +101103,26 @@ def test_list_channel_groups_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_channel_groups( - analytics_admin.ListChannelGroupsRequest(), - parent="parent_value", + client.get_big_query_link( + analytics_admin.GetBigQueryLinkRequest(), + name="name_value", ) -def test_list_channel_groups_rest_pager(transport: str = "rest"): +def test_get_big_query_link_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), - ], - next_page_token="abc", - ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[], - next_page_token="def", - ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), - ], - next_page_token="ghi", - ), - analytics_admin.ListChannelGroupsResponse( - channel_groups=[ - channel_group.ChannelGroup(), - channel_group.ChannelGroup(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListChannelGroupsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_channel_groups(request=sample_request) - - results = list(pager) 
- assert len(results) == 6 - assert all(isinstance(i, channel_group.ChannelGroup) for i in results) - - pages = list(client.list_channel_groups(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateChannelGroupRequest, + analytics_admin.ListBigQueryLinksRequest, dict, ], ) -def test_create_channel_group_rest(request_type): +def test_list_big_query_links_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -90999,128 +101130,32 @@ def test_create_channel_group_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "properties/sample1"} - request_init["channel_group"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "grouping_rule": [ - { - "display_name": "display_name_value", - "expression": { - "and_group": {"filter_expressions": {}}, - "or_group": {}, - "not_expression": {}, - "filter": { - "string_filter": {"match_type": 1, "value": "value_value"}, - "in_list_filter": { - "values": ["values_value1", "values_value2"] - }, - "field_name": "field_name_value", - }, - }, - } - ], - "system_defined": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateChannelGroupRequest.meta.fields["channel_group"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel_group"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["channel_group"][field])): - del request_init["channel_group"][field][i][subfield] - else: - del 
request_init["channel_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gaa_channel_group.ChannelGroup( - name="name_value", - display_name="display_name_value", - description="description_value", - system_defined=True, + return_value = analytics_admin.ListBigQueryLinksResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gaa_channel_group.ChannelGroup.pb(return_value) + return_value = analytics_admin.ListBigQueryLinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_channel_group(request) + response = client.list_big_query_links(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gaa_channel_group.ChannelGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.system_defined is True + assert isinstance(response, pagers.ListBigQueryLinksPager) + assert response.next_page_token == "next_page_token_value" -def test_create_channel_group_rest_use_cached_wrapped_rpc(): +def test_list_big_query_links_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -91135,7 +101170,7 @@ def test_create_channel_group_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_channel_group in client._transport._wrapped_methods + client._transport.list_big_query_links in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -91144,24 +101179,24 @@ def test_create_channel_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_channel_group + client._transport.list_big_query_links ] = mock_rpc request = {} - client.create_channel_group(request) + client.list_big_query_links(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_channel_group(request) + client.list_big_query_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_channel_group_rest_required_fields( - request_type=analytics_admin.CreateChannelGroupRequest, +def test_list_big_query_links_rest_required_fields( + request_type=analytics_admin.ListBigQueryLinksRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -91177,7 +101212,7 @@ def test_create_channel_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_channel_group._get_unset_required_fields(jsonified_request) + ).list_big_query_links._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -91186,7 +101221,14 @@ def test_create_channel_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_channel_group._get_unset_required_fields(jsonified_request) + ).list_big_query_links._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -91200,7 +101242,7 @@ def test_create_channel_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gaa_channel_group.ChannelGroup() + return_value = analytics_admin.ListBigQueryLinksResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -91212,48 +101254,47 @@ def test_create_channel_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gaa_channel_group.ChannelGroup.pb(return_value) + return_value = analytics_admin.ListBigQueryLinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_channel_group(request) + response = client.list_big_query_links(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_channel_group_rest_unset_required_fields(): +def test_list_big_query_links_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_channel_group._get_unset_required_fields({}) + unset_fields = transport.list_big_query_links._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "parent", - "channelGroup", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_channel_group_rest_interceptors(null_interceptor): +def test_list_big_query_links_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -91266,14 +101307,14 @@ def 
test_create_channel_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_channel_group" + transports.AnalyticsAdminServiceRestInterceptor, "post_list_big_query_links" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_channel_group" + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_big_query_links" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateChannelGroupRequest.pb( - analytics_admin.CreateChannelGroupRequest() + pb_message = analytics_admin.ListBigQueryLinksRequest.pb( + analytics_admin.ListBigQueryLinksRequest() ) transcode.return_value = { "method": "post", @@ -91285,19 +101326,19 @@ def test_create_channel_group_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gaa_channel_group.ChannelGroup.to_json( - gaa_channel_group.ChannelGroup() + req.return_value._content = analytics_admin.ListBigQueryLinksResponse.to_json( + analytics_admin.ListBigQueryLinksResponse() ) - request = analytics_admin.CreateChannelGroupRequest() + request = analytics_admin.ListBigQueryLinksRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gaa_channel_group.ChannelGroup() + post.return_value = analytics_admin.ListBigQueryLinksResponse() - client.create_channel_group( + client.list_big_query_links( request, metadata=[ ("key", "val"), @@ -91309,8 +101350,8 @@ def test_create_channel_group_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_channel_group_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateChannelGroupRequest +def test_list_big_query_links_rest_bad_request( + transport: str = 
"rest", request_type=analytics_admin.ListBigQueryLinksRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -91330,10 +101371,10 @@ def test_create_channel_group_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_channel_group(request) + client.list_big_query_links(request) -def test_create_channel_group_rest_flattened(): +def test_list_big_query_links_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -91342,7 +101383,7 @@ def test_create_channel_group_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gaa_channel_group.ChannelGroup() + return_value = analytics_admin.ListBigQueryLinksResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1"} @@ -91350,7 +101391,6 @@ def test_create_channel_group_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - channel_group=gaa_channel_group.ChannelGroup(name="name_value"), ) mock_args.update(sample_request) @@ -91358,24 +101398,24 @@ def test_create_channel_group_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gaa_channel_group.ChannelGroup.pb(return_value) + return_value = analytics_admin.ListBigQueryLinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_channel_group(**mock_args) + client.list_big_query_links(**mock_args) # Establish that the underlying call was made with the expected # request 
object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/channelGroups" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/bigQueryLinks" % client.transport._host, args[1], ) -def test_create_channel_group_rest_flattened_error(transport: str = "rest"): +def test_list_big_query_links_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -91384,158 +101424,111 @@ def test_create_channel_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_channel_group( - analytics_admin.CreateChannelGroupRequest(), + client.list_big_query_links( + analytics_admin.ListBigQueryLinksRequest(), parent="parent_value", - channel_group=gaa_channel_group.ChannelGroup(name="name_value"), ) -def test_create_channel_group_rest_error(): +def test_list_big_query_links_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + resources.BigQueryLink(), + resources.BigQueryLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[], + next_page_token="def", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListBigQueryLinksResponse( + bigquery_links=[ + resources.BigQueryLink(), + resources.BigQueryLink(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListBigQueryLinksResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_big_query_links(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.BigQueryLink) for i in results) + + pages = list(client.list_big_query_links(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateChannelGroupRequest, + analytics_admin.DeleteBigQueryLinkRequest, dict, ], ) -def test_update_channel_group_rest(request_type): +def test_delete_big_query_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "channel_group": {"name": 
"properties/sample1/channelGroups/sample2"} - } - request_init["channel_group"] = { - "name": "properties/sample1/channelGroups/sample2", - "display_name": "display_name_value", - "description": "description_value", - "grouping_rule": [ - { - "display_name": "display_name_value", - "expression": { - "and_group": {"filter_expressions": {}}, - "or_group": {}, - "not_expression": {}, - "filter": { - "string_filter": {"match_type": 1, "value": "value_value"}, - "in_list_filter": { - "values": ["values_value1", "values_value2"] - }, - "field_name": "field_name_value", - }, - }, - } - ], - "system_defined": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateChannelGroupRequest.meta.fields["channel_group"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel_group"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["channel_group"][field])): - del request_init["channel_group"][field][i][subfield] - else: - del 
request_init["channel_group"][field][subfield] + request_init = {"name": "properties/sample1/bigQueryLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gaa_channel_group.ChannelGroup( - name="name_value", - display_name="display_name_value", - description="description_value", - system_defined=True, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gaa_channel_group.ChannelGroup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_channel_group(request) + response = client.delete_big_query_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gaa_channel_group.ChannelGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.system_defined is True + assert response is None -def test_update_channel_group_rest_use_cached_wrapped_rpc(): +def test_delete_big_query_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -91550,7 +101543,8 @@ def test_update_channel_group_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_channel_group in client._transport._wrapped_methods + client._transport.delete_big_query_link + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -91559,28 +101553,29 @@ def test_update_channel_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_channel_group + client._transport.delete_big_query_link ] = mock_rpc request = {} - client.update_channel_group(request) + client.delete_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_channel_group(request) + client.delete_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_channel_group_rest_required_fields( - request_type=analytics_admin.UpdateChannelGroupRequest, +def test_delete_big_query_link_rest_required_fields( + request_type=analytics_admin.DeleteBigQueryLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -91591,19 +101586,21 @@ def test_update_channel_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_channel_group._get_unset_required_fields(jsonified_request) + ).delete_big_query_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_channel_group._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_big_query_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -91612,7 +101609,7 @@ def test_update_channel_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gaa_channel_group.ChannelGroup() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -91624,48 +101621,36 @@ def test_update_channel_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gaa_channel_group.ChannelGroup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_channel_group(request) + response = client.delete_big_query_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_channel_group_rest_unset_required_fields(): +def 
test_delete_big_query_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_channel_group._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "channelGroup", - "updateMask", - ) - ) - ) + unset_fields = transport.delete_big_query_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_channel_group_rest_interceptors(null_interceptor): +def test_delete_big_query_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -91678,14 +101663,11 @@ def test_update_channel_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_update_channel_group" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_channel_group" + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_big_query_link" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.UpdateChannelGroupRequest.pb( - analytics_admin.UpdateChannelGroupRequest() + pb_message = analytics_admin.DeleteBigQueryLinkRequest.pb( + analytics_admin.DeleteBigQueryLinkRequest() ) transcode.return_value = { "method": "post", @@ -91697,19 +101679,15 @@ def test_update_channel_group_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gaa_channel_group.ChannelGroup.to_json( - gaa_channel_group.ChannelGroup() - ) - request = analytics_admin.UpdateChannelGroupRequest() + request = 
analytics_admin.DeleteBigQueryLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gaa_channel_group.ChannelGroup() - client.update_channel_group( + client.delete_big_query_link( request, metadata=[ ("key", "val"), @@ -91718,11 +101696,10 @@ def test_update_channel_group_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_channel_group_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.UpdateChannelGroupRequest +def test_delete_big_query_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteBigQueryLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -91730,9 +101707,7 @@ def test_update_channel_group_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "channel_group": {"name": "properties/sample1/channelGroups/sample2"} - } + request_init = {"name": "properties/sample1/bigQueryLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -91744,10 +101719,10 @@ def test_update_channel_group_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_channel_group(request) + client.delete_big_query_link(request) -def test_update_channel_group_rest_flattened(): +def test_delete_big_query_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -91756,43 +101731,37 @@ def test_update_channel_group_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gaa_channel_group.ChannelGroup() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = { - "channel_group": {"name": "properties/sample1/channelGroups/sample2"} - } + sample_request = {"name": "properties/sample1/bigQueryLinks/sample2"} # get truthy value for each flattened field mock_args = dict( - channel_group=gaa_channel_group.ChannelGroup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gaa_channel_group.ChannelGroup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_channel_group(**mock_args) + client.delete_big_query_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{channel_group.name=properties/*/channelGroups/*}" - % client.transport._host, + "%s/v1alpha/{name=properties/*/bigQueryLinks/*}" % client.transport._host, args[1], ) -def test_update_channel_group_rest_flattened_error(transport: str = "rest"): +def test_delete_big_query_link_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -91801,14 +101770,13 @@ def test_update_channel_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_channel_group( - analytics_admin.UpdateChannelGroupRequest(), - channel_group=gaa_channel_group.ChannelGroup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_big_query_link( + analytics_admin.DeleteBigQueryLinkRequest(), + name="name_value", ) -def test_update_channel_group_rest_error(): +def test_delete_big_query_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -91817,39 +101785,141 @@ def test_update_channel_group_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteChannelGroupRequest, + analytics_admin.UpdateBigQueryLinkRequest, dict, ], ) -def test_delete_channel_group_rest(request_type): +def test_update_big_query_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/channelGroups/sample2"} + request_init = { + "bigquery_link": {"name": "properties/sample1/bigQueryLinks/sample2"} + } + request_init["bigquery_link"] = { + "name": "properties/sample1/bigQueryLinks/sample2", + "project": "project_value", + "create_time": {"seconds": 751, "nanos": 543}, + "daily_export_enabled": True, + "streaming_export_enabled": True, + "fresh_daily_export_enabled": True, + "include_advertising_id": True, + "export_streams": ["export_streams_value1", "export_streams_value2"], + "excluded_events": ["excluded_events_value1", "excluded_events_value2"], + "dataset_location": "dataset_location_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateBigQueryLinkRequest.meta.fields["bigquery_link"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["bigquery_link"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of 
the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["bigquery_link"][field])): + del request_init["bigquery_link"][field][i][subfield] + else: + del request_init["bigquery_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.BigQueryLink( + name="name_value", + project="project_value", + daily_export_enabled=True, + streaming_export_enabled=True, + fresh_daily_export_enabled=True, + include_advertising_id=True, + export_streams=["export_streams_value"], + excluded_events=["excluded_events_value"], + dataset_location="dataset_location_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.BigQueryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_channel_group(request) + response = client.update_big_query_link(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.BigQueryLink) + assert response.name == "name_value" + assert response.project == "project_value" + assert response.daily_export_enabled is True + assert response.streaming_export_enabled is True + assert response.fresh_daily_export_enabled is True + assert response.include_advertising_id is True + assert response.export_streams == ["export_streams_value"] + assert response.excluded_events == ["excluded_events_value"] + assert response.dataset_location == "dataset_location_value" -def test_delete_channel_group_rest_use_cached_wrapped_rpc(): +def test_update_big_query_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -91864,7 +101934,8 @@ def test_delete_channel_group_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_channel_group in client._transport._wrapped_methods + client._transport.update_big_query_link + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -91873,29 +101944,28 @@ def test_delete_channel_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_channel_group + client._transport.update_big_query_link ] = mock_rpc request = {} - client.delete_channel_group(request) + client.update_big_query_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_channel_group(request) + client.update_big_query_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_channel_group_rest_required_fields( - request_type=analytics_admin.DeleteChannelGroupRequest, +def test_update_big_query_link_rest_required_fields( + request_type=analytics_admin.UpdateBigQueryLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -91906,21 +101976,19 @@ def test_delete_channel_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_channel_group._get_unset_required_fields(jsonified_request) + ).update_big_query_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_channel_group._get_unset_required_fields(jsonified_request) + ).update_big_query_link._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -91929,7 +101997,7 @@ def test_delete_channel_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.BigQueryLink() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -91941,36 +102009,48 @@ def test_delete_channel_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.BigQueryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_channel_group(request) + response = client.update_big_query_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_channel_group_rest_unset_required_fields(): +def test_update_big_query_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_channel_group._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_big_query_link._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "bigqueryLink", + "updateMask", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_channel_group_rest_interceptors(null_interceptor): +def test_update_big_query_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -91983,11 +102063,14 @@ def test_delete_channel_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_channel_group" + transports.AnalyticsAdminServiceRestInterceptor, "post_update_big_query_link" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_big_query_link" ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteChannelGroupRequest.pb( - analytics_admin.DeleteChannelGroupRequest() + post.assert_not_called() + pb_message = analytics_admin.UpdateBigQueryLinkRequest.pb( + analytics_admin.UpdateBigQueryLinkRequest() ) transcode.return_value = { "method": "post", @@ -91999,15 +102082,19 @@ def test_delete_channel_group_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.BigQueryLink.to_json( + resources.BigQueryLink() + ) - request = analytics_admin.DeleteChannelGroupRequest() + request = analytics_admin.UpdateBigQueryLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.BigQueryLink() - client.delete_channel_group( + client.update_big_query_link( request, metadata=[ ("key", "val"), @@ -92016,10 +102103,11 @@ def test_delete_channel_group_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_channel_group_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeleteChannelGroupRequest +def test_update_big_query_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateBigQueryLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ 
-92027,7 +102115,9 @@ def test_delete_channel_group_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/channelGroups/sample2"} + request_init = { + "bigquery_link": {"name": "properties/sample1/bigQueryLinks/sample2"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -92039,10 +102129,10 @@ def test_delete_channel_group_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_channel_group(request) + client.update_big_query_link(request) -def test_delete_channel_group_rest_flattened(): +def test_update_big_query_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -92051,37 +102141,43 @@ def test_delete_channel_group_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.BigQueryLink() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/channelGroups/sample2"} + sample_request = { + "bigquery_link": {"name": "properties/sample1/bigQueryLinks/sample2"} + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + bigquery_link=resources.BigQueryLink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.BigQueryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_channel_group(**mock_args) + client.update_big_query_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/channelGroups/*}" % client.transport._host, + "%s/v1alpha/{bigquery_link.name=properties/*/bigQueryLinks/*}" + % client.transport._host, args[1], ) -def test_delete_channel_group_rest_flattened_error(transport: str = "rest"): +def test_update_big_query_link_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -92090,13 +102186,14 @@ def test_delete_channel_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_channel_group( - analytics_admin.DeleteChannelGroupRequest(), - name="name_value", + client.update_big_query_link( + analytics_admin.UpdateBigQueryLinkRequest(), + bigquery_link=resources.BigQueryLink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_channel_group_rest_error(): +def test_update_big_query_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -92105,45 +102202,66 @@ def test_delete_channel_group_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, + analytics_admin.GetEnhancedMeasurementSettingsRequest, dict, ], ) -def test_set_automated_ga4_configuration_opt_out_rest(request_type): +def test_get_enhanced_measurement_settings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {} + request_init = { + "name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() + return_value = resources.EnhancedMeasurementSettings( + name="name_value", + stream_enabled=True, + scrolls_enabled=True, + outbound_clicks_enabled=True, + site_search_enabled=True, + video_engagement_enabled=True, + file_downloads_enabled=True, + page_changes_enabled=True, + form_interactions_enabled=True, + search_query_parameter="search_query_parameter_value", + uri_query_parameter="uri_query_parameter_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse.pb( - return_value - ) + return_value = resources.EnhancedMeasurementSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_automated_ga4_configuration_opt_out(request) + response = client.get_enhanced_measurement_settings(request) # Establish that the response is the type that we expect. 
- assert isinstance( - response, analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse - ) + assert isinstance(response, resources.EnhancedMeasurementSettings) + assert response.name == "name_value" + assert response.stream_enabled is True + assert response.scrolls_enabled is True + assert response.outbound_clicks_enabled is True + assert response.site_search_enabled is True + assert response.video_engagement_enabled is True + assert response.file_downloads_enabled is True + assert response.page_changes_enabled is True + assert response.form_interactions_enabled is True + assert response.search_query_parameter == "search_query_parameter_value" + assert response.uri_query_parameter == "uri_query_parameter_value" -def test_set_automated_ga4_configuration_opt_out_rest_use_cached_wrapped_rpc(): +def test_get_enhanced_measurement_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -92158,7 +102276,7 @@ def test_set_automated_ga4_configuration_opt_out_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.set_automated_ga4_configuration_opt_out + client._transport.get_enhanced_measurement_settings in client._transport._wrapped_methods ) @@ -92168,29 +102286,29 @@ def test_set_automated_ga4_configuration_opt_out_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.set_automated_ga4_configuration_opt_out + client._transport.get_enhanced_measurement_settings ] = mock_rpc request = {} - client.set_automated_ga4_configuration_opt_out(request) + client.get_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.set_automated_ga4_configuration_opt_out(request) + client.get_enhanced_measurement_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_set_automated_ga4_configuration_opt_out_rest_required_fields( - request_type=analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, +def test_get_enhanced_measurement_settings_rest_required_fields( + request_type=analytics_admin.GetEnhancedMeasurementSettingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["property"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -92201,25 +102319,21 @@ def test_set_automated_ga4_configuration_opt_out_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_automated_ga4_configuration_opt_out._get_unset_required_fields( - jsonified_request - ) + ).get_enhanced_measurement_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["property"] = "property_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_automated_ga4_configuration_opt_out._get_unset_required_fields( - jsonified_request - ) + ).get_enhanced_measurement_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "property" in jsonified_request - assert jsonified_request["property"] == "property_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -92228,7 +102342,7 @@ def test_set_automated_ga4_configuration_opt_out_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() + return_value = resources.EnhancedMeasurementSettings() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -92240,46 +102354,41 @@ def test_set_automated_ga4_configuration_opt_out_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse.pb( - return_value - ) - ) + return_value = resources.EnhancedMeasurementSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_automated_ga4_configuration_opt_out(request) + response = client.get_enhanced_measurement_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_automated_ga4_configuration_opt_out_rest_unset_required_fields(): +def test_get_enhanced_measurement_settings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) unset_fields = ( - transport.set_automated_ga4_configuration_opt_out._get_unset_required_fields({}) + 
transport.get_enhanced_measurement_settings._get_unset_required_fields({}) ) - assert set(unset_fields) == (set(()) & set(("property",))) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_automated_ga4_configuration_opt_out_rest_interceptors(null_interceptor): +def test_get_enhanced_measurement_settings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -92293,15 +102402,15 @@ def test_set_automated_ga4_configuration_opt_out_rest_interceptors(null_intercep path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_set_automated_ga4_configuration_opt_out", + "post_get_enhanced_measurement_settings", ) as post, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_set_automated_ga4_configuration_opt_out", + "pre_get_enhanced_measurement_settings", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest.pb( - analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest() + pb_message = analytics_admin.GetEnhancedMeasurementSettingsRequest.pb( + analytics_admin.GetEnhancedMeasurementSettingsRequest() ) transcode.return_value = { "method": "post", @@ -92313,21 +102422,19 @@ def test_set_automated_ga4_configuration_opt_out_rest_interceptors(null_intercep req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse.to_json( - analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() - ) + req.return_value._content = resources.EnhancedMeasurementSettings.to_json( + resources.EnhancedMeasurementSettings() ) - request = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest() + request 
= analytics_admin.GetEnhancedMeasurementSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() + post.return_value = resources.EnhancedMeasurementSettings() - client.set_automated_ga4_configuration_opt_out( + client.get_enhanced_measurement_settings( request, metadata=[ ("key", "val"), @@ -92339,9 +102446,9 @@ def test_set_automated_ga4_configuration_opt_out_rest_interceptors(null_intercep post.assert_called_once() -def test_set_automated_ga4_configuration_opt_out_rest_bad_request( +def test_get_enhanced_measurement_settings_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, + request_type=analytics_admin.GetEnhancedMeasurementSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -92349,7 +102456,9 @@ def test_set_automated_ga4_configuration_opt_out_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {} + request_init = { + "name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -92361,10 +102470,71 @@ def test_set_automated_ga4_configuration_opt_out_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.set_automated_ga4_configuration_opt_out(request) + client.get_enhanced_measurement_settings(request) -def test_set_automated_ga4_configuration_opt_out_rest_error(): +def test_get_enhanced_measurement_settings_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.EnhancedMeasurementSettings() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.EnhancedMeasurementSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_enhanced_measurement_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/dataStreams/*/enhancedMeasurementSettings}" + % client.transport._host, + args[1], + ) + + +def test_get_enhanced_measurement_settings_rest_flattened_error( + transport: str = "rest", +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_enhanced_measurement_settings( + analytics_admin.GetEnhancedMeasurementSettingsRequest(), + name="name_value", + ) + + +def test_get_enhanced_measurement_settings_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -92373,48 +102543,156 @@ def test_set_automated_ga4_configuration_opt_out_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, + analytics_admin.UpdateEnhancedMeasurementSettingsRequest, dict, ], ) -def test_fetch_automated_ga4_configuration_opt_out_rest(request_type): +def test_update_enhanced_measurement_settings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {} + # send a request that will satisfy transcoding + request_init = { + "enhanced_measurement_settings": { + "name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings" + } + } + request_init["enhanced_measurement_settings"] = { + "name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings", + "stream_enabled": True, + "scrolls_enabled": True, + "outbound_clicks_enabled": True, + "site_search_enabled": True, + "video_engagement_enabled": True, + "file_downloads_enabled": True, + "page_changes_enabled": True, + "form_interactions_enabled": True, + "search_query_parameter": "search_query_parameter_value", + "uri_query_parameter": "uri_query_parameter_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateEnhancedMeasurementSettingsRequest.meta.fields[ + "enhanced_measurement_settings" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "enhanced_measurement_settings" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample 
request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["enhanced_measurement_settings"][field]) + ): + del request_init["enhanced_measurement_settings"][field][i][ + subfield + ] + else: + del request_init["enhanced_measurement_settings"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse( - opt_out=True, + return_value = resources.EnhancedMeasurementSettings( + name="name_value", + stream_enabled=True, + scrolls_enabled=True, + outbound_clicks_enabled=True, + site_search_enabled=True, + video_engagement_enabled=True, + file_downloads_enabled=True, + page_changes_enabled=True, + form_interactions_enabled=True, + search_query_parameter="search_query_parameter_value", + uri_query_parameter="uri_query_parameter_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse.pb( - return_value - ) + return_value = resources.EnhancedMeasurementSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.fetch_automated_ga4_configuration_opt_out(request) + response = 
client.update_enhanced_measurement_settings(request) # Establish that the response is the type that we expect. - assert isinstance( - response, analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse - ) - assert response.opt_out is True + assert isinstance(response, resources.EnhancedMeasurementSettings) + assert response.name == "name_value" + assert response.stream_enabled is True + assert response.scrolls_enabled is True + assert response.outbound_clicks_enabled is True + assert response.site_search_enabled is True + assert response.video_engagement_enabled is True + assert response.file_downloads_enabled is True + assert response.page_changes_enabled is True + assert response.form_interactions_enabled is True + assert response.search_query_parameter == "search_query_parameter_value" + assert response.uri_query_parameter == "uri_query_parameter_value" -def test_fetch_automated_ga4_configuration_opt_out_rest_use_cached_wrapped_rpc(): +def test_update_enhanced_measurement_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -92429,7 +102707,7 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_use_cached_wrapped_rpc() # Ensure method has been cached assert ( - client._transport.fetch_automated_ga4_configuration_opt_out + client._transport.update_enhanced_measurement_settings in client._transport._wrapped_methods ) @@ -92439,29 +102717,28 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_use_cached_wrapped_rpc() "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.fetch_automated_ga4_configuration_opt_out + client._transport.update_enhanced_measurement_settings ] = mock_rpc request = {} - client.fetch_automated_ga4_configuration_opt_out(request) + client.update_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.fetch_automated_ga4_configuration_opt_out(request) + client.update_enhanced_measurement_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_fetch_automated_ga4_configuration_opt_out_rest_required_fields( - request_type=analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, +def test_update_enhanced_measurement_settings_rest_required_fields( + request_type=analytics_admin.UpdateEnhancedMeasurementSettingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["property"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -92472,25 +102749,19 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).fetch_automated_ga4_configuration_opt_out._get_unset_required_fields( - jsonified_request - ) + ).update_enhanced_measurement_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["property"] = "property_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).fetch_automated_ga4_configuration_opt_out._get_unset_required_fields( - jsonified_request - ) + ).update_enhanced_measurement_settings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not 
mixing in. + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "property" in jsonified_request - assert jsonified_request["property"] == "property_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -92499,7 +102770,7 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse() + return_value = resources.EnhancedMeasurementSettings() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -92511,7 +102782,7 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -92521,38 +102792,40 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse.pb( - return_value - ) - ) + return_value = resources.EnhancedMeasurementSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.fetch_automated_ga4_configuration_opt_out(request) + response = client.update_enhanced_measurement_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_fetch_automated_ga4_configuration_opt_out_rest_unset_required_fields(): +def test_update_enhanced_measurement_settings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) unset_fields = ( - transport.fetch_automated_ga4_configuration_opt_out._get_unset_required_fields( - {} + transport.update_enhanced_measurement_settings._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "enhancedMeasurementSettings", + "updateMask", + ) ) ) - assert set(unset_fields) == (set(()) & set(("property",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_fetch_automated_ga4_configuration_opt_out_rest_interceptors(null_interceptor): +def test_update_enhanced_measurement_settings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -92566,15 +102839,15 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_interceptors(null_interc path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_fetch_automated_ga4_configuration_opt_out", + "post_update_enhanced_measurement_settings", ) as post, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_fetch_automated_ga4_configuration_opt_out", + "pre_update_enhanced_measurement_settings", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest.pb( - analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest() + pb_message = analytics_admin.UpdateEnhancedMeasurementSettingsRequest.pb( + analytics_admin.UpdateEnhancedMeasurementSettingsRequest() ) transcode.return_value = { "method": "post", @@ -92586,23 +102859,19 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_interceptors(null_interc 
req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse.to_json( - analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse() - ) + req.return_value._content = resources.EnhancedMeasurementSettings.to_json( + resources.EnhancedMeasurementSettings() ) - request = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest() + request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ( - analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse() - ) + post.return_value = resources.EnhancedMeasurementSettings() - client.fetch_automated_ga4_configuration_opt_out( + client.update_enhanced_measurement_settings( request, metadata=[ ("key", "val"), @@ -92614,9 +102883,9 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_interceptors(null_interc post.assert_called_once() -def test_fetch_automated_ga4_configuration_opt_out_rest_bad_request( +def test_update_enhanced_measurement_settings_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest, + request_type=analytics_admin.UpdateEnhancedMeasurementSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -92624,7 +102893,11 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {} + request_init = { + "enhanced_measurement_settings": { + "name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -92636,10 +102909,79 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.fetch_automated_ga4_configuration_opt_out(request) + client.update_enhanced_measurement_settings(request) -def test_fetch_automated_ga4_configuration_opt_out_rest_error(): +def test_update_enhanced_measurement_settings_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.EnhancedMeasurementSettings() + + # get arguments that satisfy an http rule for this method + sample_request = { + "enhanced_measurement_settings": { + "name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings" + } + } + + # get truthy value for each flattened field + mock_args = dict( + enhanced_measurement_settings=resources.EnhancedMeasurementSettings( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.EnhancedMeasurementSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_enhanced_measurement_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{enhanced_measurement_settings.name=properties/*/dataStreams/*/enhancedMeasurementSettings}" + % client.transport._host, + args[1], + ) + + +def test_update_enhanced_measurement_settings_rest_flattened_error( + transport: str = "rest", +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_enhanced_measurement_settings( + analytics_admin.UpdateEnhancedMeasurementSettingsRequest(), + enhanced_measurement_settings=resources.EnhancedMeasurementSettings( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_enhanced_measurement_settings_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -92648,58 +102990,41 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetBigQueryLinkRequest, + analytics_admin.CreateConnectedSiteTagRequest, dict, ], ) -def test_get_big_query_link_rest(request_type): +def test_create_connected_site_tag_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/bigQueryLinks/sample2"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.BigQueryLink( - name="name_value", - project="project_value", - daily_export_enabled=True, - streaming_export_enabled=True, - fresh_daily_export_enabled=True, - include_advertising_id=True, - export_streams=["export_streams_value"], - excluded_events=["excluded_events_value"], - ) + return_value = analytics_admin.CreateConnectedSiteTagResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.BigQueryLink.pb(return_value) + return_value = analytics_admin.CreateConnectedSiteTagResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_big_query_link(request) + response = client.create_connected_site_tag(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.BigQueryLink) - assert response.name == "name_value" - assert response.project == "project_value" - assert response.daily_export_enabled is True - assert response.streaming_export_enabled is True - assert response.fresh_daily_export_enabled is True - assert response.include_advertising_id is True - assert response.export_streams == ["export_streams_value"] - assert response.excluded_events == ["excluded_events_value"] + assert isinstance(response, analytics_admin.CreateConnectedSiteTagResponse) -def test_get_big_query_link_rest_use_cached_wrapped_rpc(): +def test_create_connected_site_tag_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -92714,7 +103039,8 @@ def test_get_big_query_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - 
client._transport.get_big_query_link in client._transport._wrapped_methods + client._transport.create_connected_site_tag + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -92723,29 +103049,28 @@ def test_get_big_query_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_big_query_link + client._transport.create_connected_site_tag ] = mock_rpc request = {} - client.get_big_query_link(request) + client.create_connected_site_tag(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_big_query_link(request) + client.create_connected_site_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_big_query_link_rest_required_fields( - request_type=analytics_admin.GetBigQueryLinkRequest, +def test_create_connected_site_tag_rest_required_fields( + request_type=analytics_admin.CreateConnectedSiteTagRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -92756,21 +103081,17 @@ def test_get_big_query_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_big_query_link._get_unset_required_fields(jsonified_request) + ).create_connected_site_tag._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_big_query_link._get_unset_required_fields(jsonified_request) + 
).create_connected_site_tag._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -92779,7 +103100,7 @@ def test_get_big_query_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.BigQueryLink() + return_value = analytics_admin.CreateConnectedSiteTagResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -92791,39 +103112,42 @@ def test_get_big_query_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.BigQueryLink.pb(return_value) + return_value = analytics_admin.CreateConnectedSiteTagResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_big_query_link(request) + response = client.create_connected_site_tag(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_big_query_link_rest_unset_required_fields(): +def test_create_connected_site_tag_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_big_query_link._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_connected_site_tag._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("connectedSiteTag",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_big_query_link_rest_interceptors(null_interceptor): +def test_create_connected_site_tag_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -92836,14 +103160,15 @@ def test_get_big_query_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_big_query_link" + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_connected_site_tag", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_big_query_link" + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_connected_site_tag" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetBigQueryLinkRequest.pb( - analytics_admin.GetBigQueryLinkRequest() + pb_message = analytics_admin.CreateConnectedSiteTagRequest.pb( + analytics_admin.CreateConnectedSiteTagRequest() ) transcode.return_value = { "method": "post", @@ -92855,19 +103180,21 @@ def test_get_big_query_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.BigQueryLink.to_json( - resources.BigQueryLink() + req.return_value._content = ( + analytics_admin.CreateConnectedSiteTagResponse.to_json( + analytics_admin.CreateConnectedSiteTagResponse() + ) ) - request = 
analytics_admin.GetBigQueryLinkRequest() + request = analytics_admin.CreateConnectedSiteTagRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.BigQueryLink() + post.return_value = analytics_admin.CreateConnectedSiteTagResponse() - client.get_big_query_link( + client.create_connected_site_tag( request, metadata=[ ("key", "val"), @@ -92879,8 +103206,8 @@ def test_get_big_query_link_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_big_query_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetBigQueryLinkRequest +def test_create_connected_site_tag_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateConnectedSiteTagRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -92888,7 +103215,7 @@ def test_get_big_query_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/bigQueryLinks/sample2"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -92900,66 +103227,10 @@ def test_get_big_query_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_big_query_link(request) - - -def test_get_big_query_link_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = resources.BigQueryLink() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/bigQueryLinks/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.BigQueryLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_big_query_link(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=properties/*/bigQueryLinks/*}" % client.transport._host, - args[1], - ) - - -def test_get_big_query_link_rest_flattened_error(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_big_query_link( - analytics_admin.GetBigQueryLinkRequest(), - name="name_value", - ) + client.create_connected_site_tag(request) -def test_get_big_query_link_rest_error(): +def test_create_connected_site_tag_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -92968,44 +103239,39 @@ def test_get_big_query_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListBigQueryLinksRequest, + analytics_admin.DeleteConnectedSiteTagRequest, dict, ], ) -def test_list_big_query_links_rest(request_type): +def test_delete_connected_site_tag_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListBigQueryLinksResponse( - next_page_token="next_page_token_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListBigQueryLinksResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_big_query_links(request) + response = client.delete_connected_site_tag(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBigQueryLinksPager) - assert response.next_page_token == "next_page_token_value" + assert response is None -def test_list_big_query_links_rest_use_cached_wrapped_rpc(): +def test_delete_connected_site_tag_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -93020,7 +103286,8 @@ def test_list_big_query_links_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_big_query_links in client._transport._wrapped_methods + client._transport.delete_connected_site_tag + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -93029,122 +103296,24 @@ def test_list_big_query_links_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_big_query_links + client._transport.delete_connected_site_tag ] = mock_rpc request = {} - client.list_big_query_links(request) + client.delete_connected_site_tag(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_big_query_links(request) + client.delete_connected_site_tag(request) # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_big_query_links_rest_required_fields( - request_type=analytics_admin.ListBigQueryLinksRequest, -): - transport_class = transports.AnalyticsAdminServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_big_query_links._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_big_query_links._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListBigQueryLinksResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = analytics_admin.ListBigQueryLinksResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_big_query_links(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_big_query_links_rest_unset_required_fields(): - transport = transports.AnalyticsAdminServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_big_query_links._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_big_query_links_rest_interceptors(null_interceptor): +def test_delete_connected_site_tag_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -93157,14 +103326,11 @@ def 
test_list_big_query_links_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_big_query_links" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_big_query_links" + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_connected_site_tag" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.ListBigQueryLinksRequest.pb( - analytics_admin.ListBigQueryLinksRequest() + pb_message = analytics_admin.DeleteConnectedSiteTagRequest.pb( + analytics_admin.DeleteConnectedSiteTagRequest() ) transcode.return_value = { "method": "post", @@ -93176,19 +103342,15 @@ def test_list_big_query_links_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.ListBigQueryLinksResponse.to_json( - analytics_admin.ListBigQueryLinksResponse() - ) - request = analytics_admin.ListBigQueryLinksRequest() + request = analytics_admin.DeleteConnectedSiteTagRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListBigQueryLinksResponse() - client.list_big_query_links( + client.delete_connected_site_tag( request, metadata=[ ("key", "val"), @@ -93197,11 +103359,10 @@ def test_list_big_query_links_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_list_big_query_links_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListBigQueryLinksRequest +def test_delete_connected_site_tag_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteConnectedSiteTagRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -93209,7 +103370,7 
@@ def test_list_big_query_links_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -93221,191 +103382,53 @@ def test_list_big_query_links_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_big_query_links(request) - - -def test_list_big_query_links_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListBigQueryLinksResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListBigQueryLinksResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_big_query_links(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/bigQueryLinks" % client.transport._host, - args[1], - ) - - -def test_list_big_query_links_rest_flattened_error(transport: str = "rest"): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_big_query_links( - analytics_admin.ListBigQueryLinksRequest(), - parent="parent_value", - ) + client.delete_connected_site_tag(request) -def test_list_big_query_links_rest_pager(transport: str = "rest"): +def test_delete_connected_site_tag_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - resources.BigQueryLink(), - resources.BigQueryLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[], - next_page_token="def", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListBigQueryLinksResponse( - bigquery_links=[ - resources.BigQueryLink(), - resources.BigQueryLink(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListBigQueryLinksResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_big_query_links(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.BigQueryLink) for i in results) - - pages = list(client.list_big_query_links(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetEnhancedMeasurementSettingsRequest, + analytics_admin.ListConnectedSiteTagsRequest, dict, ], ) -def test_get_enhanced_measurement_settings_rest(request_type): +def test_list_connected_site_tags_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - 
"name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings" - } + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.EnhancedMeasurementSettings( - name="name_value", - stream_enabled=True, - scrolls_enabled=True, - outbound_clicks_enabled=True, - site_search_enabled=True, - video_engagement_enabled=True, - file_downloads_enabled=True, - page_changes_enabled=True, - form_interactions_enabled=True, - search_query_parameter="search_query_parameter_value", - uri_query_parameter="uri_query_parameter_value", - ) + return_value = analytics_admin.ListConnectedSiteTagsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.EnhancedMeasurementSettings.pb(return_value) + return_value = analytics_admin.ListConnectedSiteTagsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_enhanced_measurement_settings(request) + response = client.list_connected_site_tags(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.EnhancedMeasurementSettings) - assert response.name == "name_value" - assert response.stream_enabled is True - assert response.scrolls_enabled is True - assert response.outbound_clicks_enabled is True - assert response.site_search_enabled is True - assert response.video_engagement_enabled is True - assert response.file_downloads_enabled is True - assert response.page_changes_enabled is True - assert response.form_interactions_enabled is True - assert response.search_query_parameter == "search_query_parameter_value" - assert response.uri_query_parameter == "uri_query_parameter_value" + assert isinstance(response, analytics_admin.ListConnectedSiteTagsResponse) -def test_get_enhanced_measurement_settings_rest_use_cached_wrapped_rpc(): +def test_list_connected_site_tags_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -93420,7 +103443,7 @@ def test_get_enhanced_measurement_settings_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_enhanced_measurement_settings + client._transport.list_connected_site_tags in client._transport._wrapped_methods ) @@ -93430,109 +103453,24 @@ def test_get_enhanced_measurement_settings_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_enhanced_measurement_settings + client._transport.list_connected_site_tags ] = mock_rpc request = {} - client.get_enhanced_measurement_settings(request) + client.list_connected_site_tags(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_enhanced_measurement_settings(request) + client.list_connected_site_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_enhanced_measurement_settings_rest_required_fields( - request_type=analytics_admin.GetEnhancedMeasurementSettingsRequest, -): - transport_class = transports.AnalyticsAdminServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_enhanced_measurement_settings._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_enhanced_measurement_settings._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = resources.EnhancedMeasurementSettings() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.EnhancedMeasurementSettings.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_enhanced_measurement_settings(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_enhanced_measurement_settings_rest_unset_required_fields(): - transport = transports.AnalyticsAdminServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.get_enhanced_measurement_settings._get_unset_required_fields({}) - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_enhanced_measurement_settings_rest_interceptors(null_interceptor): +def test_list_connected_site_tags_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -93545,16 +103483,14 @@ def test_get_enhanced_measurement_settings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( 
path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_get_enhanced_measurement_settings", + transports.AnalyticsAdminServiceRestInterceptor, "post_list_connected_site_tags" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_get_enhanced_measurement_settings", + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_connected_site_tags" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetEnhancedMeasurementSettingsRequest.pb( - analytics_admin.GetEnhancedMeasurementSettingsRequest() + pb_message = analytics_admin.ListConnectedSiteTagsRequest.pb( + analytics_admin.ListConnectedSiteTagsRequest() ) transcode.return_value = { "method": "post", @@ -93566,19 +103502,21 @@ def test_get_enhanced_measurement_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.EnhancedMeasurementSettings.to_json( - resources.EnhancedMeasurementSettings() + req.return_value._content = ( + analytics_admin.ListConnectedSiteTagsResponse.to_json( + analytics_admin.ListConnectedSiteTagsResponse() + ) ) - request = analytics_admin.GetEnhancedMeasurementSettingsRequest() + request = analytics_admin.ListConnectedSiteTagsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.EnhancedMeasurementSettings() + post.return_value = analytics_admin.ListConnectedSiteTagsResponse() - client.get_enhanced_measurement_settings( + client.list_connected_site_tags( request, metadata=[ ("key", "val"), @@ -93590,9 +103528,8 @@ def test_get_enhanced_measurement_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_enhanced_measurement_settings_rest_bad_request( - transport: str = "rest", - 
request_type=analytics_admin.GetEnhancedMeasurementSettingsRequest, +def test_list_connected_site_tags_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListConnectedSiteTagsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -93600,9 +103537,7 @@ def test_get_enhanced_measurement_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings" - } + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -93614,71 +103549,10 @@ def test_get_enhanced_measurement_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_enhanced_measurement_settings(request) - - -def test_get_enhanced_measurement_settings_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = resources.EnhancedMeasurementSettings() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.EnhancedMeasurementSettings.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_enhanced_measurement_settings(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataStreams/*/enhancedMeasurementSettings}" - % client.transport._host, - args[1], - ) - - -def test_get_enhanced_measurement_settings_rest_flattened_error( - transport: str = "rest", -): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_enhanced_measurement_settings( - analytics_admin.GetEnhancedMeasurementSettingsRequest(), - name="name_value", - ) + client.list_connected_site_tags(request) -def test_get_enhanced_measurement_settings_rest_error(): +def test_list_connected_site_tags_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -93687,156 +103561,46 @@ def test_get_enhanced_measurement_settings_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateEnhancedMeasurementSettingsRequest, + analytics_admin.FetchConnectedGa4PropertyRequest, dict, ], ) -def test_update_enhanced_measurement_settings_rest(request_type): +def test_fetch_connected_ga4_property_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "enhanced_measurement_settings": { - "name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings" - } - } - request_init["enhanced_measurement_settings"] = { - "name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings", - "stream_enabled": True, - "scrolls_enabled": True, - "outbound_clicks_enabled": True, - "site_search_enabled": True, - "video_engagement_enabled": True, - "file_downloads_enabled": True, - "page_changes_enabled": True, - "form_interactions_enabled": True, - "search_query_parameter": "search_query_parameter_value", - "uri_query_parameter": "uri_query_parameter_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateEnhancedMeasurementSettingsRequest.meta.fields[ - "enhanced_measurement_settings" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "enhanced_measurement_settings" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample 
request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["enhanced_measurement_settings"][field]) - ): - del request_init["enhanced_measurement_settings"][field][i][ - subfield - ] - else: - del request_init["enhanced_measurement_settings"][field][subfield] + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.EnhancedMeasurementSettings( - name="name_value", - stream_enabled=True, - scrolls_enabled=True, - outbound_clicks_enabled=True, - site_search_enabled=True, - video_engagement_enabled=True, - file_downloads_enabled=True, - page_changes_enabled=True, - form_interactions_enabled=True, - search_query_parameter="search_query_parameter_value", - uri_query_parameter="uri_query_parameter_value", + return_value = analytics_admin.FetchConnectedGa4PropertyResponse( + property="property_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.EnhancedMeasurementSettings.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_enhanced_measurement_settings(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.EnhancedMeasurementSettings) - assert response.name == "name_value" - assert response.stream_enabled is True - assert response.scrolls_enabled is True - assert response.outbound_clicks_enabled is True - assert response.site_search_enabled is True - assert response.video_engagement_enabled is True - assert response.file_downloads_enabled is True - assert response.page_changes_enabled is True - assert response.form_interactions_enabled is True - assert response.search_query_parameter == "search_query_parameter_value" - assert response.uri_query_parameter == "uri_query_parameter_value" + return_value = analytics_admin.FetchConnectedGa4PropertyResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_connected_ga4_property(request) -def test_update_enhanced_measurement_settings_rest_use_cached_wrapped_rpc(): + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_admin.FetchConnectedGa4PropertyResponse) + assert response.property == "property_value" + + +def test_fetch_connected_ga4_property_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -93851,7 +103615,7 @@ def test_update_enhanced_measurement_settings_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_enhanced_measurement_settings + client._transport.fetch_connected_ga4_property in client._transport._wrapped_methods ) @@ -93861,28 +103625,29 @@ def test_update_enhanced_measurement_settings_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.update_enhanced_measurement_settings + client._transport.fetch_connected_ga4_property ] = mock_rpc request = {} - client.update_enhanced_measurement_settings(request) + client.fetch_connected_ga4_property(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_enhanced_measurement_settings(request) + client.fetch_connected_ga4_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_enhanced_measurement_settings_rest_required_fields( - request_type=analytics_admin.UpdateEnhancedMeasurementSettingsRequest, +def test_fetch_connected_ga4_property_rest_required_fields( + request_type=analytics_admin.FetchConnectedGa4PropertyRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["property"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -93890,22 +103655,29 @@ def test_update_enhanced_measurement_settings_rest_required_fields( ) # verify fields with default values are dropped + assert "property" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_enhanced_measurement_settings._get_unset_required_fields(jsonified_request) + ).fetch_connected_ga4_property._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "property" in jsonified_request + assert jsonified_request["property"] == request_init["property"] + + jsonified_request["property"] = "property_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_enhanced_measurement_settings._get_unset_required_fields(jsonified_request) + 
).fetch_connected_ga4_property._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set(("property",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "property" in jsonified_request + assert jsonified_request["property"] == "property_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -93914,7 +103686,7 @@ def test_update_enhanced_measurement_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.EnhancedMeasurementSettings() + return_value = analytics_admin.FetchConnectedGa4PropertyResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -93926,50 +103698,47 @@ def test_update_enhanced_measurement_settings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.EnhancedMeasurementSettings.pb(return_value) + return_value = analytics_admin.FetchConnectedGa4PropertyResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_enhanced_measurement_settings(request) + response = client.fetch_connected_ga4_property(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + 
( + "property", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_enhanced_measurement_settings_rest_unset_required_fields(): +def test_fetch_connected_ga4_property_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.update_enhanced_measurement_settings._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "enhancedMeasurementSettings", - "updateMask", - ) - ) - ) + unset_fields = transport.fetch_connected_ga4_property._get_unset_required_fields({}) + assert set(unset_fields) == (set(("property",)) & set(("property",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_enhanced_measurement_settings_rest_interceptors(null_interceptor): +def test_fetch_connected_ga4_property_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -93983,15 +103752,15 @@ def test_update_enhanced_measurement_settings_rest_interceptors(null_interceptor path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_update_enhanced_measurement_settings", + "post_fetch_connected_ga4_property", ) as post, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_update_enhanced_measurement_settings", + "pre_fetch_connected_ga4_property", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateEnhancedMeasurementSettingsRequest.pb( - analytics_admin.UpdateEnhancedMeasurementSettingsRequest() + pb_message = analytics_admin.FetchConnectedGa4PropertyRequest.pb( + analytics_admin.FetchConnectedGa4PropertyRequest() ) transcode.return_value = { "method": "post", @@ -94003,19 
+103772,21 @@ def test_update_enhanced_measurement_settings_rest_interceptors(null_interceptor req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.EnhancedMeasurementSettings.to_json( - resources.EnhancedMeasurementSettings() + req.return_value._content = ( + analytics_admin.FetchConnectedGa4PropertyResponse.to_json( + analytics_admin.FetchConnectedGa4PropertyResponse() + ) ) - request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() + request = analytics_admin.FetchConnectedGa4PropertyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.EnhancedMeasurementSettings() + post.return_value = analytics_admin.FetchConnectedGa4PropertyResponse() - client.update_enhanced_measurement_settings( + client.fetch_connected_ga4_property( request, metadata=[ ("key", "val"), @@ -94027,9 +103798,9 @@ def test_update_enhanced_measurement_settings_rest_interceptors(null_interceptor post.assert_called_once() -def test_update_enhanced_measurement_settings_rest_bad_request( +def test_fetch_connected_ga4_property_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.UpdateEnhancedMeasurementSettingsRequest, + request_type=analytics_admin.FetchConnectedGa4PropertyRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -94037,11 +103808,7 @@ def test_update_enhanced_measurement_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "enhanced_measurement_settings": { - "name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings" - } - } + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -94053,79 +103820,10 @@ def test_update_enhanced_measurement_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_enhanced_measurement_settings(request) - - -def test_update_enhanced_measurement_settings_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = resources.EnhancedMeasurementSettings() - - # get arguments that satisfy an http rule for this method - sample_request = { - "enhanced_measurement_settings": { - "name": "properties/sample1/dataStreams/sample2/enhancedMeasurementSettings" - } - } - - # get truthy value for each flattened field - mock_args = dict( - enhanced_measurement_settings=resources.EnhancedMeasurementSettings( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.EnhancedMeasurementSettings.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_enhanced_measurement_settings(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{enhanced_measurement_settings.name=properties/*/dataStreams/*/enhancedMeasurementSettings}" - % client.transport._host, - args[1], - ) - - -def test_update_enhanced_measurement_settings_rest_flattened_error( - transport: str = "rest", -): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_enhanced_measurement_settings( - analytics_admin.UpdateEnhancedMeasurementSettingsRequest(), - enhanced_measurement_settings=resources.EnhancedMeasurementSettings( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + client.fetch_connected_ga4_property(request) -def test_update_enhanced_measurement_settings_rest_error(): +def test_fetch_connected_ga4_property_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -94134,41 +103832,46 @@ def test_update_enhanced_measurement_settings_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateConnectedSiteTagRequest, + analytics_admin.GetAdSenseLinkRequest, dict, ], ) -def test_create_connected_site_tag_rest(request_type): +def test_get_ad_sense_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"name": "properties/sample1/adSenseLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.CreateConnectedSiteTagResponse() + return_value = resources.AdSenseLink( + name="name_value", + ad_client_code="ad_client_code_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.CreateConnectedSiteTagResponse.pb(return_value) + return_value = resources.AdSenseLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_connected_site_tag(request) + response = client.get_ad_sense_link(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.CreateConnectedSiteTagResponse) + assert isinstance(response, resources.AdSenseLink) + assert response.name == "name_value" + assert response.ad_client_code == "ad_client_code_value" -def test_create_connected_site_tag_rest_use_cached_wrapped_rpc(): +def test_get_ad_sense_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -94182,10 +103885,7 @@ def test_create_connected_site_tag_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_connected_site_tag - in client._transport._wrapped_methods - ) + assert client._transport.get_ad_sense_link in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -94193,28 +103893,29 @@ def test_create_connected_site_tag_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.create_connected_site_tag + client._transport.get_ad_sense_link ] = mock_rpc request = {} - client.create_connected_site_tag(request) + client.get_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_connected_site_tag(request) + client.get_ad_sense_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_connected_site_tag_rest_required_fields( - request_type=analytics_admin.CreateConnectedSiteTagRequest, +def test_get_ad_sense_link_rest_required_fields( + request_type=analytics_admin.GetAdSenseLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -94225,17 +103926,21 @@ def test_create_connected_site_tag_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_connected_site_tag._get_unset_required_fields(jsonified_request) + ).get_ad_sense_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_connected_site_tag._get_unset_required_fields(jsonified_request) + ).get_ad_sense_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -94244,7 +103949,7 @@ def 
test_create_connected_site_tag_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.CreateConnectedSiteTagResponse() + return_value = resources.AdSenseLink() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -94256,42 +103961,39 @@ def test_create_connected_site_tag_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.CreateConnectedSiteTagResponse.pb( - return_value - ) + return_value = resources.AdSenseLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_connected_site_tag(request) + response = client.get_ad_sense_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_connected_site_tag_rest_unset_required_fields(): +def test_get_ad_sense_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_connected_site_tag._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("connectedSiteTag",))) + unset_fields = transport.get_ad_sense_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_create_connected_site_tag_rest_interceptors(null_interceptor): +def test_get_ad_sense_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -94304,15 +104006,14 @@ def test_create_connected_site_tag_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_create_connected_site_tag", + transports.AnalyticsAdminServiceRestInterceptor, "post_get_ad_sense_link" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_connected_site_tag" + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_ad_sense_link" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateConnectedSiteTagRequest.pb( - analytics_admin.CreateConnectedSiteTagRequest() + pb_message = analytics_admin.GetAdSenseLinkRequest.pb( + analytics_admin.GetAdSenseLinkRequest() ) transcode.return_value = { "method": "post", @@ -94324,21 +104025,19 @@ def test_create_connected_site_tag_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.CreateConnectedSiteTagResponse.to_json( - analytics_admin.CreateConnectedSiteTagResponse() - ) + req.return_value._content = resources.AdSenseLink.to_json( + resources.AdSenseLink() ) - request = analytics_admin.CreateConnectedSiteTagRequest() + request = analytics_admin.GetAdSenseLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.CreateConnectedSiteTagResponse() + post.return_value = resources.AdSenseLink() - client.create_connected_site_tag( + client.get_ad_sense_link( request, metadata=[ ("key", "val"), @@ -94350,8 
+104049,8 @@ def test_create_connected_site_tag_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_connected_site_tag_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateConnectedSiteTagRequest +def test_get_ad_sense_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetAdSenseLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -94359,7 +104058,7 @@ def test_create_connected_site_tag_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"name": "properties/sample1/adSenseLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -94371,10 +104070,66 @@ def test_create_connected_site_tag_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_connected_site_tag(request) + client.get_ad_sense_link(request) -def test_create_connected_site_tag_rest_error(): +def test_get_ad_sense_link_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.AdSenseLink() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1/adSenseLinks/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AdSenseLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_ad_sense_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/adSenseLinks/*}" % client.transport._host, + args[1], + ) + + +def test_get_ad_sense_link_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_ad_sense_link( + analytics_admin.GetAdSenseLinkRequest(), + name="name_value", + ) + + +def test_get_ad_sense_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -94383,39 +104138,117 @@ def test_create_connected_site_tag_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteConnectedSiteTagRequest, + analytics_admin.CreateAdSenseLinkRequest, dict, ], ) -def test_delete_connected_site_tag_rest(request_type): +def test_create_ad_sense_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"parent": "properties/sample1"} + request_init["adsense_link"] = { + "name": "name_value", + "ad_client_code": "ad_client_code_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreateAdSenseLinkRequest.meta.fields["adsense_link"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["adsense_link"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["adsense_link"][field])): + del request_init["adsense_link"][field][i][subfield] + else: + del 
request_init["adsense_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.AdSenseLink( + name="name_value", + ad_client_code="ad_client_code_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.AdSenseLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_connected_site_tag(request) + response = client.create_ad_sense_link(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.AdSenseLink) + assert response.name == "name_value" + assert response.ad_client_code == "ad_client_code_value" -def test_delete_connected_site_tag_rest_use_cached_wrapped_rpc(): +def test_create_ad_sense_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -94428,36 +104261,127 @@ def test_delete_connected_site_tag_rest_use_cached_wrapped_rpc(): assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() - # Ensure method has been cached - assert ( - client._transport.delete_connected_site_tag - in client._transport._wrapped_methods - ) + # Ensure method has been cached + assert ( + client._transport.create_ad_sense_link in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # 
operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_ad_sense_link + ] = mock_rpc + + request = {} + client.create_ad_sense_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_ad_sense_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_ad_sense_link_rest_required_fields( + request_type=analytics_admin.CreateAdSenseLinkRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_ad_sense_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_ad_sense_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.AdSenseLink() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.AdSenseLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_connected_site_tag - ] = mock_rpc + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - request = {} - client.delete_connected_site_tag(request) + response = client.create_ad_sense_link(request) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - client.delete_connected_site_tag(request) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 +def test_create_ad_sense_link_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_ad_sense_link._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "adsenseLink", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_connected_site_tag_rest_interceptors(null_interceptor): +def test_create_ad_sense_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -94470,11 +104394,14 @@ def test_delete_connected_site_tag_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_connected_site_tag" + transports.AnalyticsAdminServiceRestInterceptor, "post_create_ad_sense_link" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_ad_sense_link" ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteConnectedSiteTagRequest.pb( - analytics_admin.DeleteConnectedSiteTagRequest() + post.assert_not_called() + pb_message = analytics_admin.CreateAdSenseLinkRequest.pb( + analytics_admin.CreateAdSenseLinkRequest() ) transcode.return_value = { "method": "post", @@ -94486,15 +104413,19 @@ def test_delete_connected_site_tag_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 
req.return_value.request = PreparedRequest() + req.return_value._content = resources.AdSenseLink.to_json( + resources.AdSenseLink() + ) - request = analytics_admin.DeleteConnectedSiteTagRequest() + request = analytics_admin.CreateAdSenseLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.AdSenseLink() - client.delete_connected_site_tag( + client.create_ad_sense_link( request, metadata=[ ("key", "val"), @@ -94503,10 +104434,11 @@ def test_delete_connected_site_tag_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_connected_site_tag_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeleteConnectedSiteTagRequest +def test_create_ad_sense_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateAdSenseLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -94514,7 +104446,7 @@ def test_delete_connected_site_tag_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -94526,10 +104458,68 @@ def test_delete_connected_site_tag_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_connected_site_tag(request) + client.create_ad_sense_link(request) -def test_delete_connected_site_tag_rest_error(): +def test_create_ad_sense_link_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.AdSenseLink() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + adsense_link=resources.AdSenseLink(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AdSenseLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_ad_sense_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=properties/*}/adSenseLinks" % client.transport._host, + args[1], + ) + + +def test_create_ad_sense_link_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_ad_sense_link( + analytics_admin.CreateAdSenseLinkRequest(), + parent="parent_value", + adsense_link=resources.AdSenseLink(name="name_value"), + ) + + +def test_create_ad_sense_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -94538,41 +104528,39 @@ def test_delete_connected_site_tag_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListConnectedSiteTagsRequest, + analytics_admin.DeleteAdSenseLinkRequest, dict, ], ) -def test_list_connected_site_tags_rest(request_type): +def test_delete_ad_sense_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"name": "properties/sample1/adSenseLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListConnectedSiteTagsResponse() + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListConnectedSiteTagsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_connected_site_tags(request) + response = client.delete_ad_sense_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.ListConnectedSiteTagsResponse) + assert response is None -def test_list_connected_site_tags_rest_use_cached_wrapped_rpc(): +def test_delete_ad_sense_link_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -94587,8 +104575,7 @@ def test_list_connected_site_tags_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_connected_site_tags - in client._transport._wrapped_methods + client._transport.delete_ad_sense_link in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -94597,24 +104584,104 @@ def test_list_connected_site_tags_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_connected_site_tags + client._transport.delete_ad_sense_link ] = mock_rpc request = {} - client.list_connected_site_tags(request) + client.delete_ad_sense_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_connected_site_tags(request) + client.delete_ad_sense_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +def test_delete_ad_sense_link_rest_required_fields( + request_type=analytics_admin.DeleteAdSenseLinkRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_ad_sense_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_ad_sense_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_ad_sense_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_ad_sense_link_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_ad_sense_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_connected_site_tags_rest_interceptors(null_interceptor): +def test_delete_ad_sense_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -94627,14 +104694,11 @@ def test_list_connected_site_tags_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_connected_site_tags" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_connected_site_tags" + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_ad_sense_link" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = 
analytics_admin.ListConnectedSiteTagsRequest.pb( - analytics_admin.ListConnectedSiteTagsRequest() + pb_message = analytics_admin.DeleteAdSenseLinkRequest.pb( + analytics_admin.DeleteAdSenseLinkRequest() ) transcode.return_value = { "method": "post", @@ -94646,21 +104710,15 @@ def test_list_connected_site_tags_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.ListConnectedSiteTagsResponse.to_json( - analytics_admin.ListConnectedSiteTagsResponse() - ) - ) - request = analytics_admin.ListConnectedSiteTagsRequest() + request = analytics_admin.DeleteAdSenseLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListConnectedSiteTagsResponse() - client.list_connected_site_tags( + client.delete_ad_sense_link( request, metadata=[ ("key", "val"), @@ -94669,11 +104727,10 @@ def test_list_connected_site_tags_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_list_connected_site_tags_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListConnectedSiteTagsRequest +def test_delete_ad_sense_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteAdSenseLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -94681,7 +104738,7 @@ def test_list_connected_site_tags_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"name": "properties/sample1/adSenseLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -94693,10 +104750,64 @@ def test_list_connected_site_tags_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_connected_site_tags(request) + client.delete_ad_sense_link(request) -def test_list_connected_site_tags_rest_error(): +def test_delete_ad_sense_link_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1/adSenseLinks/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_ad_sense_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/adSenseLinks/*}" % client.transport._host, + args[1], + ) + + +def test_delete_ad_sense_link_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_ad_sense_link( + analytics_admin.DeleteAdSenseLinkRequest(), + name="name_value", + ) + + +def test_delete_ad_sense_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -94705,46 +104816,44 @@ def test_list_connected_site_tags_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.FetchConnectedGa4PropertyRequest, + analytics_admin.ListAdSenseLinksRequest, dict, ], ) -def test_fetch_connected_ga4_property_rest(request_type): +def test_list_ad_sense_links_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.FetchConnectedGa4PropertyResponse( - property="property_value", + return_value = analytics_admin.ListAdSenseLinksResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.FetchConnectedGa4PropertyResponse.pb( - return_value - ) + return_value = analytics_admin.ListAdSenseLinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.fetch_connected_ga4_property(request) + response = client.list_ad_sense_links(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.FetchConnectedGa4PropertyResponse) - assert response.property == "property_value" + assert isinstance(response, pagers.ListAdSenseLinksPager) + assert response.next_page_token == "next_page_token_value" -def test_fetch_connected_ga4_property_rest_use_cached_wrapped_rpc(): +def test_list_ad_sense_links_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -94759,8 +104868,7 @@ def test_fetch_connected_ga4_property_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.fetch_connected_ga4_property - in client._transport._wrapped_methods + client._transport.list_ad_sense_links in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -94769,29 +104877,29 @@ def test_fetch_connected_ga4_property_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.fetch_connected_ga4_property + client._transport.list_ad_sense_links ] = mock_rpc request = {} - client.fetch_connected_ga4_property(request) + client.list_ad_sense_links(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.fetch_connected_ga4_property(request) + client.list_ad_sense_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_fetch_connected_ga4_property_rest_required_fields( - request_type=analytics_admin.FetchConnectedGa4PropertyRequest, +def test_list_ad_sense_links_rest_required_fields( + request_type=analytics_admin.ListAdSenseLinksRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["property"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -94799,29 +104907,31 @@ def test_fetch_connected_ga4_property_rest_required_fields( ) # verify fields with default values are dropped - assert "property" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).fetch_connected_ga4_property._get_unset_required_fields(jsonified_request) + ).list_ad_sense_links._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "property" in jsonified_request - assert jsonified_request["property"] == request_init["property"] - jsonified_request["property"] = "property_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).fetch_connected_ga4_property._get_unset_required_fields(jsonified_request) + ).list_ad_sense_links._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("property",)) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "property" in jsonified_request - assert jsonified_request["property"] == "property_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -94830,7 +104940,7 @@ def test_fetch_connected_ga4_property_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.FetchConnectedGa4PropertyResponse() + return_value = analytics_admin.ListAdSenseLinksResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -94851,38 +104961,38 @@ def test_fetch_connected_ga4_property_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.FetchConnectedGa4PropertyResponse.pb( - return_value - ) + return_value = analytics_admin.ListAdSenseLinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.fetch_connected_ga4_property(request) + response = client.list_ad_sense_links(request) - expected_params = [ - ( - "property", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_fetch_connected_ga4_property_rest_unset_required_fields(): +def test_list_ad_sense_links_rest_unset_required_fields(): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.fetch_connected_ga4_property._get_unset_required_fields({}) - assert set(unset_fields) == (set(("property",)) & set(("property",))) + unset_fields = transport.list_ad_sense_links._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_fetch_connected_ga4_property_rest_interceptors(null_interceptor): +def test_list_ad_sense_links_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -94895,16 +105005,14 @@ def test_fetch_connected_ga4_property_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_fetch_connected_ga4_property", + transports.AnalyticsAdminServiceRestInterceptor, "post_list_ad_sense_links" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_fetch_connected_ga4_property", + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_ad_sense_links" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.FetchConnectedGa4PropertyRequest.pb( - analytics_admin.FetchConnectedGa4PropertyRequest() + pb_message = analytics_admin.ListAdSenseLinksRequest.pb( + analytics_admin.ListAdSenseLinksRequest() ) transcode.return_value = { "method": "post", @@ -94916,21 +105024,19 @@ def test_fetch_connected_ga4_property_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.FetchConnectedGa4PropertyResponse.to_json( - 
analytics_admin.FetchConnectedGa4PropertyResponse() - ) + req.return_value._content = analytics_admin.ListAdSenseLinksResponse.to_json( + analytics_admin.ListAdSenseLinksResponse() ) - request = analytics_admin.FetchConnectedGa4PropertyRequest() + request = analytics_admin.ListAdSenseLinksRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.FetchConnectedGa4PropertyResponse() + post.return_value = analytics_admin.ListAdSenseLinksResponse() - client.fetch_connected_ga4_property( + client.list_ad_sense_links( request, metadata=[ ("key", "val"), @@ -94938,84 +105044,200 @@ def test_fetch_connected_ga4_property_rest_interceptors(null_interceptor): ], ) - pre.assert_called_once() - post.assert_called_once() + pre.assert_called_once() + post.assert_called_once() + + +def test_list_ad_sense_links_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListAdSenseLinksRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_ad_sense_links(request) + + +def test_list_ad_sense_links_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_admin.ListAdSenseLinksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_admin.ListAdSenseLinksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_ad_sense_links(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=properties/*}/adSenseLinks" % client.transport._host, + args[1], + ) + + +def test_list_ad_sense_links_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_ad_sense_links( + analytics_admin.ListAdSenseLinksRequest(), + parent="parent_value", + ) -def test_fetch_connected_ga4_property_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.FetchConnectedGa4PropertyRequest, -): +def test_list_ad_sense_links_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + resources.AdSenseLink(), + resources.AdSenseLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[], + next_page_token="def", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + resources.AdSenseLink(), + ], + ), + ) + # Two responses for two calls + response = response + response - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.fetch_connected_ga4_property(request) + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListAdSenseLinksResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"parent": "properties/sample1"} -def test_fetch_connected_ga4_property_rest_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pager = client.list_ad_sense_links(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AdSenseLink) for i in results) + + pages = list(client.list_ad_sense_links(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetAdSenseLinkRequest, + analytics_admin.GetEventCreateRuleRequest, dict, ], ) -def test_get_ad_sense_link_rest(request_type): +def test_get_event_create_rule_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/adSenseLinks/sample2"} + request_init = { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.AdSenseLink( + return_value = event_create_and_edit.EventCreateRule( name="name_value", - ad_client_code="ad_client_code_value", + destination_event="destination_event_value", + source_copy_parameters=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AdSenseLink.pb(return_value) + return_value = event_create_and_edit.EventCreateRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_ad_sense_link(request) + response = client.get_event_create_rule(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.AdSenseLink) + assert isinstance(response, event_create_and_edit.EventCreateRule) assert response.name == "name_value" - assert response.ad_client_code == "ad_client_code_value" + assert response.destination_event == "destination_event_value" + assert response.source_copy_parameters is True -def test_get_ad_sense_link_rest_use_cached_wrapped_rpc(): +def test_get_event_create_rule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -95029,7 +105251,10 @@ def test_get_ad_sense_link_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_ad_sense_link in client._transport._wrapped_methods + assert ( + client._transport.get_event_create_rule + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ 
-95037,24 +105262,24 @@ def test_get_ad_sense_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_ad_sense_link + client._transport.get_event_create_rule ] = mock_rpc request = {} - client.get_ad_sense_link(request) + client.get_event_create_rule(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_ad_sense_link(request) + client.get_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_ad_sense_link_rest_required_fields( - request_type=analytics_admin.GetAdSenseLinkRequest, +def test_get_event_create_rule_rest_required_fields( + request_type=analytics_admin.GetEventCreateRuleRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -95070,7 +105295,7 @@ def test_get_ad_sense_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_ad_sense_link._get_unset_required_fields(jsonified_request) + ).get_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -95079,7 +105304,7 @@ def test_get_ad_sense_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_ad_sense_link._get_unset_required_fields(jsonified_request) + ).get_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -95093,7 +105318,7 @@ def test_get_ad_sense_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.AdSenseLink() + return_value = event_create_and_edit.EventCreateRule() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -95114,30 +105339,30 @@ def test_get_ad_sense_link_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AdSenseLink.pb(return_value) + return_value = event_create_and_edit.EventCreateRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_ad_sense_link(request) + response = client.get_event_create_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_ad_sense_link_rest_unset_required_fields(): +def test_get_event_create_rule_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_ad_sense_link._get_unset_required_fields({}) + unset_fields = transport.get_event_create_rule._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_ad_sense_link_rest_interceptors(null_interceptor): +def test_get_event_create_rule_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -95150,14 +105375,14 @@ def test_get_ad_sense_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_ad_sense_link" + 
transports.AnalyticsAdminServiceRestInterceptor, "post_get_event_create_rule" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_ad_sense_link" + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_event_create_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetAdSenseLinkRequest.pb( - analytics_admin.GetAdSenseLinkRequest() + pb_message = analytics_admin.GetEventCreateRuleRequest.pb( + analytics_admin.GetEventCreateRuleRequest() ) transcode.return_value = { "method": "post", @@ -95169,19 +105394,19 @@ def test_get_ad_sense_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.AdSenseLink.to_json( - resources.AdSenseLink() + req.return_value._content = event_create_and_edit.EventCreateRule.to_json( + event_create_and_edit.EventCreateRule() ) - request = analytics_admin.GetAdSenseLinkRequest() + request = analytics_admin.GetEventCreateRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.AdSenseLink() + post.return_value = event_create_and_edit.EventCreateRule() - client.get_ad_sense_link( + client.get_event_create_rule( request, metadata=[ ("key", "val"), @@ -95193,8 +105418,8 @@ def test_get_ad_sense_link_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_ad_sense_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetAdSenseLinkRequest +def test_get_event_create_rule_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetEventCreateRuleRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -95202,7 +105427,9 @@ def test_get_ad_sense_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": 
"properties/sample1/adSenseLinks/sample2"} + request_init = { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -95214,10 +105441,10 @@ def test_get_ad_sense_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_ad_sense_link(request) + client.get_event_create_rule(request) -def test_get_ad_sense_link_rest_flattened(): +def test_get_event_create_rule_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -95226,10 +105453,12 @@ def test_get_ad_sense_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.AdSenseLink() + return_value = event_create_and_edit.EventCreateRule() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/adSenseLinks/sample2"} + sample_request = { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -95241,24 +105470,25 @@ def test_get_ad_sense_link_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AdSenseLink.pb(return_value) + return_value = event_create_and_edit.EventCreateRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_ad_sense_link(**mock_args) + client.get_event_create_rule(**mock_args) # Establish that the underlying call was made with the expected # request object 
values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/adSenseLinks/*}" % client.transport._host, + "%s/v1alpha/{name=properties/*/dataStreams/*/eventCreateRules/*}" + % client.transport._host, args[1], ) -def test_get_ad_sense_link_rest_flattened_error(transport: str = "rest"): +def test_get_event_create_rule_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -95267,13 +105497,13 @@ def test_get_ad_sense_link_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_ad_sense_link( - analytics_admin.GetAdSenseLinkRequest(), + client.get_event_create_rule( + analytics_admin.GetEventCreateRuleRequest(), name="name_value", ) -def test_get_ad_sense_link_rest_error(): +def test_get_event_create_rule_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -95282,117 +105512,44 @@ def test_get_ad_sense_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateAdSenseLinkRequest, + analytics_admin.ListEventCreateRulesRequest, dict, ], ) -def test_create_ad_sense_link_rest(request_type): +def test_list_event_create_rules_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["adsense_link"] = { - "name": "name_value", - "ad_client_code": "ad_client_code_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateAdSenseLinkRequest.meta.fields["adsense_link"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["adsense_link"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the 
dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["adsense_link"][field])): - del request_init["adsense_link"][field][i][subfield] - else: - del request_init["adsense_link"][field][subfield] + request_init = {"parent": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.AdSenseLink( - name="name_value", - ad_client_code="ad_client_code_value", + return_value = analytics_admin.ListEventCreateRulesResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AdSenseLink.pb(return_value) + return_value = analytics_admin.ListEventCreateRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_ad_sense_link(request) + response = client.list_event_create_rules(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AdSenseLink) - assert response.name == "name_value" - assert response.ad_client_code == "ad_client_code_value" + assert isinstance(response, pagers.ListEventCreateRulesPager) + assert response.next_page_token == "next_page_token_value" -def test_create_ad_sense_link_rest_use_cached_wrapped_rpc(): +def test_list_event_create_rules_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -95407,7 +105564,8 @@ def test_create_ad_sense_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_ad_sense_link in client._transport._wrapped_methods + client._transport.list_event_create_rules + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -95416,24 +105574,24 @@ def test_create_ad_sense_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_ad_sense_link + client._transport.list_event_create_rules ] = mock_rpc request = {} - client.create_ad_sense_link(request) + client.list_event_create_rules(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_ad_sense_link(request) + client.list_event_create_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_ad_sense_link_rest_required_fields( - request_type=analytics_admin.CreateAdSenseLinkRequest, +def test_list_event_create_rules_rest_required_fields( + request_type=analytics_admin.ListEventCreateRulesRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -95449,7 +105607,7 @@ def test_create_ad_sense_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_ad_sense_link._get_unset_required_fields(jsonified_request) + ).list_event_create_rules._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -95458,7 +105616,14 @@ def test_create_ad_sense_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_ad_sense_link._get_unset_required_fields(jsonified_request) + ).list_event_create_rules._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -95472,7 +105637,7 @@ def test_create_ad_sense_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.AdSenseLink() + return_value = analytics_admin.ListEventCreateRulesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -95484,48 +105649,47 @@ def test_create_ad_sense_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AdSenseLink.pb(return_value) + return_value = analytics_admin.ListEventCreateRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_ad_sense_link(request) + response = client.list_event_create_rules(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_ad_sense_link_rest_unset_required_fields(): +def test_list_event_create_rules_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_ad_sense_link._get_unset_required_fields({}) + unset_fields = transport.list_event_create_rules._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "parent", - "adsenseLink", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_ad_sense_link_rest_interceptors(null_interceptor): +def test_list_event_create_rules_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -95538,14 +105702,14 @@ def 
test_create_ad_sense_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_ad_sense_link" + transports.AnalyticsAdminServiceRestInterceptor, "post_list_event_create_rules" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_ad_sense_link" + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_event_create_rules" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateAdSenseLinkRequest.pb( - analytics_admin.CreateAdSenseLinkRequest() + pb_message = analytics_admin.ListEventCreateRulesRequest.pb( + analytics_admin.ListEventCreateRulesRequest() ) transcode.return_value = { "method": "post", @@ -95557,19 +105721,21 @@ def test_create_ad_sense_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.AdSenseLink.to_json( - resources.AdSenseLink() + req.return_value._content = ( + analytics_admin.ListEventCreateRulesResponse.to_json( + analytics_admin.ListEventCreateRulesResponse() + ) ) - request = analytics_admin.CreateAdSenseLinkRequest() + request = analytics_admin.ListEventCreateRulesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.AdSenseLink() + post.return_value = analytics_admin.ListEventCreateRulesResponse() - client.create_ad_sense_link( + client.list_event_create_rules( request, metadata=[ ("key", "val"), @@ -95581,8 +105747,8 @@ def test_create_ad_sense_link_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_ad_sense_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateAdSenseLinkRequest +def test_list_event_create_rules_rest_bad_request( + transport: str 
= "rest", request_type=analytics_admin.ListEventCreateRulesRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -95590,7 +105756,7 @@ def test_create_ad_sense_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"parent": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -95602,10 +105768,10 @@ def test_create_ad_sense_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_ad_sense_link(request) + client.list_event_create_rules(request) -def test_create_ad_sense_link_rest_flattened(): +def test_list_event_create_rules_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -95614,15 +105780,14 @@ def test_create_ad_sense_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.AdSenseLink() + return_value = analytics_admin.ListEventCreateRulesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"parent": "properties/sample1/dataStreams/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - adsense_link=resources.AdSenseLink(name="name_value"), ) mock_args.update(sample_request) @@ -95630,24 +105795,25 @@ def test_create_ad_sense_link_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AdSenseLink.pb(return_value) + return_value = analytics_admin.ListEventCreateRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_ad_sense_link(**mock_args) + client.list_event_create_rules(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/adSenseLinks" % client.transport._host, + "%s/v1alpha/{parent=properties/*/dataStreams/*}/eventCreateRules" + % client.transport._host, args[1], ) -def test_create_ad_sense_link_rest_flattened_error(transport: str = "rest"): +def test_list_event_create_rules_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -95656,55 +105822,207 @@ def test_create_ad_sense_link_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_ad_sense_link( - analytics_admin.CreateAdSenseLinkRequest(), + client.list_event_create_rules( + analytics_admin.ListEventCreateRulesRequest(), parent="parent_value", - adsense_link=resources.AdSenseLink(name="name_value"), ) -def test_create_ad_sense_link_rest_error(): +def test_list_event_create_rules_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + ], + next_page_token="abc", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[], + next_page_token="def", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + ], + next_page_token="ghi", + ), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListEventCreateRulesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = 
return_values + + sample_request = {"parent": "properties/sample1/dataStreams/sample2"} + + pager = client.list_event_create_rules(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, event_create_and_edit.EventCreateRule) for i in results + ) + + pages = list(client.list_event_create_rules(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteAdSenseLinkRequest, + analytics_admin.CreateEventCreateRuleRequest, dict, ], ) -def test_delete_ad_sense_link_rest(request_type): +def test_create_event_create_rule_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/adSenseLinks/sample2"} + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request_init["event_create_rule"] = { + "name": "name_value", + "destination_event": "destination_event_value", + "event_conditions": [ + { + "field": "field_value", + "comparison_type": 1, + "value": "value_value", + "negated": True, + } + ], + "source_copy_parameters": True, + "parameter_mutations": [ + {"parameter": "parameter_value", "parameter_value": "parameter_value_value"} + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreateEventCreateRuleRequest.meta.fields[ + "event_create_rule" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["event_create_rule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the 
runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["event_create_rule"][field])): + del request_init["event_create_rule"][field][i][subfield] + else: + del request_init["event_create_rule"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = event_create_and_edit.EventCreateRule( + name="name_value", + destination_event="destination_event_value", + source_copy_parameters=True, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = event_create_and_edit.EventCreateRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_ad_sense_link(request) + response = client.create_event_create_rule(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, event_create_and_edit.EventCreateRule) + assert response.name == "name_value" + assert response.destination_event == "destination_event_value" + assert response.source_copy_parameters is True -def test_delete_ad_sense_link_rest_use_cached_wrapped_rpc(): +def test_create_event_create_rule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -95719,7 +106037,8 @@ def test_delete_ad_sense_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_ad_sense_link in client._transport._wrapped_methods + client._transport.create_event_create_rule + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -95728,29 +106047,29 @@ def test_delete_ad_sense_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_ad_sense_link + client._transport.create_event_create_rule ] = mock_rpc request = {} - client.delete_ad_sense_link(request) + client.create_event_create_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_ad_sense_link(request) + client.create_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_ad_sense_link_rest_required_fields( - request_type=analytics_admin.DeleteAdSenseLinkRequest, +def test_create_event_create_rule_rest_required_fields( + request_type=analytics_admin.CreateEventCreateRuleRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -95761,21 +106080,21 @@ def test_delete_ad_sense_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_ad_sense_link._get_unset_required_fields(jsonified_request) + ).create_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_ad_sense_link._get_unset_required_fields(jsonified_request) + ).create_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -95784,7 +106103,7 @@ def test_delete_ad_sense_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = None + return_value = event_create_and_edit.EventCreateRule() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -95796,36 +106115,48 @@ def test_delete_ad_sense_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = event_create_and_edit.EventCreateRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_ad_sense_link(request) + response = client.create_event_create_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_ad_sense_link_rest_unset_required_fields(): +def test_create_event_create_rule_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_ad_sense_link._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_event_create_rule._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "eventCreateRule", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_ad_sense_link_rest_interceptors(null_interceptor): +def test_create_event_create_rule_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -95838,11 +106169,14 @@ def test_delete_ad_sense_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_ad_sense_link" + transports.AnalyticsAdminServiceRestInterceptor, "post_create_event_create_rule" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_event_create_rule" ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteAdSenseLinkRequest.pb( - analytics_admin.DeleteAdSenseLinkRequest() + post.assert_not_called() + pb_message = analytics_admin.CreateEventCreateRuleRequest.pb( + analytics_admin.CreateEventCreateRuleRequest() ) transcode.return_value = { "method": "post", @@ -95854,15 +106188,19 @@ def test_delete_ad_sense_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = event_create_and_edit.EventCreateRule.to_json( + event_create_and_edit.EventCreateRule() + ) - request = analytics_admin.DeleteAdSenseLinkRequest() + request = analytics_admin.CreateEventCreateRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = event_create_and_edit.EventCreateRule() - client.delete_ad_sense_link( + client.create_event_create_rule( request, metadata=[ ("key", "val"), @@ -95871,10 +106209,11 @@ def test_delete_ad_sense_link_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_ad_sense_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeleteAdSenseLinkRequest +def test_create_event_create_rule_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateEventCreateRuleRequest ): client = 
AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -95882,7 +106221,7 @@ def test_delete_ad_sense_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/adSenseLinks/sample2"} + request_init = {"parent": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -95894,10 +106233,10 @@ def test_delete_ad_sense_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_ad_sense_link(request) + client.create_event_create_rule(request) -def test_delete_ad_sense_link_rest_flattened(): +def test_create_event_create_rule_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -95906,37 +106245,41 @@ def test_delete_ad_sense_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = event_create_and_edit.EventCreateRule() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/adSenseLinks/sample2"} + sample_request = {"parent": "properties/sample1/dataStreams/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = event_create_and_edit.EventCreateRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_ad_sense_link(**mock_args) + client.create_event_create_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/adSenseLinks/*}" % client.transport._host, + "%s/v1alpha/{parent=properties/*/dataStreams/*}/eventCreateRules" + % client.transport._host, args[1], ) -def test_delete_ad_sense_link_rest_flattened_error(transport: str = "rest"): +def test_create_event_create_rule_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -95945,13 +106288,14 @@ def test_delete_ad_sense_link_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_ad_sense_link( - analytics_admin.DeleteAdSenseLinkRequest(), - name="name_value", + client.create_event_create_rule( + analytics_admin.CreateEventCreateRuleRequest(), + parent="parent_value", + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), ) -def test_delete_ad_sense_link_rest_error(): +def test_create_event_create_rule_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -95960,44 +106304,137 @@ def test_delete_ad_sense_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListAdSenseLinksRequest, + analytics_admin.UpdateEventCreateRuleRequest, dict, ], ) -def test_list_ad_sense_links_rest(request_type): +def test_update_event_create_rule_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = { + "event_create_rule": { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } + } + request_init["event_create_rule"] = { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3", + "destination_event": "destination_event_value", + "event_conditions": [ + { + "field": "field_value", + "comparison_type": 1, + "value": "value_value", + "negated": True, + } + ], + "source_copy_parameters": True, + "parameter_mutations": [ + {"parameter": "parameter_value", "parameter_value": "parameter_value_value"} + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateEventCreateRuleRequest.meta.fields[ + "event_create_rule" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["event_create_rule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the 
runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["event_create_rule"][field])): + del request_init["event_create_rule"][field][i][subfield] + else: + del request_init["event_create_rule"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListAdSenseLinksResponse( - next_page_token="next_page_token_value", + return_value = event_create_and_edit.EventCreateRule( + name="name_value", + destination_event="destination_event_value", + source_copy_parameters=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListAdSenseLinksResponse.pb(return_value) + return_value = event_create_and_edit.EventCreateRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_ad_sense_links(request) + response = client.update_event_create_rule(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAdSenseLinksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, event_create_and_edit.EventCreateRule) + assert response.name == "name_value" + assert response.destination_event == "destination_event_value" + assert response.source_copy_parameters is True -def test_list_ad_sense_links_rest_use_cached_wrapped_rpc(): +def test_update_event_create_rule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -96012,7 +106449,8 @@ def test_list_ad_sense_links_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_ad_sense_links in client._transport._wrapped_methods + client._transport.update_event_create_rule + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -96021,29 +106459,28 @@ def test_list_ad_sense_links_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_ad_sense_links + client._transport.update_event_create_rule ] = mock_rpc request = {} - client.list_ad_sense_links(request) + client.update_event_create_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_ad_sense_links(request) + client.update_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_ad_sense_links_rest_required_fields( - request_type=analytics_admin.ListAdSenseLinksRequest, +def test_update_event_create_rule_rest_required_fields( + request_type=analytics_admin.UpdateEventCreateRuleRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -96054,28 +106491,19 @@ def test_list_ad_sense_links_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_ad_sense_links._get_unset_required_fields(jsonified_request) + ).update_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_ad_sense_links._get_unset_required_fields(jsonified_request) + ).update_event_create_rule._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -96084,7 +106512,7 @@ def test_list_ad_sense_links_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListAdSenseLinksResponse() + return_value = event_create_and_edit.EventCreateRule() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -96096,47 +106524,48 @@ def test_list_ad_sense_links_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListAdSenseLinksResponse.pb(return_value) + return_value = event_create_and_edit.EventCreateRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_ad_sense_links(request) + response = client.update_event_create_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_ad_sense_links_rest_unset_required_fields(): +def test_update_event_create_rule_rest_unset_required_fields(): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_ad_sense_links._get_unset_required_fields({}) + unset_fields = transport.update_event_create_rule._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("updateMask",)) + & set( ( - "pageSize", - "pageToken", + "eventCreateRule", + "updateMask", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_ad_sense_links_rest_interceptors(null_interceptor): +def test_update_event_create_rule_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -96149,14 +106578,14 @@ def test_list_ad_sense_links_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_ad_sense_links" + transports.AnalyticsAdminServiceRestInterceptor, "post_update_event_create_rule" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_ad_sense_links" + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_event_create_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListAdSenseLinksRequest.pb( - analytics_admin.ListAdSenseLinksRequest() + pb_message = analytics_admin.UpdateEventCreateRuleRequest.pb( + analytics_admin.UpdateEventCreateRuleRequest() ) transcode.return_value = { "method": "post", @@ -96168,19 +106597,19 @@ def test_list_ad_sense_links_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.ListAdSenseLinksResponse.to_json( - analytics_admin.ListAdSenseLinksResponse() + req.return_value._content = 
event_create_and_edit.EventCreateRule.to_json( + event_create_and_edit.EventCreateRule() ) - request = analytics_admin.ListAdSenseLinksRequest() + request = analytics_admin.UpdateEventCreateRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListAdSenseLinksResponse() + post.return_value = event_create_and_edit.EventCreateRule() - client.list_ad_sense_links( + client.update_event_create_rule( request, metadata=[ ("key", "val"), @@ -96192,8 +106621,8 @@ def test_list_ad_sense_links_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_ad_sense_links_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListAdSenseLinksRequest +def test_update_event_create_rule_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateEventCreateRuleRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -96201,7 +106630,11 @@ def test_list_ad_sense_links_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = { + "event_create_rule": { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -96213,10 +106646,10 @@ def test_list_ad_sense_links_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_ad_sense_links(request) + client.update_event_create_rule(request) -def test_list_ad_sense_links_rest_flattened(): +def test_update_event_create_rule_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -96225,14 +106658,19 @@ def test_list_ad_sense_links_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListAdSenseLinksResponse() + return_value = event_create_and_edit.EventCreateRule() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = { + "event_create_rule": { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -96240,24 +106678,25 @@ def test_list_ad_sense_links_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListAdSenseLinksResponse.pb(return_value) + return_value = event_create_and_edit.EventCreateRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_ad_sense_links(**mock_args) + client.update_event_create_rule(**mock_args) # Establish that the underlying call was made with the expected # 
request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/adSenseLinks" % client.transport._host, + "%s/v1alpha/{event_create_rule.name=properties/*/dataStreams/*/eventCreateRules/*}" + % client.transport._host, args[1], ) -def test_list_ad_sense_links_rest_flattened_error(transport: str = "rest"): +def test_update_event_create_rule_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -96266,83 +106705,27 @@ def test_list_ad_sense_links_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_ad_sense_links( - analytics_admin.ListAdSenseLinksRequest(), - parent="parent_value", + client.update_event_create_rule( + analytics_admin.UpdateEventCreateRuleRequest(), + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_ad_sense_links_rest_pager(transport: str = "rest"): +def test_update_event_create_rule_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - resources.AdSenseLink(), - resources.AdSenseLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[], - next_page_token="def", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - resources.AdSenseLink(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListAdSenseLinksResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_ad_sense_links(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.AdSenseLink) for i in results) - - pages = list(client.list_ad_sense_links(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetEventCreateRuleRequest, + analytics_admin.DeleteEventCreateRuleRequest, dict, ], ) -def test_get_event_create_rule_rest(request_type): +def test_delete_event_create_rule_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -96357,31 +106740,22 @@ def test_get_event_create_rule_rest(request_type): # Mock the http request call within 
the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = event_create_and_edit.EventCreateRule( - name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_event_create_rule(request) + response = client.delete_event_create_rule(request) # Establish that the response is the type that we expect. - assert isinstance(response, event_create_and_edit.EventCreateRule) - assert response.name == "name_value" - assert response.destination_event == "destination_event_value" - assert response.source_copy_parameters is True + assert response is None -def test_get_event_create_rule_rest_use_cached_wrapped_rpc(): +def test_delete_event_create_rule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -96396,7 +106770,7 @@ def test_get_event_create_rule_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_event_create_rule + client._transport.delete_event_create_rule in client._transport._wrapped_methods ) @@ -96406,24 +106780,24 @@ def test_get_event_create_rule_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.get_event_create_rule + client._transport.delete_event_create_rule ] = mock_rpc request = {} - client.get_event_create_rule(request) + client.delete_event_create_rule(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_event_create_rule(request) + client.delete_event_create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_event_create_rule_rest_required_fields( - request_type=analytics_admin.GetEventCreateRuleRequest, +def test_delete_event_create_rule_rest_required_fields( + request_type=analytics_admin.DeleteEventCreateRuleRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -96439,7 +106813,7 @@ def test_get_event_create_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_event_create_rule._get_unset_required_fields(jsonified_request) + ).delete_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -96448,7 +106822,7 @@ def test_get_event_create_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_event_create_rule._get_unset_required_fields(jsonified_request) + ).delete_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -96462,7 +106836,7 @@ def test_get_event_create_rule_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = event_create_and_edit.EventCreateRule() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -96474,39 +106848,36 @@ def test_get_event_create_rule_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_event_create_rule(request) + response = client.delete_event_create_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_event_create_rule_rest_unset_required_fields(): +def test_delete_event_create_rule_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_event_create_rule._get_unset_required_fields({}) + unset_fields = transport.delete_event_create_rule._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_event_create_rule_rest_interceptors(null_interceptor): +def test_delete_event_create_rule_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -96519,14 +106890,11 @@ def test_get_event_create_rule_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - 
transports.AnalyticsAdminServiceRestInterceptor, "post_get_event_create_rule" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_event_create_rule" + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_event_create_rule" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.GetEventCreateRuleRequest.pb( - analytics_admin.GetEventCreateRuleRequest() + pb_message = analytics_admin.DeleteEventCreateRuleRequest.pb( + analytics_admin.DeleteEventCreateRuleRequest() ) transcode.return_value = { "method": "post", @@ -96538,19 +106906,15 @@ def test_get_event_create_rule_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = event_create_and_edit.EventCreateRule.to_json( - event_create_and_edit.EventCreateRule() - ) - request = analytics_admin.GetEventCreateRuleRequest() + request = analytics_admin.DeleteEventCreateRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = event_create_and_edit.EventCreateRule() - client.get_event_create_rule( + client.delete_event_create_rule( request, metadata=[ ("key", "val"), @@ -96559,11 +106923,10 @@ def test_get_event_create_rule_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_get_event_create_rule_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetEventCreateRuleRequest +def test_delete_event_create_rule_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteEventCreateRuleRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -96585,10 +106948,10 @@ def test_get_event_create_rule_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - 
client.get_event_create_rule(request) + client.delete_event_create_rule(request) -def test_get_event_create_rule_rest_flattened(): +def test_delete_event_create_rule_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -96597,7 +106960,7 @@ def test_get_event_create_rule_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = event_create_and_edit.EventCreateRule() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { @@ -96610,29 +106973,344 @@ def test_get_event_create_rule_rest_flattened(): ) mock_args.update(sample_request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_event_create_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/dataStreams/*/eventCreateRules/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_event_create_rule_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_event_create_rule( + analytics_admin.DeleteEventCreateRuleRequest(), + name="name_value", + ) + + +def test_delete_event_create_rule_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetEventEditRuleRequest, + dict, + ], +) +def test_get_event_edit_rule_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "properties/sample1/dataStreams/sample2/eventEditRules/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = event_create_and_edit.EventEditRule( + name="name_value", + display_name="display_name_value", + processing_order=1720, + ) + # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) + return_value = event_create_and_edit.EventEditRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + response = client.get_event_edit_rule(request) - client.get_event_create_rule(**mock_args) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, event_create_and_edit.EventEditRule) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.processing_order == 1720 + + +def test_get_event_edit_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_event_edit_rule in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_event_edit_rule + ] = mock_rpc + + request = {} + client.get_event_edit_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_event_edit_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_event_edit_rule_rest_required_fields( + request_type=analytics_admin.GetEventEditRuleRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_event_edit_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_event_edit_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = event_create_and_edit.EventEditRule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = event_create_and_edit.EventEditRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_event_edit_rule(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_event_edit_rule_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_event_edit_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_event_edit_rule_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_get_event_edit_rule" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_event_edit_rule" + ) as 
pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_admin.GetEventEditRuleRequest.pb( + analytics_admin.GetEventEditRuleRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = event_create_and_edit.EventEditRule.to_json( + event_create_and_edit.EventEditRule() + ) + + request = analytics_admin.GetEventEditRuleRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = event_create_and_edit.EventEditRule() + + client.get_event_edit_rule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_event_edit_rule_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetEventEditRuleRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "properties/sample1/dataStreams/sample2/eventEditRules/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_event_edit_rule(request) + + +def test_get_event_edit_rule_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = event_create_and_edit.EventEditRule() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "properties/sample1/dataStreams/sample2/eventEditRules/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = event_create_and_edit.EventEditRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_event_edit_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataStreams/*/eventCreateRules/*}" + "%s/v1alpha/{name=properties/*/dataStreams/*/eventEditRules/*}" % client.transport._host, args[1], ) -def test_get_event_create_rule_rest_flattened_error(transport: str = "rest"): +def test_get_event_edit_rule_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -96641,13 +107319,13 @@ def test_get_event_create_rule_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_event_create_rule( - analytics_admin.GetEventCreateRuleRequest(), + client.get_event_edit_rule( + analytics_admin.GetEventEditRuleRequest(), name="name_value", ) -def test_get_event_create_rule_rest_error(): +def test_get_event_edit_rule_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -96656,11 +107334,11 @@ def test_get_event_create_rule_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListEventCreateRulesRequest, + analytics_admin.ListEventEditRulesRequest, dict, ], ) -def test_list_event_create_rules_rest(request_type): +def test_list_event_edit_rules_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -96673,7 +107351,7 @@ def test_list_event_create_rules_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListEventCreateRulesResponse( + return_value = analytics_admin.ListEventEditRulesResponse( next_page_token="next_page_token_value", ) @@ -96681,19 +107359,19 @@ def test_list_event_create_rules_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListEventCreateRulesResponse.pb(return_value) + return_value = analytics_admin.ListEventEditRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_event_create_rules(request) + response = client.list_event_edit_rules(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEventCreateRulesPager) + assert isinstance(response, pagers.ListEventEditRulesPager) assert response.next_page_token == "next_page_token_value" -def test_list_event_create_rules_rest_use_cached_wrapped_rpc(): +def test_list_event_edit_rules_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -96708,7 +107386,7 @@ def test_list_event_create_rules_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_event_create_rules + client._transport.list_event_edit_rules in client._transport._wrapped_methods ) @@ -96718,24 +107396,24 @@ def test_list_event_create_rules_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.list_event_create_rules + client._transport.list_event_edit_rules ] = mock_rpc request = {} - client.list_event_create_rules(request) + client.list_event_edit_rules(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_event_create_rules(request) + client.list_event_edit_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_event_create_rules_rest_required_fields( - request_type=analytics_admin.ListEventCreateRulesRequest, +def test_list_event_edit_rules_rest_required_fields( + request_type=analytics_admin.ListEventEditRulesRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -96751,7 +107429,7 @@ def test_list_event_create_rules_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_event_create_rules._get_unset_required_fields(jsonified_request) + ).list_event_edit_rules._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -96760,7 +107438,7 @@ def test_list_event_create_rules_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_event_create_rules._get_unset_required_fields(jsonified_request) + ).list_event_edit_rules._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -96781,7 +107459,7 @@ def test_list_event_create_rules_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListEventCreateRulesResponse() + return_value = analytics_admin.ListEventEditRulesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -96802,25 +107480,25 @@ def test_list_event_create_rules_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListEventCreateRulesResponse.pb(return_value) + return_value = analytics_admin.ListEventEditRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_event_create_rules(request) + response = client.list_event_edit_rules(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_event_create_rules_rest_unset_required_fields(): +def test_list_event_edit_rules_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_event_create_rules._get_unset_required_fields({}) + unset_fields = transport.list_event_edit_rules._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -96833,7 +107511,7 @@ def test_list_event_create_rules_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_event_create_rules_rest_interceptors(null_interceptor): +def test_list_event_edit_rules_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -96846,14 +107524,14 @@ def test_list_event_create_rules_rest_interceptors(null_interceptor): ) as req, mock.patch.object( 
path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_event_create_rules" + transports.AnalyticsAdminServiceRestInterceptor, "post_list_event_edit_rules" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_event_create_rules" + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_event_edit_rules" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListEventCreateRulesRequest.pb( - analytics_admin.ListEventCreateRulesRequest() + pb_message = analytics_admin.ListEventEditRulesRequest.pb( + analytics_admin.ListEventEditRulesRequest() ) transcode.return_value = { "method": "post", @@ -96865,21 +107543,19 @@ def test_list_event_create_rules_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.ListEventCreateRulesResponse.to_json( - analytics_admin.ListEventCreateRulesResponse() - ) + req.return_value._content = analytics_admin.ListEventEditRulesResponse.to_json( + analytics_admin.ListEventEditRulesResponse() ) - request = analytics_admin.ListEventCreateRulesRequest() + request = analytics_admin.ListEventEditRulesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListEventCreateRulesResponse() + post.return_value = analytics_admin.ListEventEditRulesResponse() - client.list_event_create_rules( + client.list_event_edit_rules( request, metadata=[ ("key", "val"), @@ -96891,8 +107567,8 @@ def test_list_event_create_rules_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_event_create_rules_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListEventCreateRulesRequest +def test_list_event_edit_rules_rest_bad_request( + transport: str = "rest", 
request_type=analytics_admin.ListEventEditRulesRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -96912,10 +107588,10 @@ def test_list_event_create_rules_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_event_create_rules(request) + client.list_event_edit_rules(request) -def test_list_event_create_rules_rest_flattened(): +def test_list_event_edit_rules_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -96924,7 +107600,7 @@ def test_list_event_create_rules_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListEventCreateRulesResponse() + return_value = analytics_admin.ListEventEditRulesResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1/dataStreams/sample2"} @@ -96939,25 +107615,25 @@ def test_list_event_create_rules_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListEventCreateRulesResponse.pb(return_value) + return_value = analytics_admin.ListEventEditRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_event_create_rules(**mock_args) + client.list_event_edit_rules(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*/dataStreams/*}/eventCreateRules" + "%s/v1alpha/{parent=properties/*/dataStreams/*}/eventEditRules" % client.transport._host, args[1], ) -def test_list_event_create_rules_rest_flattened_error(transport: str = "rest"): +def test_list_event_edit_rules_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -96966,13 +107642,13 @@ def test_list_event_create_rules_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_event_create_rules( - analytics_admin.ListEventCreateRulesRequest(), + client.list_event_edit_rules( + analytics_admin.ListEventEditRulesRequest(), parent="parent_value", ) -def test_list_event_create_rules_rest_pager(transport: str = "rest"): +def test_list_event_edit_rules_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -96984,28 +107660,28 @@ def test_list_event_create_rules_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), ], next_page_token="abc", ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[], + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[], 
next_page_token="def", ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), ], next_page_token="ghi", ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), + analytics_admin.ListEventEditRulesResponse( + event_edit_rules=[ + event_create_and_edit.EventEditRule(), + event_create_and_edit.EventEditRule(), ], ), ) @@ -97014,7 +107690,7 @@ def test_list_event_create_rules_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - analytics_admin.ListEventCreateRulesResponse.to_json(x) for x in response + analytics_admin.ListEventEditRulesResponse.to_json(x) for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -97024,15 +107700,13 @@ def test_list_event_create_rules_rest_pager(transport: str = "rest"): sample_request = {"parent": "properties/sample1/dataStreams/sample2"} - pager = client.list_event_create_rules(request=sample_request) + pager = client.list_event_edit_rules(request=sample_request) results = list(pager) assert len(results) == 6 - assert all( - isinstance(i, event_create_and_edit.EventCreateRule) for i in results - ) + assert all(isinstance(i, event_create_and_edit.EventEditRule) for i in results) - pages = list(client.list_event_create_rules(request=sample_request).pages) + pages = list(client.list_event_edit_rules(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -97040,11 +107714,11 @@ def test_list_event_create_rules_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateEventCreateRuleRequest, + 
analytics_admin.CreateEventEditRuleRequest, dict, ], ) -def test_create_event_create_rule_rest(request_type): +def test_create_event_edit_rule_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -97052,9 +107726,9 @@ def test_create_event_create_rule_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "properties/sample1/dataStreams/sample2"} - request_init["event_create_rule"] = { + request_init["event_edit_rule"] = { "name": "name_value", - "destination_event": "destination_event_value", + "display_name": "display_name_value", "event_conditions": [ { "field": "field_value", @@ -97063,18 +107737,18 @@ def test_create_event_create_rule_rest(request_type): "negated": True, } ], - "source_copy_parameters": True, "parameter_mutations": [ {"parameter": "parameter_value", "parameter_value": "parameter_value_value"} ], + "processing_order": 1720, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateEventCreateRuleRequest.meta.fields[ - "event_create_rule" + test_field = analytics_admin.CreateEventEditRuleRequest.meta.fields[ + "event_edit_rule" ] def get_message_fields(field): @@ -97103,7 +107777,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["event_create_rule"].items(): # pragma: NO COVER + for field, value in request_init["event_edit_rule"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -97133,40 +107807,40 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["event_create_rule"][field])): - del request_init["event_create_rule"][field][i][subfield] + for i in range(0, len(request_init["event_edit_rule"][field])): + del request_init["event_edit_rule"][field][i][subfield] else: - del request_init["event_create_rule"][field][subfield] + del request_init["event_edit_rule"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = event_create_and_edit.EventCreateRule( + return_value = event_create_and_edit.EventEditRule( name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, + display_name="display_name_value", + processing_order=1720, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) + return_value = event_create_and_edit.EventEditRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_event_create_rule(request) + response = client.create_event_edit_rule(request) # Establish that the response is the type that we expect. - assert isinstance(response, event_create_and_edit.EventCreateRule) + assert isinstance(response, event_create_and_edit.EventEditRule) assert response.name == "name_value" - assert response.destination_event == "destination_event_value" - assert response.source_copy_parameters is True + assert response.display_name == "display_name_value" + assert response.processing_order == 1720 -def test_create_event_create_rule_rest_use_cached_wrapped_rpc(): +def test_create_event_edit_rule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -97181,7 +107855,7 @@ def test_create_event_create_rule_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_event_create_rule + client._transport.create_event_edit_rule in client._transport._wrapped_methods ) @@ -97191,24 +107865,24 @@ def test_create_event_create_rule_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) 
expect a string. ) client._transport._wrapped_methods[ - client._transport.create_event_create_rule + client._transport.create_event_edit_rule ] = mock_rpc request = {} - client.create_event_create_rule(request) + client.create_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_event_create_rule(request) + client.create_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_event_create_rule_rest_required_fields( - request_type=analytics_admin.CreateEventCreateRuleRequest, +def test_create_event_edit_rule_rest_required_fields( + request_type=analytics_admin.CreateEventEditRuleRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -97224,7 +107898,7 @@ def test_create_event_create_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_event_create_rule._get_unset_required_fields(jsonified_request) + ).create_event_edit_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -97233,7 +107907,7 @@ def test_create_event_create_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_event_create_rule._get_unset_required_fields(jsonified_request) + ).create_event_edit_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -97247,7 +107921,7 @@ def test_create_event_create_rule_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = event_create_and_edit.EventCreateRule() + return_value = event_create_and_edit.EventEditRule() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -97269,38 +107943,38 @@ def test_create_event_create_rule_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) + return_value = event_create_and_edit.EventEditRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_event_create_rule(request) + response = client.create_event_edit_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_event_create_rule_rest_unset_required_fields(): +def test_create_event_edit_rule_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_event_create_rule._get_unset_required_fields({}) + unset_fields = transport.create_event_edit_rule._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "parent", - "eventCreateRule", + "eventEditRule", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_event_create_rule_rest_interceptors(null_interceptor): +def test_create_event_edit_rule_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -97313,14 +107987,14 @@ def test_create_event_create_rule_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, 
mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_event_create_rule" + transports.AnalyticsAdminServiceRestInterceptor, "post_create_event_edit_rule" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_event_create_rule" + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_event_edit_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateEventCreateRuleRequest.pb( - analytics_admin.CreateEventCreateRuleRequest() + pb_message = analytics_admin.CreateEventEditRuleRequest.pb( + analytics_admin.CreateEventEditRuleRequest() ) transcode.return_value = { "method": "post", @@ -97332,19 +108006,19 @@ def test_create_event_create_rule_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = event_create_and_edit.EventCreateRule.to_json( - event_create_and_edit.EventCreateRule() + req.return_value._content = event_create_and_edit.EventEditRule.to_json( + event_create_and_edit.EventEditRule() ) - request = analytics_admin.CreateEventCreateRuleRequest() + request = analytics_admin.CreateEventEditRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = event_create_and_edit.EventCreateRule() + post.return_value = event_create_and_edit.EventEditRule() - client.create_event_create_rule( + client.create_event_edit_rule( request, metadata=[ ("key", "val"), @@ -97356,8 +108030,8 @@ def test_create_event_create_rule_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_event_create_rule_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateEventCreateRuleRequest +def test_create_event_edit_rule_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateEventEditRuleRequest ): client = 
AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -97377,10 +108051,10 @@ def test_create_event_create_rule_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_event_create_rule(request) + client.create_event_edit_rule(request) -def test_create_event_create_rule_rest_flattened(): +def test_create_event_edit_rule_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -97389,7 +108063,7 @@ def test_create_event_create_rule_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = event_create_and_edit.EventCreateRule() + return_value = event_create_and_edit.EventEditRule() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1/dataStreams/sample2"} @@ -97397,7 +108071,7 @@ def test_create_event_create_rule_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + event_edit_rule=event_create_and_edit.EventEditRule(name="name_value"), ) mock_args.update(sample_request) @@ -97405,25 +108079,25 @@ def test_create_event_create_rule_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) + return_value = event_create_and_edit.EventEditRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_event_create_rule(**mock_args) + client.create_event_edit_rule(**mock_args) # 
Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*/dataStreams/*}/eventCreateRules" + "%s/v1alpha/{parent=properties/*/dataStreams/*}/eventEditRules" % client.transport._host, args[1], ) -def test_create_event_create_rule_rest_flattened_error(transport: str = "rest"): +def test_create_event_edit_rule_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -97432,14 +108106,14 @@ def test_create_event_create_rule_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_event_create_rule( - analytics_admin.CreateEventCreateRuleRequest(), + client.create_event_edit_rule( + analytics_admin.CreateEventEditRuleRequest(), parent="parent_value", - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + event_edit_rule=event_create_and_edit.EventEditRule(name="name_value"), ) -def test_create_event_create_rule_rest_error(): +def test_create_event_edit_rule_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -97448,11 +108122,11 @@ def test_create_event_create_rule_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateEventCreateRuleRequest, + analytics_admin.UpdateEventEditRuleRequest, dict, ], ) -def test_update_event_create_rule_rest(request_type): +def test_update_event_edit_rule_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -97460,13 +108134,13 @@ def test_update_event_create_rule_rest(request_type): # send a request that will satisfy transcoding request_init = { - 
"event_create_rule": { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + "event_edit_rule": { + "name": "properties/sample1/dataStreams/sample2/eventEditRules/sample3" } } - request_init["event_create_rule"] = { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3", - "destination_event": "destination_event_value", + request_init["event_edit_rule"] = { + "name": "properties/sample1/dataStreams/sample2/eventEditRules/sample3", + "display_name": "display_name_value", "event_conditions": [ { "field": "field_value", @@ -97475,18 +108149,18 @@ def test_update_event_create_rule_rest(request_type): "negated": True, } ], - "source_copy_parameters": True, "parameter_mutations": [ {"parameter": "parameter_value", "parameter_value": "parameter_value_value"} ], + "processing_order": 1720, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateEventCreateRuleRequest.meta.fields[ - "event_create_rule" + test_field = analytics_admin.UpdateEventEditRuleRequest.meta.fields[ + "event_edit_rule" ] def get_message_fields(field): @@ -97515,7 +108189,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["event_create_rule"].items(): # pragma: NO COVER + for field, value in request_init["event_edit_rule"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -97545,40 +108219,40 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in 
range(0, len(request_init["event_create_rule"][field])): - del request_init["event_create_rule"][field][i][subfield] + for i in range(0, len(request_init["event_edit_rule"][field])): + del request_init["event_edit_rule"][field][i][subfield] else: - del request_init["event_create_rule"][field][subfield] + del request_init["event_edit_rule"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = event_create_and_edit.EventCreateRule( + return_value = event_create_and_edit.EventEditRule( name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, + display_name="display_name_value", + processing_order=1720, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) + return_value = event_create_and_edit.EventEditRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_event_create_rule(request) + response = client.update_event_edit_rule(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, event_create_and_edit.EventCreateRule) + assert isinstance(response, event_create_and_edit.EventEditRule) assert response.name == "name_value" - assert response.destination_event == "destination_event_value" - assert response.source_copy_parameters is True + assert response.display_name == "display_name_value" + assert response.processing_order == 1720 -def test_update_event_create_rule_rest_use_cached_wrapped_rpc(): +def test_update_event_edit_rule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -97593,7 +108267,7 @@ def test_update_event_create_rule_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_event_create_rule + client._transport.update_event_edit_rule in client._transport._wrapped_methods ) @@ -97603,24 +108277,24 @@ def test_update_event_create_rule_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_event_create_rule + client._transport.update_event_edit_rule ] = mock_rpc request = {} - client.update_event_create_rule(request) + client.update_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_event_create_rule(request) + client.update_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_event_create_rule_rest_required_fields( - request_type=analytics_admin.UpdateEventCreateRuleRequest, +def test_update_event_edit_rule_rest_required_fields( + request_type=analytics_admin.UpdateEventEditRuleRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -97635,14 +108309,14 @@ def test_update_event_create_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_event_create_rule._get_unset_required_fields(jsonified_request) + ).update_event_edit_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_event_create_rule._get_unset_required_fields(jsonified_request) + ).update_event_edit_rule._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) @@ -97656,7 +108330,7 @@ def test_update_event_create_rule_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = event_create_and_edit.EventCreateRule() + return_value = event_create_and_edit.EventEditRule() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -97678,30 +108352,30 @@ def test_update_event_create_rule_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) + return_value = event_create_and_edit.EventEditRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_event_create_rule(request) + response = client.update_event_edit_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_event_create_rule_rest_unset_required_fields(): +def test_update_event_edit_rule_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_event_create_rule._get_unset_required_fields({}) + unset_fields = transport.update_event_edit_rule._get_unset_required_fields({}) assert set(unset_fields) == ( set(("updateMask",)) & set( ( - "eventCreateRule", + "eventEditRule", "updateMask", ) ) @@ -97709,7 +108383,7 @@ def test_update_event_create_rule_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_event_create_rule_rest_interceptors(null_interceptor): +def test_update_event_edit_rule_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -97722,14 +108396,14 @@ def test_update_event_create_rule_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, 
"post_update_event_create_rule" + transports.AnalyticsAdminServiceRestInterceptor, "post_update_event_edit_rule" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_event_create_rule" + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_event_edit_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateEventCreateRuleRequest.pb( - analytics_admin.UpdateEventCreateRuleRequest() + pb_message = analytics_admin.UpdateEventEditRuleRequest.pb( + analytics_admin.UpdateEventEditRuleRequest() ) transcode.return_value = { "method": "post", @@ -97741,19 +108415,19 @@ def test_update_event_create_rule_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = event_create_and_edit.EventCreateRule.to_json( - event_create_and_edit.EventCreateRule() + req.return_value._content = event_create_and_edit.EventEditRule.to_json( + event_create_and_edit.EventEditRule() ) - request = analytics_admin.UpdateEventCreateRuleRequest() + request = analytics_admin.UpdateEventEditRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = event_create_and_edit.EventCreateRule() + post.return_value = event_create_and_edit.EventEditRule() - client.update_event_create_rule( + client.update_event_edit_rule( request, metadata=[ ("key", "val"), @@ -97765,8 +108439,8 @@ def test_update_event_create_rule_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_event_create_rule_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.UpdateEventCreateRuleRequest +def test_update_event_edit_rule_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateEventEditRuleRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), 
@@ -97775,8 +108449,8 @@ def test_update_event_create_rule_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "event_create_rule": { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + "event_edit_rule": { + "name": "properties/sample1/dataStreams/sample2/eventEditRules/sample3" } } request = request_type(**request_init) @@ -97790,10 +108464,10 @@ def test_update_event_create_rule_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_event_create_rule(request) + client.update_event_edit_rule(request) -def test_update_event_create_rule_rest_flattened(): +def test_update_event_edit_rule_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -97802,18 +108476,18 @@ def test_update_event_create_rule_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = event_create_and_edit.EventCreateRule() + return_value = event_create_and_edit.EventEditRule() # get arguments that satisfy an http rule for this method sample_request = { - "event_create_rule": { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + "event_edit_rule": { + "name": "properties/sample1/dataStreams/sample2/eventEditRules/sample3" } } # get truthy value for each flattened field mock_args = dict( - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + event_edit_rule=event_create_and_edit.EventEditRule(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -97822,25 +108496,25 @@ def test_update_event_create_rule_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) + return_value = event_create_and_edit.EventEditRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_event_create_rule(**mock_args) + client.update_event_edit_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{event_create_rule.name=properties/*/dataStreams/*/eventCreateRules/*}" + "%s/v1alpha/{event_edit_rule.name=properties/*/dataStreams/*/eventEditRules/*}" % client.transport._host, args[1], ) -def test_update_event_create_rule_rest_flattened_error(transport: str = "rest"): +def test_update_event_edit_rule_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -97849,14 +108523,14 @@ def test_update_event_create_rule_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_event_create_rule( - analytics_admin.UpdateEventCreateRuleRequest(), - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + client.update_event_edit_rule( + analytics_admin.UpdateEventEditRuleRequest(), + event_edit_rule=event_create_and_edit.EventEditRule(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_update_event_create_rule_rest_error(): +def test_update_event_edit_rule_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -97865,11 +108539,11 @@ def test_update_event_create_rule_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteEventCreateRuleRequest, + analytics_admin.DeleteEventEditRuleRequest, dict, ], ) -def test_delete_event_create_rule_rest(request_type): +def test_delete_event_edit_rule_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -97877,7 +108551,7 @@ def test_delete_event_create_rule_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": 
"properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + "name": "properties/sample1/dataStreams/sample2/eventEditRules/sample3" } request = request_type(**request_init) @@ -97893,13 +108567,13 @@ def test_delete_event_create_rule_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_event_create_rule(request) + response = client.delete_event_edit_rule(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_event_create_rule_rest_use_cached_wrapped_rpc(): +def test_delete_event_edit_rule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -97914,7 +108588,7 @@ def test_delete_event_create_rule_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_event_create_rule + client._transport.delete_event_edit_rule in client._transport._wrapped_methods ) @@ -97924,24 +108598,24 @@ def test_delete_event_create_rule_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_event_create_rule + client._transport.delete_event_edit_rule ] = mock_rpc request = {} - client.delete_event_create_rule(request) + client.delete_event_edit_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_event_create_rule(request) + client.delete_event_edit_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_event_create_rule_rest_required_fields( - request_type=analytics_admin.DeleteEventCreateRuleRequest, +def test_delete_event_edit_rule_rest_required_fields( + request_type=analytics_admin.DeleteEventEditRuleRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -97957,7 +108631,7 @@ def test_delete_event_create_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_event_create_rule._get_unset_required_fields(jsonified_request) + ).delete_event_edit_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -97966,7 +108640,7 @@ def test_delete_event_create_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_event_create_rule._get_unset_required_fields(jsonified_request) + ).delete_event_edit_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -98004,24 +108678,24 @@ def test_delete_event_create_rule_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_event_create_rule(request) + response = client.delete_event_edit_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_event_create_rule_rest_unset_required_fields(): +def test_delete_event_edit_rule_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_event_create_rule._get_unset_required_fields({}) + unset_fields = transport.delete_event_edit_rule._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_event_create_rule_rest_interceptors(null_interceptor): +def test_delete_event_edit_rule_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -98034,11 +108708,11 @@ def test_delete_event_create_rule_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_event_create_rule" + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_event_edit_rule" ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteEventCreateRuleRequest.pb( - analytics_admin.DeleteEventCreateRuleRequest() + pb_message = analytics_admin.DeleteEventEditRuleRequest.pb( + analytics_admin.DeleteEventEditRuleRequest() ) transcode.return_value = { "method": "post", @@ -98051,14 +108725,14 @@ def test_delete_event_create_rule_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - request = analytics_admin.DeleteEventCreateRuleRequest() + request = analytics_admin.DeleteEventEditRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - client.delete_event_create_rule( + client.delete_event_edit_rule( request, metadata=[ ("key", "val"), @@ -98069,8 +108743,8 @@ def test_delete_event_create_rule_rest_interceptors(null_interceptor): pre.assert_called_once() -def test_delete_event_create_rule_rest_bad_request( - transport: str = "rest", 
request_type=analytics_admin.DeleteEventCreateRuleRequest +def test_delete_event_edit_rule_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteEventEditRuleRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -98079,7 +108753,7 @@ def test_delete_event_create_rule_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + "name": "properties/sample1/dataStreams/sample2/eventEditRules/sample3" } request = request_type(**request_init) @@ -98092,10 +108766,10 @@ def test_delete_event_create_rule_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_event_create_rule(request) + client.delete_event_edit_rule(request) -def test_delete_event_create_rule_rest_flattened(): +def test_delete_event_edit_rule_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -98108,7 +108782,7 @@ def test_delete_event_create_rule_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + "name": "properties/sample1/dataStreams/sample2/eventEditRules/sample3" } # get truthy value for each flattened field @@ -98124,20 +108798,20 @@ def test_delete_event_create_rule_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_event_create_rule(**mock_args) + client.delete_event_edit_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataStreams/*/eventCreateRules/*}" + "%s/v1alpha/{name=properties/*/dataStreams/*/eventEditRules/*}" % client.transport._host, args[1], ) -def test_delete_event_create_rule_rest_flattened_error(transport: str = "rest"): +def test_delete_event_edit_rule_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -98146,13 +108820,261 @@ def test_delete_event_create_rule_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_event_create_rule( - analytics_admin.DeleteEventCreateRuleRequest(), + client.delete_event_edit_rule( + analytics_admin.DeleteEventEditRuleRequest(), name="name_value", ) -def test_delete_event_create_rule_rest_error(): +def test_delete_event_edit_rule_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ReorderEventEditRulesRequest, + dict, + ], +) +def test_reorder_event_edit_rules_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.reorder_event_edit_rules(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_reorder_event_edit_rules_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.reorder_event_edit_rules + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.reorder_event_edit_rules + ] = mock_rpc + + request = {} + client.reorder_event_edit_rules(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.reorder_event_edit_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_reorder_event_edit_rules_rest_required_fields( + request_type=analytics_admin.ReorderEventEditRulesRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["event_edit_rules"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reorder_event_edit_rules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["eventEditRules"] = "event_edit_rules_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reorder_event_edit_rules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "eventEditRules" in jsonified_request + assert jsonified_request["eventEditRules"] == "event_edit_rules_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.reorder_event_edit_rules(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_reorder_event_edit_rules_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.reorder_event_edit_rules._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "eventEditRules", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reorder_event_edit_rules_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.AnalyticsAdminServiceRestInterceptor, "pre_reorder_event_edit_rules" + ) as pre: + pre.assert_not_called() + pb_message = analytics_admin.ReorderEventEditRulesRequest.pb( + analytics_admin.ReorderEventEditRulesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = analytics_admin.ReorderEventEditRulesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.reorder_event_edit_rules( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_reorder_event_edit_rules_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ReorderEventEditRulesRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reorder_event_edit_rules(request) + + +def test_reorder_event_edit_rules_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -102428,11 +113350,11 @@ def test_delete_rollup_property_source_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateSubpropertyRequest, + analytics_admin.ProvisionSubpropertyRequest, dict, ], ) -def test_create_subproperty_rest(request_type): +def test_provision_subproperty_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -102445,24 +113367,24 @@ def test_create_subproperty_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.CreateSubpropertyResponse() + return_value = analytics_admin.ProvisionSubpropertyResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.CreateSubpropertyResponse.pb(return_value) + return_value = analytics_admin.ProvisionSubpropertyResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_subproperty(request) + response = client.provision_subproperty(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.CreateSubpropertyResponse) + assert isinstance(response, analytics_admin.ProvisionSubpropertyResponse) -def test_create_subproperty_rest_use_cached_wrapped_rpc(): +def test_provision_subproperty_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -102477,7 +113399,8 @@ def test_create_subproperty_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_subproperty in client._transport._wrapped_methods + client._transport.provision_subproperty + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -102486,29 +113409,28 @@ def test_create_subproperty_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_subproperty + client._transport.provision_subproperty ] = mock_rpc request = {} - client.create_subproperty(request) + client.provision_subproperty(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_subproperty(request) + client.provision_subproperty(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_subproperty_rest_required_fields( - request_type=analytics_admin.CreateSubpropertyRequest, +def test_provision_subproperty_rest_required_fields( + request_type=analytics_admin.ProvisionSubpropertyRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -102519,21 +113441,17 @@ def test_create_subproperty_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_subproperty._get_unset_required_fields(jsonified_request) + ).provision_subproperty._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_subproperty._get_unset_required_fields(jsonified_request) + ).provision_subproperty._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -102542,7 +113460,7 @@ def test_create_subproperty_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.CreateSubpropertyResponse() + return_value = analytics_admin.ProvisionSubpropertyResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -102564,38 +113482,30 @@ def test_create_subproperty_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.CreateSubpropertyResponse.pb(return_value) + return_value = analytics_admin.ProvisionSubpropertyResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_subproperty(request) + response = client.provision_subproperty(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_subproperty_rest_unset_required_fields(): +def test_provision_subproperty_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_subproperty._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "subproperty", - ) - ) - ) + unset_fields = transport.provision_subproperty._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("subproperty",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_subproperty_rest_interceptors(null_interceptor): +def test_provision_subproperty_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -102608,14 +113518,14 @@ def test_create_subproperty_rest_interceptors(null_interceptor): ) as req, mock.patch.object( 
path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_subproperty" + transports.AnalyticsAdminServiceRestInterceptor, "post_provision_subproperty" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_subproperty" + transports.AnalyticsAdminServiceRestInterceptor, "pre_provision_subproperty" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateSubpropertyRequest.pb( - analytics_admin.CreateSubpropertyRequest() + pb_message = analytics_admin.ProvisionSubpropertyRequest.pb( + analytics_admin.ProvisionSubpropertyRequest() ) transcode.return_value = { "method": "post", @@ -102627,19 +113537,21 @@ def test_create_subproperty_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.CreateSubpropertyResponse.to_json( - analytics_admin.CreateSubpropertyResponse() + req.return_value._content = ( + analytics_admin.ProvisionSubpropertyResponse.to_json( + analytics_admin.ProvisionSubpropertyResponse() + ) ) - request = analytics_admin.CreateSubpropertyRequest() + request = analytics_admin.ProvisionSubpropertyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.CreateSubpropertyResponse() + post.return_value = analytics_admin.ProvisionSubpropertyResponse() - client.create_subproperty( + client.provision_subproperty( request, metadata=[ ("key", "val"), @@ -102651,8 +113563,8 @@ def test_create_subproperty_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_subproperty_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateSubpropertyRequest +def test_provision_subproperty_rest_bad_request( + transport: str = "rest", 
request_type=analytics_admin.ProvisionSubpropertyRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -102672,10 +113584,10 @@ def test_create_subproperty_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_subproperty(request) + client.provision_subproperty(request) -def test_create_subproperty_rest_error(): +def test_provision_subproperty_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -104740,6 +115652,11 @@ def test_analytics_admin_service_base_transport(): "get_conversion_event", "delete_conversion_event", "list_conversion_events", + "create_key_event", + "update_key_event", + "get_key_event", + "delete_key_event", + "list_key_events", "get_display_video360_advertiser_link", "list_display_video360_advertiser_links", "create_display_video360_advertiser_link", @@ -104802,8 +115719,11 @@ def test_analytics_admin_service_base_transport(): "delete_channel_group", "set_automated_ga4_configuration_opt_out", "fetch_automated_ga4_configuration_opt_out", + "create_big_query_link", "get_big_query_link", "list_big_query_links", + "delete_big_query_link", + "update_big_query_link", "get_enhanced_measurement_settings", "update_enhanced_measurement_settings", "create_connected_site_tag", @@ -104819,6 +115739,12 @@ def test_analytics_admin_service_base_transport(): "create_event_create_rule", "update_event_create_rule", "delete_event_create_rule", + "get_event_edit_rule", + "list_event_edit_rules", + "create_event_edit_rule", + "update_event_edit_rule", + "delete_event_edit_rule", + "reorder_event_edit_rules", "update_data_redaction_settings", "get_data_redaction_settings", "get_calculated_metric", @@ -104831,7 +115757,7 @@ def test_analytics_admin_service_base_transport(): "list_rollup_property_source_links", "create_rollup_property_source_link", 
"delete_rollup_property_source_link", - "create_subproperty", + "provision_subproperty", "create_subproperty_event_filter", "get_subproperty_event_filter", "list_subproperty_event_filters", @@ -105236,6 +116162,21 @@ def test_analytics_admin_service_client_transport_session_collision(transport_na session1 = client1.transport.list_conversion_events._session session2 = client2.transport.list_conversion_events._session assert session1 != session2 + session1 = client1.transport.create_key_event._session + session2 = client2.transport.create_key_event._session + assert session1 != session2 + session1 = client1.transport.update_key_event._session + session2 = client2.transport.update_key_event._session + assert session1 != session2 + session1 = client1.transport.get_key_event._session + session2 = client2.transport.get_key_event._session + assert session1 != session2 + session1 = client1.transport.delete_key_event._session + session2 = client2.transport.delete_key_event._session + assert session1 != session2 + session1 = client1.transport.list_key_events._session + session2 = client2.transport.list_key_events._session + assert session1 != session2 session1 = client1.transport.get_display_video360_advertiser_link._session session2 = client2.transport.get_display_video360_advertiser_link._session assert session1 != session2 @@ -105442,12 +116383,21 @@ def test_analytics_admin_service_client_transport_session_collision(transport_na session1 = client1.transport.fetch_automated_ga4_configuration_opt_out._session session2 = client2.transport.fetch_automated_ga4_configuration_opt_out._session assert session1 != session2 + session1 = client1.transport.create_big_query_link._session + session2 = client2.transport.create_big_query_link._session + assert session1 != session2 session1 = client1.transport.get_big_query_link._session session2 = client2.transport.get_big_query_link._session assert session1 != session2 session1 = client1.transport.list_big_query_links._session session2 
= client2.transport.list_big_query_links._session assert session1 != session2 + session1 = client1.transport.delete_big_query_link._session + session2 = client2.transport.delete_big_query_link._session + assert session1 != session2 + session1 = client1.transport.update_big_query_link._session + session2 = client2.transport.update_big_query_link._session + assert session1 != session2 session1 = client1.transport.get_enhanced_measurement_settings._session session2 = client2.transport.get_enhanced_measurement_settings._session assert session1 != session2 @@ -105493,6 +116443,24 @@ def test_analytics_admin_service_client_transport_session_collision(transport_na session1 = client1.transport.delete_event_create_rule._session session2 = client2.transport.delete_event_create_rule._session assert session1 != session2 + session1 = client1.transport.get_event_edit_rule._session + session2 = client2.transport.get_event_edit_rule._session + assert session1 != session2 + session1 = client1.transport.list_event_edit_rules._session + session2 = client2.transport.list_event_edit_rules._session + assert session1 != session2 + session1 = client1.transport.create_event_edit_rule._session + session2 = client2.transport.create_event_edit_rule._session + assert session1 != session2 + session1 = client1.transport.update_event_edit_rule._session + session2 = client2.transport.update_event_edit_rule._session + assert session1 != session2 + session1 = client1.transport.delete_event_edit_rule._session + session2 = client2.transport.delete_event_edit_rule._session + assert session1 != session2 + session1 = client1.transport.reorder_event_edit_rules._session + session2 = client2.transport.reorder_event_edit_rules._session + assert session1 != session2 session1 = client1.transport.update_data_redaction_settings._session session2 = client2.transport.update_data_redaction_settings._session assert session1 != session2 @@ -105529,8 +116497,8 @@ def 
test_analytics_admin_service_client_transport_session_collision(transport_na session1 = client1.transport.delete_rollup_property_source_link._session session2 = client2.transport.delete_rollup_property_source_link._session assert session1 != session2 - session1 = client1.transport.create_subproperty._session - session2 = client2.transport.create_subproperty._session + session1 = client1.transport.provision_subproperty._session + session2 = client2.transport.provision_subproperty._session assert session1 != session2 session1 = client1.transport.create_subproperty_event_filter._session session2 = client2.transport.create_subproperty_event_filter._session @@ -106147,9 +117115,37 @@ def test_parse_event_create_rule_path(): assert expected == actual -def test_expanded_data_set_path(): +def test_event_edit_rule_path(): property = "squid" - expanded_data_set = "clam" + data_stream = "clam" + event_edit_rule = "whelk" + expected = "properties/{property}/dataStreams/{data_stream}/eventEditRules/{event_edit_rule}".format( + property=property, + data_stream=data_stream, + event_edit_rule=event_edit_rule, + ) + actual = AnalyticsAdminServiceClient.event_edit_rule_path( + property, data_stream, event_edit_rule + ) + assert expected == actual + + +def test_parse_event_edit_rule_path(): + expected = { + "property": "octopus", + "data_stream": "oyster", + "event_edit_rule": "nudibranch", + } + path = AnalyticsAdminServiceClient.event_edit_rule_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AnalyticsAdminServiceClient.parse_event_edit_rule_path(path) + assert expected == actual + + +def test_expanded_data_set_path(): + property = "cuttlefish" + expanded_data_set = "mussel" expected = "properties/{property}/expandedDataSets/{expanded_data_set}".format( property=property, expanded_data_set=expanded_data_set, @@ -106162,8 +117158,8 @@ def test_expanded_data_set_path(): def test_parse_expanded_data_set_path(): expected = { - "property": "whelk", - "expanded_data_set": "octopus", + "property": "winkle", + "expanded_data_set": "nautilus", } path = AnalyticsAdminServiceClient.expanded_data_set_path(**expected) @@ -106173,8 +117169,8 @@ def test_parse_expanded_data_set_path(): def test_firebase_link_path(): - property = "oyster" - firebase_link = "nudibranch" + property = "scallop" + firebase_link = "abalone" expected = "properties/{property}/firebaseLinks/{firebase_link}".format( property=property, firebase_link=firebase_link, @@ -106185,8 +117181,8 @@ def test_firebase_link_path(): def test_parse_firebase_link_path(): expected = { - "property": "cuttlefish", - "firebase_link": "mussel", + "property": "squid", + "firebase_link": "clam", } path = AnalyticsAdminServiceClient.firebase_link_path(**expected) @@ -106196,8 +117192,8 @@ def test_parse_firebase_link_path(): def test_global_site_tag_path(): - property = "winkle" - data_stream = "nautilus" + property = "whelk" + data_stream = "octopus" expected = "properties/{property}/dataStreams/{data_stream}/globalSiteTag".format( property=property, data_stream=data_stream, @@ -106208,8 +117204,8 @@ def test_global_site_tag_path(): def test_parse_global_site_tag_path(): expected = { - "property": "scallop", - "data_stream": "abalone", + "property": "oyster", + "data_stream": "nudibranch", } path = AnalyticsAdminServiceClient.global_site_tag_path(**expected) @@ -106219,8 +117215,8 @@ def test_parse_global_site_tag_path(): def test_google_ads_link_path(): - property = "squid" - google_ads_link = "clam" + 
property = "cuttlefish" + google_ads_link = "mussel" expected = "properties/{property}/googleAdsLinks/{google_ads_link}".format( property=property, google_ads_link=google_ads_link, @@ -106231,8 +117227,8 @@ def test_google_ads_link_path(): def test_parse_google_ads_link_path(): expected = { - "property": "whelk", - "google_ads_link": "octopus", + "property": "winkle", + "google_ads_link": "nautilus", } path = AnalyticsAdminServiceClient.google_ads_link_path(**expected) @@ -106242,7 +117238,7 @@ def test_parse_google_ads_link_path(): def test_google_signals_settings_path(): - property = "oyster" + property = "scallop" expected = "properties/{property}/googleSignalsSettings".format( property=property, ) @@ -106252,7 +117248,7 @@ def test_google_signals_settings_path(): def test_parse_google_signals_settings_path(): expected = { - "property": "nudibranch", + "property": "abalone", } path = AnalyticsAdminServiceClient.google_signals_settings_path(**expected) @@ -106261,10 +117257,33 @@ def test_parse_google_signals_settings_path(): assert expected == actual +def test_key_event_path(): + property = "squid" + key_event = "clam" + expected = "properties/{property}/keyEvents/{key_event}".format( + property=property, + key_event=key_event, + ) + actual = AnalyticsAdminServiceClient.key_event_path(property, key_event) + assert expected == actual + + +def test_parse_key_event_path(): + expected = { + "property": "whelk", + "key_event": "octopus", + } + path = AnalyticsAdminServiceClient.key_event_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AnalyticsAdminServiceClient.parse_key_event_path(path) + assert expected == actual + + def test_measurement_protocol_secret_path(): - property = "cuttlefish" - data_stream = "mussel" - measurement_protocol_secret = "winkle" + property = "oyster" + data_stream = "nudibranch" + measurement_protocol_secret = "cuttlefish" expected = "properties/{property}/dataStreams/{data_stream}/measurementProtocolSecrets/{measurement_protocol_secret}".format( property=property, data_stream=data_stream, @@ -106278,9 +117297,9 @@ def test_measurement_protocol_secret_path(): def test_parse_measurement_protocol_secret_path(): expected = { - "property": "nautilus", - "data_stream": "scallop", - "measurement_protocol_secret": "abalone", + "property": "mussel", + "data_stream": "winkle", + "measurement_protocol_secret": "nautilus", } path = AnalyticsAdminServiceClient.measurement_protocol_secret_path(**expected) @@ -106289,6 +117308,26 @@ def test_parse_measurement_protocol_secret_path(): assert expected == actual +def test_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AnalyticsAdminServiceClient.organization_path(organization) + assert expected == actual + + +def test_parse_organization_path(): + expected = { + "organization": "abalone", + } + path = AnalyticsAdminServiceClient.organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AnalyticsAdminServiceClient.parse_organization_path(path) + assert expected == actual + + def test_property_path(): property = "squid" expected = "properties/{property}".format( diff --git a/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py b/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py index 997c4f87bd9d..12a1f7b4cde1 100644 --- a/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py +++ b/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py @@ -1373,22 +1373,23 @@ async def test_get_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_account - ] = mock_object + ] = mock_rpc request = {} await client.get_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1741,22 +1742,23 @@ async def test_list_accounts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_accounts - ] = mock_object + ] = mock_rpc request = {} await client.list_accounts(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_accounts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2146,22 +2148,23 @@ async def test_delete_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_account - ] = mock_object + ] = mock_rpc request = {} await client.delete_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2509,22 +2512,23 @@ async def test_update_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_account - ] = mock_object + ] = mock_rpc request = {} await client.update_account(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2900,22 +2904,23 @@ async def test_provision_account_ticket_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.provision_account_ticket - ] = mock_object + ] = mock_rpc request = {} await client.provision_account_ticket(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.provision_account_ticket(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3137,22 +3142,23 @@ async def test_list_account_summaries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_account_summaries - ] = mock_object + ] = mock_rpc request = {} await client.list_account_summaries(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_account_summaries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3584,22 +3590,23 @@ async def test_get_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_property - ] = mock_object + ] = mock_rpc request = {} await client.get_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3962,22 +3969,23 @@ async def test_list_properties_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_properties - ] = mock_object + ] = mock_rpc request = {} await client.list_properties(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_properties(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4394,22 +4402,23 @@ async def test_create_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_property - ] = mock_object + ] = mock_rpc request = {} await client.create_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4735,22 +4744,23 @@ async def test_delete_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_property - ] = mock_object + ] = mock_rpc request = {} await client.delete_property(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5131,22 +5141,23 @@ async def test_update_property_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_property - ] = mock_object + ] = mock_rpc request = {} await client.update_property(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_property(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5532,22 +5543,23 @@ async def test_create_firebase_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_firebase_link - ] = mock_object + ] = mock_rpc request = {} await client.create_firebase_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_firebase_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5924,22 +5936,23 @@ async def test_delete_firebase_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_firebase_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_firebase_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_firebase_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6304,22 +6317,23 @@ async def test_list_firebase_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_firebase_links - ] = mock_object + ] = mock_rpc request = {} await client.list_firebase_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_firebase_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6903,22 +6917,23 @@ async def test_create_google_ads_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_google_ads_link - ] = mock_object + ] = mock_rpc request = {} await client.create_google_ads_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7312,22 +7327,23 @@ async def test_update_google_ads_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_google_ads_link - ] = mock_object + ] = mock_rpc request = {} await client.update_google_ads_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7709,22 +7725,23 @@ async def test_delete_google_ads_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_google_ads_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_google_ads_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_google_ads_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8090,22 +8107,23 @@ async def test_list_google_ads_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_google_ads_links - ] = mock_object + ] = mock_rpc request = {} await client.list_google_ads_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_google_ads_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8695,22 +8713,23 @@ async def test_get_data_sharing_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_sharing_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_data_sharing_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_sharing_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9099,22 +9118,23 @@ async def test_get_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.get_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9493,22 +9513,23 @@ async def test_list_measurement_protocol_secrets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_measurement_protocol_secrets - ] = mock_object + ] = mock_rpc request = {} await client.list_measurement_protocol_secrets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_measurement_protocol_secrets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10093,22 +10114,23 @@ async def test_create_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.create_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10496,22 +10518,23 @@ async def test_delete_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.delete_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10877,22 +10900,23 @@ async def test_update_measurement_protocol_secret_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_measurement_protocol_secret - ] = mock_object + ] = mock_rpc request = {} await client.update_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_measurement_protocol_secret(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11284,22 +11308,23 @@ async def test_acknowledge_user_data_collection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.acknowledge_user_data_collection - ] = mock_object + ] = mock_rpc request = {} await client.acknowledge_user_data_collection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.acknowledge_user_data_collection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11587,22 +11612,23 @@ async def test_search_change_history_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_change_history_events - ] = mock_object + ] = mock_rpc request = {} await client.search_change_history_events(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_change_history_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12108,22 +12134,23 @@ async def test_create_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.create_conversion_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12528,22 +12555,23 @@ async def test_update_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.update_conversion_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12951,22 +12979,23 @@ async def test_get_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.get_conversion_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13343,22 +13372,23 @@ async def test_delete_conversion_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversion_event - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversion_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_conversion_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13724,22 +13754,23 @@ async def test_list_conversion_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversion_events - ] = mock_object + ] = mock_rpc request = {} await client.list_conversion_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversion_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14315,22 +14346,23 @@ async def test_create_key_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_key_event - ] = mock_object + ] = mock_rpc request = {} await client.create_key_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14703,22 +14735,23 @@ async def test_update_key_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_key_event - ] = mock_object + ] = mock_rpc request = {} await client.update_key_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15093,22 +15126,23 @@ async def test_get_key_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_key_event - ] = mock_object + ] = mock_rpc request = {} await client.get_key_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15456,22 +15490,23 @@ async def test_delete_key_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_key_event - ] = mock_object + ] = mock_rpc request = {} await client.delete_key_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_key_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15813,22 +15848,23 @@ async def test_list_key_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_key_events - ] = mock_object + ] = mock_rpc request = {} await client.list_key_events(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_key_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16399,22 +16435,23 @@ async def test_create_custom_dimension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_custom_dimension - ] = mock_object + ] = mock_rpc request = {} await client.create_custom_dimension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16818,22 +16855,23 @@ async def test_update_custom_dimension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_custom_dimension - ] = mock_object + ] = mock_rpc request = {} await client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17228,22 +17266,23 @@ async def test_list_custom_dimensions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_custom_dimensions - ] = mock_object + ] = mock_rpc request = {} await client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_custom_dimensions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17811,22 +17850,23 @@ async def test_archive_custom_dimension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.archive_custom_dimension - ] = mock_object + ] = mock_rpc request = {} await client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.archive_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18204,22 +18244,23 @@ async def test_get_custom_dimension_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_custom_dimension - ] = mock_object + ] = mock_rpc request = {} await client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_custom_dimension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18625,22 +18666,23 @@ async def test_create_custom_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_custom_metric - ] = mock_object + ] = mock_rpc request = {} await client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19058,22 +19100,23 @@ async def test_update_custom_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_custom_metric - ] = mock_object + ] = mock_rpc request = {} await client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19473,22 +19516,23 @@ async def test_list_custom_metrics_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_custom_metrics - ] = mock_object + ] = mock_rpc request = {} await client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_custom_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20056,22 +20100,23 @@ async def test_archive_custom_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.archive_custom_metric - ] = mock_object + ] = mock_rpc request = {} await client.archive_custom_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.archive_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20456,22 +20501,23 @@ async def test_get_custom_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_custom_metric - ] = mock_object + ] = mock_rpc request = {} await client.get_custom_metric(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_custom_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20868,22 +20914,23 @@ async def test_get_data_retention_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_retention_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_retention_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21268,22 +21315,23 @@ async def test_update_data_retention_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_retention_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_retention_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -21678,22 +21726,23 @@ async def test_create_data_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_stream - ] = mock_object + ] = mock_rpc request = {} await client.create_data_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22096,22 +22145,23 @@ async def test_delete_data_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_stream - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_stream(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22476,22 +22526,23 @@ async def test_update_data_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_stream - ] = mock_object + ] = mock_rpc request = {} await client.update_data_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -22901,22 +22952,23 @@ async def test_list_data_streams_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_streams - ] = mock_object + ] = mock_rpc request = {} await client.list_data_streams(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_streams(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23483,22 +23535,23 @@ async def test_get_data_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_stream - ] = mock_object + ] = mock_rpc request = {} await client.get_data_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -23863,22 +23916,23 @@ async def test_run_access_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_access_report - ] = mock_object + ] = mock_rpc request = {} await client.run_access_report(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_access_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-analytics-data/CHANGELOG.md b/packages/google-analytics-data/CHANGELOG.md index 42f54c5847ba..f863e9dda9cd 100644 --- a/packages/google-analytics-data/CHANGELOG.md +++ b/packages/google-analytics-data/CHANGELOG.md @@ -1,5 +1,33 @@ # Changelog +## [0.18.12](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.11...google-analytics-data-v0.18.12) (2024-09-23) + + +### Features + +* add `GetPropertyQuotasSnapshot` method to the Data API v1alpha ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) +* add `PropertyQuotasSnapshot` type to the Data API v1alpha ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) + + +### Documentation + +* update the documentation for the `CreateReportTask` method ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) + +## [0.18.11](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.10...google-analytics-data-v0.18.11) (2024-08-08) + + +### Features + +* add the `Comparison` type ([2c4e4d1](https://github.com/googleapis/google-cloud-python/commit/2c4e4d12ea70dbdf1af813f114d6df7d33d8e6d3)) +* add the `ComparisonMetadata` type ([2c4e4d1](https://github.com/googleapis/google-cloud-python/commit/2c4e4d12ea70dbdf1af813f114d6df7d33d8e6d3)) +* add the `comparisons` field to the `Metadata` resource ([2c4e4d1](https://github.com/googleapis/google-cloud-python/commit/2c4e4d12ea70dbdf1af813f114d6df7d33d8e6d3)) +* add the `comparisons` field to the `RunReportRequest`, `RunPivotReportRequest` 
resources ([2c4e4d1](https://github.com/googleapis/google-cloud-python/commit/2c4e4d12ea70dbdf1af813f114d6df7d33d8e6d3)) + + +### Documentation + +* a comment for field `custom_definition` in message `DimensionMetadata` is changed ([2c4e4d1](https://github.com/googleapis/google-cloud-python/commit/2c4e4d12ea70dbdf1af813f114d6df7d33d8e6d3)) + ## [0.18.10](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.9...google-analytics-data-v0.18.10) (2024-07-30) diff --git a/packages/google-analytics-data/google/analytics/data/__init__.py b/packages/google-analytics-data/google/analytics/data/__init__.py index 9dfd7e4d3a94..ef41c399ec91 100644 --- a/packages/google-analytics-data/google/analytics/data/__init__.py +++ b/packages/google-analytics-data/google/analytics/data/__init__.py @@ -56,6 +56,8 @@ CohortReportSettings, CohortSpec, CohortsRange, + Comparison, + ComparisonMetadata, Compatibility, DateRange, Dimension, @@ -120,6 +122,8 @@ "CohortReportSettings", "CohortSpec", "CohortsRange", + "Comparison", + "ComparisonMetadata", "DateRange", "Dimension", "DimensionCompatibility", diff --git a/packages/google-analytics-data/google/analytics/data/gapic_version.py b/packages/google-analytics-data/google/analytics/data/gapic_version.py index d18984dd1f08..48dad06a30d5 100644 --- a/packages/google-analytics-data/google/analytics/data/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.18.10" # {x-release-please-version} +__version__ = "0.18.12" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py b/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py index d27c32f3b750..4b5c6ad3ac51 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py @@ -32,6 +32,7 @@ CreateRecurringAudienceListRequest, CreateReportTaskRequest, GetAudienceListRequest, + GetPropertyQuotasSnapshotRequest, GetRecurringAudienceListRequest, GetReportTaskRequest, ListAudienceListsRequest, @@ -40,6 +41,7 @@ ListRecurringAudienceListsResponse, ListReportTasksRequest, ListReportTasksResponse, + PropertyQuotasSnapshot, QueryAudienceListRequest, QueryAudienceListResponse, QueryReportTaskRequest, @@ -172,6 +174,7 @@ "FunnelStep", "FunnelSubReport", "GetAudienceListRequest", + "GetPropertyQuotasSnapshotRequest", "GetRecurringAudienceListRequest", "GetReportTaskRequest", "InListFilter", @@ -190,6 +193,7 @@ "NumericValue", "OrderBy", "PropertyQuota", + "PropertyQuotasSnapshot", "QueryAudienceListRequest", "QueryAudienceListResponse", "QueryReportTaskRequest", diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json index 886097e06d3f..bb6c0b6f462d 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json @@ -30,6 +30,11 @@ "get_audience_list" ] }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, "GetRecurringAudienceList": { "methods": [ "get_recurring_audience_list" @@ -100,6 +105,11 @@ "get_audience_list" ] }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, 
"GetRecurringAudienceList": { "methods": [ "get_recurring_audience_list" @@ -170,6 +180,11 @@ "get_audience_list" ] }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, "GetRecurringAudienceList": { "methods": [ "get_recurring_audience_list" diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py index d18984dd1f08..48dad06a30d5 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.18.10" # {x-release-please-version} +__version__ = "0.18.12" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py index bf56a8b4708b..85f47086326b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -72,6 +71,12 @@ class AlphaAnalyticsDataAsyncClient: parse_audience_list_path = staticmethod( AlphaAnalyticsDataClient.parse_audience_list_path ) + property_quotas_snapshot_path = staticmethod( + AlphaAnalyticsDataClient.property_quotas_snapshot_path + ) + parse_property_quotas_snapshot_path = staticmethod( + AlphaAnalyticsDataClient.parse_property_quotas_snapshot_path + ) recurring_audience_list_path = staticmethod( AlphaAnalyticsDataClient.recurring_audience_list_path ) @@ -204,10 +209,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AlphaAnalyticsDataClient).get_transport_class, - type(AlphaAnalyticsDataClient), - ) + get_transport_class = AlphaAnalyticsDataClient.get_transport_class def __init__( self, @@ -1472,6 +1474,118 @@ async def sample_list_recurring_audience_lists(): # Done; return the response. return response + async def get_property_quotas_snapshot( + self, + request: Optional[ + Union[analytics_data_api.GetPropertyQuotasSnapshotRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = await client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest, dict]]): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + name (:class:`str`): + Required. Quotas from this property will be listed in + the response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetPropertyQuotasSnapshotRequest): + request = analytics_data_api.GetPropertyQuotasSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_property_quotas_snapshot + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def create_report_task( self, request: Optional[ @@ -1489,6 +1603,12 @@ async def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + .. 
code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py index ccebedecd1af..2b333ae4af4a 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py @@ -207,6 +207,21 @@ def parse_audience_list_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def property_quotas_snapshot_path( + property: str, + ) -> str: + """Returns a fully-qualified property_quotas_snapshot string.""" + return "properties/{property}/propertyQuotasSnapshot".format( + property=property, + ) + + @staticmethod + def parse_property_quotas_snapshot_path(path: str) -> Dict[str, str]: + """Parses a property_quotas_snapshot path into its component segments.""" + m = re.match(r"^properties/(?P.+?)/propertyQuotasSnapshot$", path) + return m.groupdict() if m else {} + @staticmethod def recurring_audience_list_path( property: str, @@ -706,7 +721,7 @@ def __init__( Type[AlphaAnalyticsDataTransport], Callable[..., AlphaAnalyticsDataTransport], ] = ( - type(self).get_transport_class(transport) + AlphaAnalyticsDataClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AlphaAnalyticsDataTransport], transport) ) @@ -1898,6 +1913,117 @@ def sample_list_recurring_audience_lists(): # Done; return the response. 
return response + def get_property_quotas_snapshot( + self, + request: Optional[ + Union[analytics_data_api.GetPropertyQuotasSnapshotRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest, dict]): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + name (str): + Required. Quotas from this property will be listed in + the response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetPropertyQuotasSnapshotRequest): + request = analytics_data_api.GetPropertyQuotasSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_property_quotas_snapshot + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def create_report_task( self, request: Optional[ @@ -1915,6 +2041,12 @@ def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. 
+ A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py index c2c66c588816..3ba97b9f363e 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py @@ -180,6 +180,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_property_quotas_snapshot: gapic_v1.method.wrap_method( + self.get_property_quotas_snapshot, + default_timeout=None, + client_info=client_info, + ), self.create_report_task: gapic_v1.method.wrap_method( self.create_report_task, default_timeout=None, @@ -320,6 +325,18 @@ def list_recurring_audience_lists( ]: raise NotImplementedError() + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + Union[ + analytics_data_api.PropertyQuotasSnapshot, + Awaitable[analytics_data_api.PropertyQuotasSnapshot], + ], + ]: + raise NotImplementedError() + @property def create_report_task( self, diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py index b9962cad40e2..c43f7d864e80 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py 
+++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py @@ -659,6 +659,37 @@ def list_recurring_audience_lists( ) return self._stubs["list_recurring_audience_lists"] + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + analytics_data_api.PropertyQuotasSnapshot, + ]: + r"""Return a callable for the get property quotas snapshot method over gRPC. + + Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + Returns: + Callable[[~.GetPropertyQuotasSnapshotRequest], + ~.PropertyQuotasSnapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_property_quotas_snapshot" not in self._stubs: + self._stubs["get_property_quotas_snapshot"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1alpha.AlphaAnalyticsData/GetPropertyQuotasSnapshot", + request_serializer=analytics_data_api.GetPropertyQuotasSnapshotRequest.serialize, + response_deserializer=analytics_data_api.PropertyQuotasSnapshot.deserialize, + ) + return self._stubs["get_property_quotas_snapshot"] + @property def create_report_task( self, @@ -672,6 +703,12 @@ def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. 
+ Returns: Callable[[~.CreateReportTaskRequest], ~.Operation]: diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py index c05a987fbb2b..a220f2ddb524 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py @@ -667,6 +667,37 @@ def list_recurring_audience_lists( ) return self._stubs["list_recurring_audience_lists"] + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + Awaitable[analytics_data_api.PropertyQuotasSnapshot], + ]: + r"""Return a callable for the get property quotas snapshot method over gRPC. + + Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + Returns: + Callable[[~.GetPropertyQuotasSnapshotRequest], + Awaitable[~.PropertyQuotasSnapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_property_quotas_snapshot" not in self._stubs: + self._stubs["get_property_quotas_snapshot"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1alpha.AlphaAnalyticsData/GetPropertyQuotasSnapshot", + request_serializer=analytics_data_api.GetPropertyQuotasSnapshotRequest.serialize, + response_deserializer=analytics_data_api.PropertyQuotasSnapshot.deserialize, + ) + return self._stubs["get_property_quotas_snapshot"] + @property def create_report_task( self, @@ -681,6 +712,12 @@ def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + Returns: Callable[[~.CreateReportTaskRequest], Awaitable[~.Operation]]: @@ -841,6 +878,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_property_quotas_snapshot: gapic_v1.method_async.wrap_method( + self.get_property_quotas_snapshot, + default_timeout=None, + client_info=client_info, + ), self.create_report_task: gapic_v1.method_async.wrap_method( self.create_report_task, default_timeout=None, diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py index 5f98dacd404b..510c1d55640b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py @@ -103,6 +103,14 @@ def post_get_audience_list(self, response): logging.log(f"Received response: {response}") return response + def pre_get_property_quotas_snapshot(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_get_property_quotas_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_recurring_audience_list(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -275,6 +283,31 @@ def post_get_audience_list( """ return response + def pre_get_property_quotas_snapshot( + self, + request: analytics_data_api.GetPropertyQuotasSnapshotRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + analytics_data_api.GetPropertyQuotasSnapshotRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_property_quotas_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_get_property_quotas_snapshot( + self, response: analytics_data_api.PropertyQuotasSnapshot + ) -> analytics_data_api.PropertyQuotasSnapshot: + """Post-rpc interceptor for get_property_quotas_snapshot + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. 
+ """ + return response + def pre_get_recurring_audience_list( self, request: analytics_data_api.GetRecurringAudienceListRequest, @@ -1002,6 +1035,98 @@ def __call__( resp = self._interceptor.post_get_audience_list(resp) return resp + class _GetPropertyQuotasSnapshot(AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("GetPropertyQuotasSnapshot") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_data_api.GetPropertyQuotasSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Call the get property quotas + snapshot method over HTTP. + + Args: + request (~.analytics_data_api.GetPropertyQuotasSnapshotRequest): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=properties/*/propertyQuotasSnapshot}", + }, + ] + request, metadata = self._interceptor.pre_get_property_quotas_snapshot( + request, metadata + ) + pb_request = analytics_data_api.GetPropertyQuotasSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.PropertyQuotasSnapshot() + pb_resp = analytics_data_api.PropertyQuotasSnapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_property_quotas_snapshot(resp) + return resp + class _GetRecurringAudienceList(AlphaAnalyticsDataRestStub): def __hash__(self): return hash("GetRecurringAudienceList") @@ -1876,6 +2001,17 @@ def get_audience_list( # In C++ this would require a dynamic_cast return self._GetAudienceList(self._session, self._host, self._interceptor) # type: ignore + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + analytics_data_api.PropertyQuotasSnapshot, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPropertyQuotasSnapshot(self._session, self._host, self._interceptor) # type: ignore + @property def get_recurring_audience_list( self, diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py index be0c011beb28..c5f36ceee95f 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py @@ -23,6 +23,7 @@ CreateRecurringAudienceListRequest, CreateReportTaskRequest, GetAudienceListRequest, + GetPropertyQuotasSnapshotRequest, GetRecurringAudienceListRequest, GetReportTaskRequest, ListAudienceListsRequest, @@ -31,6 +32,7 @@ ListRecurringAudienceListsResponse, ListReportTasksRequest, ListReportTasksResponse, + PropertyQuotasSnapshot, QueryAudienceListRequest, QueryAudienceListResponse, QueryReportTaskRequest, @@ -129,6 +131,7 @@ 
"CreateRecurringAudienceListRequest", "CreateReportTaskRequest", "GetAudienceListRequest", + "GetPropertyQuotasSnapshotRequest", "GetRecurringAudienceListRequest", "GetReportTaskRequest", "ListAudienceListsRequest", @@ -137,6 +140,7 @@ "ListRecurringAudienceListsResponse", "ListReportTasksRequest", "ListReportTasksResponse", + "PropertyQuotasSnapshot", "QueryAudienceListRequest", "QueryAudienceListResponse", "QueryReportTaskRequest", diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py index 40cf2af6b247..5ef02adbab94 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py @@ -31,6 +31,8 @@ "GetRecurringAudienceListRequest", "ListRecurringAudienceListsRequest", "ListRecurringAudienceListsResponse", + "GetPropertyQuotasSnapshotRequest", + "PropertyQuotasSnapshot", "GetAudienceListRequest", "ListAudienceListsRequest", "ListAudienceListsResponse", @@ -368,6 +370,60 @@ def raw_page(self): ) +class GetPropertyQuotasSnapshotRequest(proto.Message): + r"""A request to return the PropertyQuotasSnapshot for a given + category. + + Attributes: + name (str): + Required. Quotas from this property will be listed in the + response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PropertyQuotasSnapshot(proto.Message): + r"""Current state of all Property Quotas organized by quota + category. + + Attributes: + name (str): + Identifier. The property quota snapshot + resource name. 
+ core_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for core property tokens + realtime_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for realtime property tokens + funnel_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for funnel property tokens + """ + + name: str = proto.Field( + proto.STRING, + number=4, + ) + core_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=1, + message=data.PropertyQuota, + ) + realtime_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=2, + message=data.PropertyQuota, + ) + funnel_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=3, + message=data.PropertyQuota, + ) + + class GetAudienceListRequest(proto.Message): r"""A request to retrieve configuration metadata about a specific audience list. @@ -942,7 +998,7 @@ class RunFunnelReportRequest(proto.Message): Attributes: property (str): - Optional. A Google Analytics GA4 property identifier whose + Optional. A Google Analytics property identifier whose events are tracked. Specified in the URL path and not the body. To learn more, see `where to find your Property ID `__. @@ -1146,7 +1202,7 @@ class ReportTask(proto.Message): name (str): Output only. Identifier. The report task resource name assigned during creation. Format: - ``properties/{property}/reportTasks/{report_task}`` + "properties/{property}/reportTasks/{report_task}". report_definition (google.analytics.data_v1alpha.types.ReportTask.ReportDefinition): Optional. A report definition to fetch report data, which describes the structure of a report. @@ -1236,8 +1292,8 @@ class ReportDefinition(proto.Message): returned if they are not separately removed by a filter. Regardless of this ``keep_empty_rows`` setting, only data - recorded by the Google Analytics (GA4) property can be - displayed in a report. 
+ recorded by the Google Analytics property can be displayed + in a report. For example if a property never logs a ``purchase`` event, then a query for the ``eventName`` dimension and diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py index d0d65e10b736..f3e86639400b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py @@ -293,7 +293,7 @@ class MetricType(proto.Enum): class RestrictedMetricType(proto.Enum): r"""Categories of data that you may be restricted from viewing on - certain GA4 properties. + certain Google Analytics properties. Values: RESTRICTED_METRIC_TYPE_UNSPECIFIED (0): @@ -2231,7 +2231,7 @@ class Segment(proto.Message): particular line of products or who visit a specific part of your site or trigger certain events in your app. - To learn more, see `GA4 Segment + To learn more, see `Segment Builder `__. This message has `oneof`_ fields (mutually exclusive fields). 
diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/__init__.py b/packages/google-analytics-data/google/analytics/data_v1beta/__init__.py index 4c5105050e54..d77ca6b13486 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/__init__.py @@ -54,6 +54,8 @@ CohortReportSettings, CohortSpec, CohortsRange, + Comparison, + ComparisonMetadata, Compatibility, DateRange, Dimension, @@ -104,6 +106,8 @@ "CohortReportSettings", "CohortSpec", "CohortsRange", + "Comparison", + "ComparisonMetadata", "Compatibility", "CreateAudienceExportRequest", "DateRange", diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py index d18984dd1f08..48dad06a30d5 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.18.10" # {x-release-please-version} +__version__ = "0.18.12" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py index 2a6f8bbb4aed..f3de83272ce7 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BetaAnalyticsDataClient).get_transport_class, type(BetaAnalyticsDataClient) - ) + get_transport_class = BetaAnalyticsDataClient.get_transport_class def __init__( self, diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py index bbf248164ecf..0e5f00491f32 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py @@ -683,7 +683,7 @@ def __init__( Type[BetaAnalyticsDataTransport], Callable[..., BetaAnalyticsDataTransport], ] = ( - type(self).get_transport_class(transport) + BetaAnalyticsDataClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BetaAnalyticsDataTransport], transport) ) diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/types/__init__.py b/packages/google-analytics-data/google/analytics/data_v1beta/types/__init__.py index d3564da440c2..33d6267b83a9 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/types/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/types/__init__.py @@ -45,6 +45,8 @@ CohortReportSettings, CohortSpec, CohortsRange, + Comparison, + ComparisonMetadata, Compatibility, DateRange, Dimension, @@ -107,6 +109,8 @@ "CohortReportSettings", "CohortSpec", "CohortsRange", + "Comparison", + "ComparisonMetadata", "DateRange", "Dimension", "DimensionCompatibility", diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/types/analytics_data_api.py 
b/packages/google-analytics-data/google/analytics/data_v1beta/types/analytics_data_api.py index 6676ecceeb45..5374bfdb0fe5 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/types/analytics_data_api.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/types/analytics_data_api.py @@ -157,6 +157,8 @@ class Metadata(proto.Message): The dimension descriptions. metrics (MutableSequence[google.analytics.data_v1beta.types.MetricMetadata]): The metric descriptions. + comparisons (MutableSequence[google.analytics.data_v1beta.types.ComparisonMetadata]): + The comparison descriptions. """ name: str = proto.Field( @@ -173,6 +175,11 @@ class Metadata(proto.Message): number=2, message=data.MetricMetadata, ) + comparisons: MutableSequence[data.ComparisonMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.ComparisonMetadata, + ) class RunReportRequest(proto.Message): @@ -269,6 +276,11 @@ class RunReportRequest(proto.Message): Toggles whether to return the current state of this Analytics Property's quota. Quota is returned in `PropertyQuota <#PropertyQuota>`__. + comparisons (MutableSequence[google.analytics.data_v1beta.types.Comparison]): + Optional. The configuration of comparisons + requested and displayed. The request only + requires a comparisons field in order to receive + a comparison column in the response. """ property: str = proto.Field( @@ -335,6 +347,11 @@ class RunReportRequest(proto.Message): proto.BOOL, number=14, ) + comparisons: MutableSequence[data.Comparison] = proto.RepeatedField( + proto.MESSAGE, + number=15, + message=data.Comparison, + ) class RunReportResponse(proto.Message): @@ -504,6 +521,12 @@ class RunPivotReportRequest(proto.Message): Toggles whether to return the current state of this Analytics Property's quota. Quota is returned in `PropertyQuota <#PropertyQuota>`__. + comparisons (MutableSequence[google.analytics.data_v1beta.types.Comparison]): + Optional. 
The configuration of comparisons + requested and displayed. The request requires + both a comparisons field and a comparisons + dimension to receive a comparison column in the + response. """ property: str = proto.Field( @@ -557,6 +580,11 @@ class RunPivotReportRequest(proto.Message): proto.BOOL, number=11, ) + comparisons: MutableSequence[data.Comparison] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message=data.Comparison, + ) class RunPivotReportResponse(proto.Message): diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/types/data.py b/packages/google-analytics-data/google/analytics/data_v1beta/types/data.py index 822dbacc246d..9cc69fc324a8 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/types/data.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/types/data.py @@ -31,6 +31,7 @@ "Dimension", "DimensionExpression", "Metric", + "Comparison", "FilterExpression", "FilterExpressionList", "Filter", @@ -54,6 +55,7 @@ "QuotaStatus", "DimensionMetadata", "MetricMetadata", + "ComparisonMetadata", "DimensionCompatibility", "MetricCompatibility", }, @@ -471,6 +473,56 @@ class Metric(proto.Message): ) +class Comparison(proto.Message): + r"""Defines an individual comparison. Most requests will include + multiple comparisons so that the report compares between the + comparisons. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Each comparison produces separate rows in the + response. In the response, this comparison is + identified by this name. If name is unspecified, + we will use the saved comparisons display name. + + This field is a member of `oneof`_ ``_name``. 
+ dimension_filter (google.analytics.data_v1beta.types.FilterExpression): + A basic comparison. + + This field is a member of `oneof`_ ``one_comparison``. + comparison (str): + A saved comparison identified by the + comparison's resource name. For example, + 'comparisons/1234'. + + This field is a member of `oneof`_ ``one_comparison``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + dimension_filter: "FilterExpression" = proto.Field( + proto.MESSAGE, + number=2, + oneof="one_comparison", + message="FilterExpression", + ) + comparison: str = proto.Field( + proto.STRING, + number=3, + oneof="one_comparison", + ) + + class FilterExpression(proto.Message): r"""To express dimension or metric filters. The fields in the same FilterExpression need to be either all dimensions or all @@ -1751,8 +1803,14 @@ class DimensionMetadata(proto.Message): the deprecation period, the dimension will be available only by ``apiName``. custom_definition (bool): - True if the dimension is a custom dimension - for this property. + True if the dimension is custom to this + property. This includes user, event, & item + scoped custom dimensions; to learn more about + custom dimensions, see + https://support.google.com/analytics/answer/14240153. + This also include custom channel groups; to + learn more about custom channel groups, see + https://support.google.com/analytics/answer/13051316. category (str): The display name of the category that this dimension belongs to. Similar dimensions and @@ -1889,6 +1947,35 @@ class BlockedReason(proto.Enum): ) +class ComparisonMetadata(proto.Message): + r"""The metadata for a single comparison. + + Attributes: + api_name (str): + This comparison's resource name. Useable in + `Comparison <#Comparison>`__'s ``comparison`` field. For + example, 'comparisons/1234'. + ui_name (str): + This comparison's name within the Google + Analytics user interface. + description (str): + This comparison's description. 
+ """ + + api_name: str = proto.Field( + proto.STRING, + number=1, + ) + ui_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + + class DimensionCompatibility(proto.Message): r"""The compatibility for a single dimension. diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py new file mode 100644 index 000000000000..cfa47528bf6b --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPropertyQuotasSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = await client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py new file mode 100644 index 000000000000..964edacdbb5d --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPropertyQuotasSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync] diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json index d710f6b29e8a..d5b5816eb6e5 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.18.10" + "version": "0.18.12" }, "snippets": [ { @@ -679,6 +679,167 @@ ], "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.get_property_quotas_snapshot", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetPropertyQuotasSnapshot", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": 
"GetPropertyQuotasSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.PropertyQuotasSnapshot", + "shortName": "get_property_quotas_snapshot" + }, + "description": "Sample for GetPropertyQuotasSnapshot", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.get_property_quotas_snapshot", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetPropertyQuotasSnapshot", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetPropertyQuotasSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.PropertyQuotasSnapshot", + "shortName": "get_property_quotas_snapshot" + }, + "description": "Sample for GetPropertyQuotasSnapshot", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json index 6e9433c51432..753d0fcebd81 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.18.10" + "version": "0.18.12" }, "snippets": [ { diff --git 
a/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py b/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py index 416f17a987b4..463b61dcee73 100644 --- a/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py +++ b/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py @@ -43,6 +43,7 @@ class dataCallTransformer(cst.CSTTransformer): 'create_recurring_audience_list': ('parent', 'recurring_audience_list', ), 'create_report_task': ('parent', 'report_task', ), 'get_audience_list': ('name', ), + 'get_property_quotas_snapshot': ('name', ), 'get_recurring_audience_list': ('name', ), 'get_report_task': ('name', ), 'list_audience_lists': ('parent', 'page_size', 'page_token', ), diff --git a/packages/google-analytics-data/scripts/fixup_data_v1beta_keywords.py b/packages/google-analytics-data/scripts/fixup_data_v1beta_keywords.py index 3a98463f2491..7363c0b9a5be 100644 --- a/packages/google-analytics-data/scripts/fixup_data_v1beta_keywords.py +++ b/packages/google-analytics-data/scripts/fixup_data_v1beta_keywords.py @@ -47,9 +47,9 @@ class dataCallTransformer(cst.CSTTransformer): 'get_metadata': ('name', ), 'list_audience_exports': ('parent', 'page_size', 'page_token', ), 'query_audience_export': ('name', 'offset', 'limit', ), - 'run_pivot_report': ('property', 'dimensions', 'metrics', 'date_ranges', 'pivots', 'dimension_filter', 'metric_filter', 'currency_code', 'cohort_spec', 'keep_empty_rows', 'return_property_quota', ), + 'run_pivot_report': ('property', 'dimensions', 'metrics', 'date_ranges', 'pivots', 'dimension_filter', 'metric_filter', 'currency_code', 'cohort_spec', 'keep_empty_rows', 'return_property_quota', 'comparisons', ), 'run_realtime_report': ('property', 'dimensions', 'metrics', 'dimension_filter', 'metric_filter', 'limit', 'metric_aggregations', 'order_bys', 'return_property_quota', 'minute_ranges', ), - 'run_report': ('property', 'dimensions', 'metrics', 'date_ranges', 'dimension_filter', 
'metric_filter', 'offset', 'limit', 'metric_aggregations', 'order_bys', 'currency_code', 'cohort_spec', 'keep_empty_rows', 'return_property_quota', ), + 'run_report': ('property', 'dimensions', 'metrics', 'date_ranges', 'dimension_filter', 'metric_filter', 'offset', 'limit', 'metric_aggregations', 'order_bys', 'currency_code', 'cohort_spec', 'keep_empty_rows', 'return_property_quota', 'comparisons', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py index 7f312b50b0ba..1a4da5db4733 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py @@ -1351,22 +1351,23 @@ async def test_run_funnel_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_funnel_report - ] = mock_object + ] = mock_rpc request = {} await client.run_funnel_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_funnel_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1594,8 +1595,9 @@ def test_create_audience_list_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_audience_list(request) @@ -1651,26 +1653,28 @@ async def test_create_audience_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_audience_list - ] = mock_object + ] = mock_rpc request = {} await client.create_audience_list(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2049,22 +2053,23 @@ async def test_query_audience_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_audience_list - ] = mock_object + ] = mock_rpc request = {} await client.query_audience_list(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2443,22 +2448,23 @@ async def test_sheet_export_audience_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.sheet_export_audience_list - ] = mock_object + ] = mock_rpc request = {} await client.sheet_export_audience_list(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.sheet_export_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2856,22 +2862,23 @@ async def test_get_audience_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_audience_list - ] = mock_object + ] = mock_rpc request = {} await client.get_audience_list(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3261,22 +3268,23 @@ async def test_list_audience_lists_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_audience_lists - ] = mock_object + ] = mock_rpc request = {} await client.list_audience_lists(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3863,22 +3871,23 @@ async def test_create_recurring_audience_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_recurring_audience_list - ] = mock_object + ] = mock_rpc request = {} await client.create_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4289,22 +4298,23 @@ async def test_get_recurring_audience_list_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_recurring_audience_list - ] = mock_object + ] = mock_rpc request = {} await client.get_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4687,22 +4697,23 @@ async def test_list_recurring_audience_lists_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_recurring_audience_lists - ] = mock_object + ] = mock_rpc request = {} await client.list_recurring_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_recurring_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5109,11 +5120,11 @@ async def test_list_recurring_audience_lists_async_pages(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.CreateReportTaskRequest, + analytics_data_api.GetPropertyQuotasSnapshotRequest, dict, ], ) -def test_create_report_task(request_type, transport: str = "grpc"): +def test_get_property_quotas_snapshot(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5125,23 +5136,26 @@ def test_create_report_task(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_report_task(request) + call.return_value = analytics_data_api.PropertyQuotasSnapshot( + name="name_value", + ) + response = client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == "name_value" -def test_create_report_task_empty_call(): +def test_get_property_quotas_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -5151,18 +5165,18 @@ def test_create_report_task_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_report_task() + client.get_property_quotas_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateReportTaskRequest() + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest() -def test_create_report_task_non_empty_request_with_auto_populated_field(): +def test_get_property_quotas_snapshot_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5173,26 +5187,26 @@ def test_create_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.CreateReportTaskRequest( - parent="parent_value", + request = analytics_data_api.GetPropertyQuotasSnapshotRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_report_task(request=request) + client.get_property_quotas_snapshot(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateReportTaskRequest( - parent="parent_value", + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest( + name="name_value", ) -def test_create_report_task_use_cached_wrapped_rpc(): +def test_get_property_quotas_snapshot_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5207,7 +5221,8 @@ def test_create_report_task_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_report_task in client._transport._wrapped_methods + client._transport.get_property_quotas_snapshot + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -5216,19 +5231,15 @@ def test_create_report_task_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_report_task + client._transport.get_property_quotas_snapshot ] = mock_rpc request = {} - client.create_report_task(request) + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_report_task(request) + client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5236,7 +5247,7 @@ def test_create_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_report_task_empty_call_async(): +async def test_get_property_quotas_snapshot_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataAsyncClient( @@ -5246,20 +5257,22 @@ async def test_create_report_task_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.PropertyQuotasSnapshot( + name="name_value", + ) ) - response = await client.create_report_task() + response = await client.get_property_quotas_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateReportTaskRequest() + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest() @pytest.mark.asyncio -async def test_create_report_task_async_use_cached_wrapped_rpc( +async def test_get_property_quotas_snapshot_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5276,37 +5289,34 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_report_task + client._client._transport.get_property_quotas_snapshot in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_report_task - ] = mock_object + client._client._transport.get_property_quotas_snapshot + ] = mock_rpc request = {} - await client.create_report_task(request) + await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + assert mock_rpc.call_count == 1 - await client.create_report_task(request) + await client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_report_task_async( +async def test_get_property_quotas_snapshot_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.CreateReportTaskRequest, + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5319,46 +5329,49 @@ async def test_create_report_task_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.PropertyQuotasSnapshot( + name="name_value", + ) ) - response = await client.create_report_task(request) + response = await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_create_report_task_async_from_dict(): - await test_create_report_task_async(request_type=dict) +async def test_get_property_quotas_snapshot_async_from_dict(): + await test_get_property_quotas_snapshot_async(request_type=dict) -def test_create_report_task_field_headers(): +def test_get_property_quotas_snapshot_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_report_task(request) + call.return_value = analytics_data_api.PropertyQuotasSnapshot() + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5369,30 +5382,30 @@ def test_create_report_task_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_report_task_field_headers_async(): +async def test_get_property_quotas_snapshot_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + analytics_data_api.PropertyQuotasSnapshot() ) - await client.create_report_task(request) + await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5403,41 +5416,37 @@ async def test_create_report_task_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_report_task_flattened(): +def test_get_property_quotas_snapshot_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = analytics_data_api.PropertyQuotasSnapshot() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_report_task( - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + client.get_property_quotas_snapshot( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].report_task - mock_val = analytics_data_api.ReportTask(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_report_task_flattened_error(): +def test_get_property_quotas_snapshot_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5445,50 +5454,45 @@ def test_create_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_report_task( - analytics_data_api.CreateReportTaskRequest(), - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_report_task_flattened_async(): +async def test_get_property_quotas_snapshot_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = analytics_data_api.PropertyQuotasSnapshot() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.PropertyQuotasSnapshot() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_report_task( - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + response = await client.get_property_quotas_snapshot( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].report_task - mock_val = analytics_data_api.ReportTask(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_report_task_flattened_error_async(): +async def test_get_property_quotas_snapshot_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5496,21 +5500,20 @@ async def test_create_report_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_report_task( - analytics_data_api.CreateReportTaskRequest(), - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + await client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.QueryReportTaskRequest, + analytics_data_api.CreateReportTaskRequest, dict, ], ) -def test_query_report_task(request_type, transport: str = "grpc"): +def test_create_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5522,26 +5525,23 @@ def test_query_report_task(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.QueryReportTaskResponse( - row_count=992, - ) - response = client.query_report_task(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.QueryReportTaskResponse) - assert response.row_count == 992 + assert isinstance(response, future.Future) -def test_query_report_task_empty_call(): +def test_create_report_task_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -5551,18 +5551,18 @@ def test_query_report_task_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.query_report_task() + client.create_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.QueryReportTaskRequest() + assert args[0] == analytics_data_api.CreateReportTaskRequest() -def test_query_report_task_non_empty_request_with_auto_populated_field(): +def test_create_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5573,26 +5573,26 @@ def test_query_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.QueryReportTaskRequest( - name="name_value", + request = analytics_data_api.CreateReportTaskRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.query_report_task(request=request) + client.create_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.QueryReportTaskRequest( - name="name_value", + assert args[0] == analytics_data_api.CreateReportTaskRequest( + parent="parent_value", ) -def test_query_report_task_use_cached_wrapped_rpc(): +def test_create_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5606,7 +5606,9 @@ def test_query_report_task_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.query_report_task in client._transport._wrapped_methods + assert ( + client._transport.create_report_task in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -5614,15 +5616,20 @@ def test_query_report_task_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.query_report_task + client._transport.create_report_task ] = mock_rpc request = {} - client.query_report_task(request) + client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.query_report_task(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5630,7 +5637,7 @@ def test_query_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_query_report_task_empty_call_async(): +async def test_create_report_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataAsyncClient( @@ -5640,22 +5647,20 @@ async def test_query_report_task_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse( - row_count=992, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.query_report_task() + response = await client.create_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.QueryReportTaskRequest() + assert args[0] == analytics_data_api.CreateReportTaskRequest() @pytest.mark.asyncio -async def test_query_report_task_async_use_cached_wrapped_rpc( +async def test_create_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5672,33 +5677,39 @@ async def test_query_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.query_report_task + client._client._transport.create_report_task in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + 
mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.query_report_task - ] = mock_object + client._client._transport.create_report_task + ] = mock_rpc request = {} - await client.query_report_task(request) + await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.query_report_task(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_query_report_task_async( +async def test_create_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.QueryReportTaskRequest, + request_type=analytics_data_api.CreateReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5711,49 +5722,46 @@ async def test_query_report_task_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse( - row_count=992, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.query_report_task(request) + response = await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.QueryReportTaskResponse) - assert response.row_count == 992 + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_query_report_task_async_from_dict(): - await test_query_report_task_async(request_type=dict) +async def test_create_report_task_async_from_dict(): + await test_create_report_task_async(request_type=dict) -def test_query_report_task_field_headers(): +def test_create_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: - call.return_value = analytics_data_api.QueryReportTaskResponse() - client.query_report_task(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -5764,30 +5772,30 @@ def test_query_report_task_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_query_report_task_field_headers_async(): +async def test_create_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse() + operations_pb2.Operation(name="operations/op") ) - await client.query_report_task(request) + await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5798,37 +5806,41 @@ async def test_query_report_task_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_query_report_task_flattened(): +def test_create_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.QueryReportTaskResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.query_report_task( - name="name_value", + client.create_report_task( + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].report_task + mock_val = analytics_data_api.ReportTask(name="name_value") assert arg == mock_val -def test_query_report_task_flattened_error(): +def test_create_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5836,45 +5848,50 @@ def test_query_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.query_report_task( - analytics_data_api.QueryReportTaskRequest(), - name="name_value", + client.create_report_task( + analytics_data_api.CreateReportTaskRequest(), + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) @pytest.mark.asyncio -async def test_query_report_task_flattened_async(): +async def test_create_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.QueryReportTaskResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.query_report_task( - name="name_value", + response = await client.create_report_task( + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].report_task + mock_val = analytics_data_api.ReportTask(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_query_report_task_flattened_error_async(): +async def test_create_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5882,20 +5899,21 @@ async def test_query_report_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.query_report_task( - analytics_data_api.QueryReportTaskRequest(), - name="name_value", + await client.create_report_task( + analytics_data_api.CreateReportTaskRequest(), + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetReportTaskRequest, + analytics_data_api.QueryReportTaskRequest, dict, ], ) -def test_get_report_task(request_type, transport: str = "grpc"): +def test_query_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5906,25 +5924,27 @@ def test_get_report_task(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask( - name="name_value", + call.return_value = analytics_data_api.QueryReportTaskResponse( + row_count=992, ) - response = client.get_report_task(request) + response = client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.ReportTask) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 -def test_get_report_task_empty_call(): +def test_query_report_task_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -5933,17 +5953,19 @@ def test_get_report_task_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_report_task() + client.query_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetReportTaskRequest() + assert args[0] == analytics_data_api.QueryReportTaskRequest() -def test_get_report_task_non_empty_request_with_auto_populated_field(): +def test_query_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5954,24 +5976,26 @@ def test_get_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.GetReportTaskRequest( + request = analytics_data_api.QueryReportTaskRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_report_task(request=request) + client.query_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetReportTaskRequest( + assert args[0] == analytics_data_api.QueryReportTaskRequest( name="name_value", ) -def test_get_report_task_use_cached_wrapped_rpc(): +def test_query_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5985,21 +6009,23 @@ def test_get_report_task_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_report_task in client._transport._wrapped_methods + assert client._transport.query_report_task in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_report_task] = mock_rpc + client._transport._wrapped_methods[ + client._transport.query_report_task + ] = mock_rpc request = {} - client.get_report_task(request) + client.query_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_report_task(request) + client.query_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6007,7 +6033,7 @@ def test_get_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_report_task_empty_call_async(): +async def test_query_report_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataAsyncClient( @@ -6016,21 +6042,23 @@ async def test_get_report_task_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask( - name="name_value", + analytics_data_api.QueryReportTaskResponse( + row_count=992, ) ) - response = await client.get_report_task() + response = await client.query_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetReportTaskRequest() + assert args[0] == analytics_data_api.QueryReportTaskRequest() @pytest.mark.asyncio -async def test_get_report_task_async_use_cached_wrapped_rpc( +async def test_query_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6047,33 +6075,34 @@ async def test_get_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_report_task + client._client._transport.query_report_task in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + 
mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_report_task - ] = mock_object + client._client._transport.query_report_task + ] = mock_rpc request = {} - await client.get_report_task(request) + await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_report_task(request) + await client.query_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_report_task_async( +async def test_query_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.GetReportTaskRequest, + request_type=analytics_data_api.QueryReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6085,46 +6114,50 @@ async def test_get_report_task_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask( - name="name_value", + analytics_data_api.QueryReportTaskResponse( + row_count=992, ) ) - response = await client.get_report_task(request) + response = await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.ReportTask) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 @pytest.mark.asyncio -async def test_get_report_task_async_from_dict(): - await test_get_report_task_async(request_type=dict) +async def test_query_report_task_async_from_dict(): + await test_query_report_task_async(request_type=dict) -def test_get_report_task_field_headers(): +def test_query_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: - call.return_value = analytics_data_api.ReportTask() - client.get_report_task(request) + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: + call.return_value = analytics_data_api.QueryReportTaskResponse() + client.query_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -6140,23 +6173,25 @@ def test_get_report_task_field_headers(): @pytest.mark.asyncio -async def test_get_report_task_field_headers_async(): +async def test_query_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask() + analytics_data_api.QueryReportTaskResponse() ) - await client.get_report_task(request) + await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6171,18 +6206,20 @@ async def test_get_report_task_field_headers_async(): ) in kw["metadata"] -def test_get_report_task_flattened(): +def test_query_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask() + call.return_value = analytics_data_api.QueryReportTaskResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_report_task( + client.query_report_task( name="name_value", ) @@ -6195,7 +6232,7 @@ def test_get_report_task_flattened(): assert arg == mock_val -def test_get_report_task_flattened_error(): +def test_query_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6203,29 +6240,31 @@ def test_get_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_report_task( - analytics_data_api.GetReportTaskRequest(), + client.query_report_task( + analytics_data_api.QueryReportTaskRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_report_task_flattened_async(): +async def test_query_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask() + call.return_value = analytics_data_api.QueryReportTaskResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask() + analytics_data_api.QueryReportTaskResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_report_task( + response = await client.query_report_task( name="name_value", ) @@ -6239,7 +6278,7 @@ async def test_get_report_task_flattened_async(): @pytest.mark.asyncio -async def test_get_report_task_flattened_error_async(): +async def test_query_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6247,8 +6286,8 @@ async def test_get_report_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_report_task( - analytics_data_api.GetReportTaskRequest(), + await client.query_report_task( + analytics_data_api.QueryReportTaskRequest(), name="name_value", ) @@ -6256,11 +6295,11 @@ async def test_get_report_task_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListReportTasksRequest, + analytics_data_api.GetReportTaskRequest, dict, ], ) -def test_list_report_tasks(request_type, transport: str = "grpc"): +def test_get_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6271,27 +6310,25 @@ def test_list_report_tasks(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + call.return_value = analytics_data_api.ReportTask( + name="name_value", ) - response = client.list_report_tasks(request) + response = client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListReportTasksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == "name_value" -def test_list_report_tasks_empty_call(): +def test_get_report_task_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -6300,19 +6337,17 @@ def test_list_report_tasks_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_report_tasks() + client.get_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListReportTasksRequest() + assert args[0] == analytics_data_api.GetReportTaskRequest() -def test_list_report_tasks_non_empty_request_with_auto_populated_field(): +def test_get_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -6323,28 +6358,24 @@ def test_list_report_tasks_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.ListReportTasksRequest( - parent="parent_value", - page_token="page_token_value", + request = analytics_data_api.GetReportTaskRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_report_tasks(request=request) + client.get_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListReportTasksRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == analytics_data_api.GetReportTaskRequest( + name="name_value", ) -def test_list_report_tasks_use_cached_wrapped_rpc(): +def test_get_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6358,23 +6389,21 @@ def test_list_report_tasks_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_report_tasks in client._transport._wrapped_methods + assert client._transport.get_report_task in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_report_tasks - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_report_task] = mock_rpc request = {} - client.list_report_tasks(request) + client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_report_tasks(request) + client.get_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6382,7 +6411,7 @@ def test_list_report_tasks_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_report_tasks_empty_call_async(): +async def test_get_report_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AlphaAnalyticsDataAsyncClient( @@ -6391,23 +6420,21 @@ async def test_list_report_tasks_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + analytics_data_api.ReportTask( + name="name_value", ) ) - response = await client.list_report_tasks() + response = await client.get_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListReportTasksRequest() + assert args[0] == analytics_data_api.GetReportTaskRequest() @pytest.mark.asyncio -async def test_list_report_tasks_async_use_cached_wrapped_rpc( +async def test_get_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6424,33 +6451,34 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_report_tasks + client._client._transport.get_report_task in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_report_tasks - ] = mock_object + client._client._transport.get_report_task + ] = mock_rpc request = {} - await client.list_report_tasks(request) + await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_report_tasks(request) + await client.get_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_report_tasks_async( +async def test_get_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.ListReportTasksRequest, + request_type=analytics_data_api.GetReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6462,50 +6490,46 @@ async def test_list_report_tasks_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + analytics_data_api.ReportTask( + name="name_value", ) ) - response = await client.list_report_tasks(request) + response = await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListReportTasksAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_list_report_tasks_async_from_dict(): - await test_list_report_tasks_async(request_type=dict) +async def test_get_report_task_async_from_dict(): + await test_get_report_task_async(request_type=dict) -def test_list_report_tasks_field_headers(): +def test_get_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: - call.return_value = analytics_data_api.ListReportTasksResponse() - client.list_report_tasks(request) + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + call.return_value = analytics_data_api.ReportTask() + client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6516,30 +6540,28 @@ def test_list_report_tasks_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_report_tasks_field_headers_async(): +async def test_get_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse() + analytics_data_api.ReportTask() ) - await client.list_report_tasks(request) + await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6550,37 +6572,35 @@ async def test_list_report_tasks_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_report_tasks_flattened(): +def test_get_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListReportTasksResponse() + call.return_value = analytics_data_api.ReportTask() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_report_tasks( - parent="parent_value", + client.get_report_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_report_tasks_flattened_error(): +def test_get_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6588,45 +6608,43 @@ def test_list_report_tasks_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_report_tasks( - analytics_data_api.ListReportTasksRequest(), - parent="parent_value", + client.get_report_task( + analytics_data_api.GetReportTaskRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_report_tasks_flattened_async(): +async def test_get_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListReportTasksResponse() + call.return_value = analytics_data_api.ReportTask() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse() + analytics_data_api.ReportTask() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_report_tasks( - parent="parent_value", + response = await client.get_report_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_report_tasks_flattened_error_async(): +async def test_get_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6634,72 +6652,460 @@ async def test_list_report_tasks_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_report_tasks( - analytics_data_api.ListReportTasksRequest(), - parent="parent_value", + await client.get_report_task( + analytics_data_api.GetReportTaskRequest(), + name="name_value", ) -def test_list_report_tasks_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.ListReportTasksRequest, + dict, + ], +) +def test_list_report_tasks(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_report_tasks), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - next_page_token="abc", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[], - next_page_token="def", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", ) - pager = client.list_report_tasks(request={}, retry=retry, timeout=timeout) + response = client.list_report_tasks(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListReportTasksRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analytics_data_api.ReportTask) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportTasksPager) + assert response.next_page_token == "next_page_token_value" -def test_list_report_tasks_pages(transport_name: str = "grpc"): +def test_list_report_tasks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_report_tasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListReportTasksRequest() + + +def test_list_report_tasks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.ListReportTasksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_report_tasks(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListReportTasksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_report_tasks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_report_tasks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_report_tasks + ] = mock_rpc + request = {} + client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_report_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_report_tasks_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_report_tasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListReportTasksRequest() + + +@pytest.mark.asyncio +async def test_list_report_tasks_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_report_tasks + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_report_tasks + ] = mock_rpc + + request = {} + await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_report_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_report_tasks_async( + transport: str = "grpc_asyncio", + request_type=analytics_data_api.ListReportTasksRequest, +): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListReportTasksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportTasksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_report_tasks_async_from_dict(): + await test_list_report_tasks_async(request_type=dict) + + +def test_list_report_tasks_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_data_api.ListReportTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + call.return_value = analytics_data_api.ListReportTasksResponse() + client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_report_tasks_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListReportTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse() + ) + await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_report_tasks_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_report_tasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_report_tasks_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_report_tasks( + analytics_data_api.ListReportTasksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_report_tasks_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_report_tasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_report_tasks_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_report_tasks( + analytics_data_api.ListReportTasksRequest(), + parent="parent_value", + ) + + +def test_list_report_tasks_pager(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token="abc", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token="def", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_report_tasks(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert 
len(results) == 6 + assert all(isinstance(i, analytics_data_api.ReportTask) for i in results) + + +def test_list_report_tasks_pages(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6842,47 +7248,293 @@ async def test_list_report_tasks_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - analytics_data_api.RunFunnelReportRequest, - dict, - ], -) -def test_run_funnel_report_rest(request_type): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.RunFunnelReportRequest, + dict, + ], +) +def test_run_funnel_report_rest(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.RunFunnelReportResponse( + kind="kind_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.RunFunnelReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_funnel_report(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, analytics_data_api.RunFunnelReportResponse) + assert response.kind == "kind_value" + + +def test_run_funnel_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_funnel_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.run_funnel_report + ] = mock_rpc + + request = {} + client.run_funnel_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.run_funnel_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_funnel_report_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "post_run_funnel_report" + ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "pre_run_funnel_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.RunFunnelReportRequest.pb( + analytics_data_api.RunFunnelReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = analytics_data_api.RunFunnelReportResponse.to_json( + analytics_data_api.RunFunnelReportResponse() + ) + + request = analytics_data_api.RunFunnelReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.RunFunnelReportResponse() + + client.run_funnel_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_funnel_report_rest_bad_request( + transport: str = "rest", 
request_type=analytics_data_api.RunFunnelReportRequest +): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_funnel_report(request) + + +def test_run_funnel_report_rest_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.CreateAudienceListRequest, + dict, + ], +) +def test_create_audience_list_rest(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request_init["audience_list"] = { + "name": "name_value", + "audience": "audience_value", + "audience_display_name": "audience_display_name_value", + "dimensions": [{"dimension_name": "dimension_name_value"}], + "state": 1, + "begin_creating_time": {"seconds": 751, "nanos": 543}, + "creation_quota_tokens_charged": 3070, + "row_count": 992, + "error_message": "error_message_value", + "percentage_completed": 0.2106, + "recurring_audience_list": "recurring_audience_list_value", + "webhook_notification": { + "uri": "uri_value", + "channel_token": "channel_token_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateAudienceListRequest.meta.fields[ + "audience_list" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] - # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["audience_list"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": 
is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["audience_list"][field])): + del request_init["audience_list"][field][i][subfield] + else: + del request_init["audience_list"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RunFunnelReportResponse( - kind="kind_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_data_api.RunFunnelReportResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.run_funnel_report(request) + response = client.create_audience_list(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.RunFunnelReportResponse) - assert response.kind == "kind_value" + assert response.operation.name == "operations/spam" -def test_run_funnel_report_rest_use_cached_wrapped_rpc(): +def test_create_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6896,7 +7548,9 @@ def test_run_funnel_report_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.run_funnel_report in client._transport._wrapped_methods + assert ( + client._transport.create_audience_list in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -6904,24 +7558,117 @@ def test_run_funnel_report_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.run_funnel_report + client._transport.create_audience_list ] = mock_rpc request = {} - client.run_funnel_report(request) + client.create_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.run_funnel_report(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +def test_create_audience_list_rest_required_fields( + request_type=analytics_data_api.CreateAudienceListRequest, +): + transport_class = transports.AlphaAnalyticsDataRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_audience_list(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_audience_list_rest_unset_required_fields(): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "audienceList", + ) + ) + ) + + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_funnel_report_rest_interceptors(null_interceptor): +def test_create_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6934,14 +7681,16 @@ def test_run_funnel_report_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, 
"post_run_funnel_report" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "post_create_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_run_funnel_report" + transports.AlphaAnalyticsDataRestInterceptor, "pre_create_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.RunFunnelReportRequest.pb( - analytics_data_api.RunFunnelReportRequest() + pb_message = analytics_data_api.CreateAudienceListRequest.pb( + analytics_data_api.CreateAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -6953,55 +7702,111 @@ def test_run_funnel_report_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.RunFunnelReportResponse.to_json( - analytics_data_api.RunFunnelReportResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = analytics_data_api.RunFunnelReportRequest() + request = analytics_data_api.CreateAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.RunFunnelReportResponse() + post.return_value = operations_pb2.Operation() + + client.create_audience_list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_audience_list_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.CreateAudienceListRequest +): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request = request_type(**request_init) + + # Mock 
the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_audience_list(request) + + +def test_create_audience_list_rest_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + audience_list=analytics_data_api.AudienceList(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.run_funnel_report( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + client.create_audience_list(**mock_args) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, + args[1], + ) -def test_run_funnel_report_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.RunFunnelReportRequest -): +def test_create_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_funnel_report(request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_audience_list( + analytics_data_api.CreateAudienceListRequest(), + parent="parent_value", + audience_list=analytics_data_api.AudienceList(name="name_value"), + ) -def test_run_funnel_report_rest_error(): +def test_create_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7010,125 +7815,44 @@ def test_run_funnel_report_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.CreateAudienceListRequest, + analytics_data_api.QueryAudienceListRequest, dict, ], ) -def test_create_audience_list_rest(request_type): +def test_query_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["audience_list"] = { - "name": "name_value", - "audience": "audience_value", - "audience_display_name": "audience_display_name_value", - "dimensions": [{"dimension_name": "dimension_name_value"}], - "state": 1, - "begin_creating_time": {"seconds": 751, "nanos": 543}, - "creation_quota_tokens_charged": 3070, - "row_count": 992, - "error_message": "error_message_value", - "percentage_completed": 0.2106, - "recurring_audience_list": "recurring_audience_list_value", - "webhook_notification": { - "uri": "uri_value", - "channel_token": "channel_token_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_data_api.CreateAudienceListRequest.meta.fields[ - "audience_list" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["audience_list"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime 
version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["audience_list"][field])): - del request_init["audience_list"][field][i][subfield] - else: - del request_init["audience_list"][field][subfield] + request_init = {"name": "properties/sample1/audienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.QueryAudienceListResponse( + row_count=992, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_audience_list(request) + response = client.query_audience_list(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, analytics_data_api.QueryAudienceListResponse) + assert response.row_count == 992 -def test_create_audience_list_rest_use_cached_wrapped_rpc(): +def test_query_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7143,7 +7867,7 @@ def test_create_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_audience_list in client._transport._wrapped_methods + client._transport.query_audience_list in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -7152,33 +7876,29 @@ def test_create_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_audience_list + client._transport.query_audience_list ] = mock_rpc request = {} - client.create_audience_list(request) + client.query_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_audience_list(request) + client.query_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_audience_list_rest_required_fields( - request_type=analytics_data_api.CreateAudienceListRequest, +def test_query_audience_list_rest_required_fields( + request_type=analytics_data_api.QueryAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7189,21 +7909,21 @@ def test_create_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_audience_list._get_unset_required_fields(jsonified_request) + ).query_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_audience_list._get_unset_required_fields(jsonified_request) + ).query_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7212,7 +7932,7 @@ def 
test_create_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.QueryAudienceListResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7232,37 +7952,32 @@ def test_create_audience_list_rest_required_fields( response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_audience_list(request) + response = client.query_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_audience_list_rest_unset_required_fields(): +def test_query_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_audience_list._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "audienceList", - ) - ) - ) + unset_fields = transport.query_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_audience_list_rest_interceptors(null_interceptor): +def test_query_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7275,16 +7990,14 @@ def 
test_create_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_create_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_query_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_create_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_query_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.CreateAudienceListRequest.pb( - analytics_data_api.CreateAudienceListRequest() + pb_message = analytics_data_api.QueryAudienceListRequest.pb( + analytics_data_api.QueryAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -7296,19 +8009,21 @@ def test_create_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + analytics_data_api.QueryAudienceListResponse.to_json( + analytics_data_api.QueryAudienceListResponse() + ) ) - request = analytics_data_api.CreateAudienceListRequest() + request = analytics_data_api.QueryAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = analytics_data_api.QueryAudienceListResponse() - client.create_audience_list( + client.query_audience_list( request, metadata=[ ("key", "val"), @@ -7320,8 +8035,8 @@ def test_create_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_audience_list_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.CreateAudienceListRequest +def 
test_query_audience_list_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.QueryAudienceListRequest ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7329,7 +8044,7 @@ def test_create_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/audienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7341,10 +8056,10 @@ def test_create_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_audience_list(request) + client.query_audience_list(request) -def test_create_audience_list_rest_flattened(): +def test_query_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7353,38 +8068,40 @@ def test_create_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.QueryAudienceListResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/audienceLists/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - audience_list=analytics_data_api.AudienceList(name="name_value"), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_audience_list(**mock_args) + client.query_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, + "%s/v1alpha/{name=properties/*/audienceLists/*}:query" + % client.transport._host, args[1], ) -def test_create_audience_list_rest_flattened_error(transport: str = "rest"): +def test_query_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7393,14 +8110,13 @@ def test_create_audience_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_audience_list( - analytics_data_api.CreateAudienceListRequest(), - parent="parent_value", - audience_list=analytics_data_api.AudienceList(name="name_value"), + client.query_audience_list( + analytics_data_api.QueryAudienceListRequest(), + name="name_value", ) -def test_create_audience_list_rest_error(): +def test_query_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7409,11 +8125,11 @@ def test_create_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.QueryAudienceListRequest, + analytics_data_api.SheetExportAudienceListRequest, dict, ], ) -def test_query_audience_list_rest(request_type): +def test_sheet_export_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7426,7 +8142,9 @@ def test_query_audience_list_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.QueryAudienceListResponse( + return_value = analytics_data_api.SheetExportAudienceListResponse( + spreadsheet_uri="spreadsheet_uri_value", + spreadsheet_id="spreadsheet_id_value", row_count=992, ) @@ -7434,19 +8152,23 @@ def test_query_audience_list_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + return_value = analytics_data_api.SheetExportAudienceListResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.query_audience_list(request) + response = client.sheet_export_audience_list(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.QueryAudienceListResponse) + assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) + assert response.spreadsheet_uri == "spreadsheet_uri_value" + assert response.spreadsheet_id == "spreadsheet_id_value" assert response.row_count == 992 -def test_query_audience_list_rest_use_cached_wrapped_rpc(): +def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7461,7 +8183,8 @@ def test_query_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.query_audience_list in client._transport._wrapped_methods + client._transport.sheet_export_audience_list + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -7470,24 +8193,24 @@ def test_query_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute 
client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.query_audience_list + client._transport.sheet_export_audience_list ] = mock_rpc request = {} - client.query_audience_list(request) + client.sheet_export_audience_list(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.query_audience_list(request) + client.sheet_export_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_query_audience_list_rest_required_fields( - request_type=analytics_data_api.QueryAudienceListRequest, +def test_sheet_export_audience_list_rest_required_fields( + request_type=analytics_data_api.SheetExportAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport @@ -7503,7 +8226,7 @@ def test_query_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).query_audience_list._get_unset_required_fields(jsonified_request) + ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7512,7 +8235,7 @@ def test_query_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).query_audience_list._get_unset_required_fields(jsonified_request) + ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7526,7 +8249,7 @@ def test_query_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.QueryAudienceListResponse() + return_value = analytics_data_api.SheetExportAudienceListResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7548,30 +8271,32 @@ def test_query_audience_list_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + return_value = analytics_data_api.SheetExportAudienceListResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.query_audience_list(request) + response = client.sheet_export_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_query_audience_list_rest_unset_required_fields(): +def test_sheet_export_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.query_audience_list._get_unset_required_fields({}) + unset_fields = transport.sheet_export_audience_list._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_query_audience_list_rest_interceptors(null_interceptor): +def test_sheet_export_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7584,14 +8309,14 @@ def test_query_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - 
transports.AlphaAnalyticsDataRestInterceptor, "post_query_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_sheet_export_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_query_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_sheet_export_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.QueryAudienceListRequest.pb( - analytics_data_api.QueryAudienceListRequest() + pb_message = analytics_data_api.SheetExportAudienceListRequest.pb( + analytics_data_api.SheetExportAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -7604,20 +8329,20 @@ def test_query_audience_list_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - analytics_data_api.QueryAudienceListResponse.to_json( - analytics_data_api.QueryAudienceListResponse() + analytics_data_api.SheetExportAudienceListResponse.to_json( + analytics_data_api.SheetExportAudienceListResponse() ) ) - request = analytics_data_api.QueryAudienceListRequest() + request = analytics_data_api.SheetExportAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.QueryAudienceListResponse() + post.return_value = analytics_data_api.SheetExportAudienceListResponse() - client.query_audience_list( + client.sheet_export_audience_list( request, metadata=[ ("key", "val"), @@ -7629,8 +8354,9 @@ def test_query_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_query_audience_list_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.QueryAudienceListRequest +def test_sheet_export_audience_list_rest_bad_request( + transport: str = "rest", + request_type=analytics_data_api.SheetExportAudienceListRequest, ): client = AlphaAnalyticsDataClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -7650,10 +8376,10 @@ def test_query_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.query_audience_list(request) + client.sheet_export_audience_list(request) -def test_query_audience_list_rest_flattened(): +def test_sheet_export_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7662,7 +8388,7 @@ def test_query_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.QueryAudienceListResponse() + return_value = analytics_data_api.SheetExportAudienceListResponse() # get arguments that satisfy an http rule for this method sample_request = {"name": "properties/sample1/audienceLists/sample2"} @@ -7677,25 +8403,27 @@ def test_query_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + return_value = analytics_data_api.SheetExportAudienceListResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.query_audience_list(**mock_args) + client.sheet_export_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}:query" + "%s/v1alpha/{name=properties/*/audienceLists/*}:exportSheet" % client.transport._host, args[1], ) -def test_query_audience_list_rest_flattened_error(transport: str = "rest"): +def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7704,13 +8432,13 @@ def test_query_audience_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.query_audience_list( - analytics_data_api.QueryAudienceListRequest(), + client.sheet_export_audience_list( + analytics_data_api.SheetExportAudienceListRequest(), name="name_value", ) -def test_query_audience_list_rest_error(): +def test_sheet_export_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7719,11 +8447,11 @@ def test_query_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.SheetExportAudienceListRequest, + analytics_data_api.GetAudienceListRequest, dict, ], ) -def test_sheet_export_audience_list_rest(request_type): +def test_get_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7736,33 +8464,43 @@ def test_sheet_export_audience_list_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.SheetExportAudienceListResponse( - spreadsheet_uri="spreadsheet_uri_value", - spreadsheet_id="spreadsheet_id_value", + return_value = analytics_data_api.AudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + state=analytics_data_api.AudienceList.State.CREATING, + creation_quota_tokens_charged=3070, row_count=992, + error_message="error_message_value", + percentage_completed=0.2106, + recurring_audience_list="recurring_audience_list_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.AudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.sheet_export_audience_list(request) + response = client.get_audience_list(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) - assert response.spreadsheet_uri == "spreadsheet_uri_value" - assert response.spreadsheet_id == "spreadsheet_id_value" + assert isinstance(response, analytics_data_api.AudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.state == analytics_data_api.AudienceList.State.CREATING + assert response.creation_quota_tokens_charged == 3070 assert response.row_count == 992 + assert response.error_message == "error_message_value" + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + assert response.recurring_audience_list == "recurring_audience_list_value" -def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): +def test_get_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7776,10 +8514,7 @@ def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.sheet_export_audience_list - in client._transport._wrapped_methods - ) + assert client._transport.get_audience_list in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -7787,24 +8522,24 @@ def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.sheet_export_audience_list + client._transport.get_audience_list ] = mock_rpc request = {} - client.sheet_export_audience_list(request) + client.get_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.sheet_export_audience_list(request) + client.get_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_sheet_export_audience_list_rest_required_fields( - request_type=analytics_data_api.SheetExportAudienceListRequest, +def test_get_audience_list_rest_required_fields( + request_type=analytics_data_api.GetAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport @@ -7820,7 +8555,7 @@ def test_sheet_export_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) + ).get_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7829,7 +8564,7 @@ def test_sheet_export_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) + ).get_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7843,7 +8578,7 @@ def test_sheet_export_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.SheetExportAudienceListResponse() + return_value = analytics_data_api.AudienceList() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7855,42 +8590,39 @@ def test_sheet_export_audience_list_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.AudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.sheet_export_audience_list(request) + response = client.get_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_sheet_export_audience_list_rest_unset_required_fields(): +def test_get_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.sheet_export_audience_list._get_unset_required_fields({}) + unset_fields = transport.get_audience_list._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_sheet_export_audience_list_rest_interceptors(null_interceptor): +def test_get_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7903,14 +8635,14 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( 
path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_sheet_export_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_get_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_sheet_export_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.SheetExportAudienceListRequest.pb( - analytics_data_api.SheetExportAudienceListRequest() + pb_message = analytics_data_api.GetAudienceListRequest.pb( + analytics_data_api.GetAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -7922,21 +8654,19 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.SheetExportAudienceListResponse.to_json( - analytics_data_api.SheetExportAudienceListResponse() - ) + req.return_value._content = analytics_data_api.AudienceList.to_json( + analytics_data_api.AudienceList() ) - request = analytics_data_api.SheetExportAudienceListRequest() + request = analytics_data_api.GetAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.SheetExportAudienceListResponse() + post.return_value = analytics_data_api.AudienceList() - client.sheet_export_audience_list( + client.get_audience_list( request, metadata=[ ("key", "val"), @@ -7948,9 +8678,8 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_sheet_export_audience_list_rest_bad_request( - transport: str = "rest", - request_type=analytics_data_api.SheetExportAudienceListRequest, +def test_get_audience_list_rest_bad_request( + transport: str = "rest", 
request_type=analytics_data_api.GetAudienceListRequest ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7970,10 +8699,10 @@ def test_sheet_export_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.sheet_export_audience_list(request) + client.get_audience_list(request) -def test_sheet_export_audience_list_rest_flattened(): +def test_get_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7982,7 +8711,7 @@ def test_sheet_export_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.SheetExportAudienceListResponse() + return_value = analytics_data_api.AudienceList() # get arguments that satisfy an http rule for this method sample_request = {"name": "properties/sample1/audienceLists/sample2"} @@ -7997,27 +8726,24 @@ def test_sheet_export_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.AudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.sheet_export_audience_list(**mock_args) + client.get_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}:exportSheet" - % client.transport._host, + "%s/v1alpha/{name=properties/*/audienceLists/*}" % client.transport._host, args[1], ) -def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest"): +def test_get_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8026,13 +8752,13 @@ def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest" # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.sheet_export_audience_list( - analytics_data_api.SheetExportAudienceListRequest(), + client.get_audience_list( + analytics_data_api.GetAudienceListRequest(), name="name_value", ) -def test_sheet_export_audience_list_rest_error(): +def test_get_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8041,60 +8767,44 @@ def test_sheet_export_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetAudienceListRequest, + analytics_data_api.ListAudienceListsRequest, dict, ], ) -def test_get_audience_list_rest(request_type): +def test_list_audience_lists_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.AudienceList( - name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - state=analytics_data_api.AudienceList.State.CREATING, - creation_quota_tokens_charged=3070, - row_count=992, - error_message="error_message_value", - percentage_completed=0.2106, - recurring_audience_list="recurring_audience_list_value", + return_value = analytics_data_api.ListAudienceListsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.AudienceList.pb(return_value) + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_audience_list(request) + response = client.list_audience_lists(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.AudienceList) - assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.state == analytics_data_api.AudienceList.State.CREATING - assert response.creation_quota_tokens_charged == 3070 - assert response.row_count == 992 - assert response.error_message == "error_message_value" - assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) - assert response.recurring_audience_list == "recurring_audience_list_value" + assert isinstance(response, pagers.ListAudienceListsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_audience_list_rest_use_cached_wrapped_rpc(): +def test_list_audience_lists_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8108,7 +8818,9 @@ def test_get_audience_list_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_audience_list in client._transport._wrapped_methods + assert ( + client._transport.list_audience_lists in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -8116,29 +8828,29 @@ def test_get_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_audience_list + client._transport.list_audience_lists ] = mock_rpc request = {} - client.get_audience_list(request) + client.list_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_audience_list(request) + client.list_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_audience_list_rest_required_fields( - request_type=analytics_data_api.GetAudienceListRequest, +def test_list_audience_lists_rest_required_fields( + request_type=analytics_data_api.ListAudienceListsRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8149,21 +8861,28 @@ def test_get_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_audience_list._get_unset_required_fields(jsonified_request) + ).list_audience_lists._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_audience_list._get_unset_required_fields(jsonified_request) + ).list_audience_lists._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8172,7 +8891,7 @@ def test_get_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.AudienceList() + return_value = analytics_data_api.ListAudienceListsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8193,30 +8912,38 @@ def test_get_audience_list_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.AudienceList.pb(return_value) + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_audience_list(request) + response = client.list_audience_lists(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_audience_list_rest_unset_required_fields(): +def test_list_audience_lists_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_audience_list._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.list_audience_lists._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_audience_list_rest_interceptors(null_interceptor): +def test_list_audience_lists_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8229,14 +8956,14 @@ def test_get_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_get_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_list_audience_lists" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_get_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_list_audience_lists" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.GetAudienceListRequest.pb( - analytics_data_api.GetAudienceListRequest() + pb_message = analytics_data_api.ListAudienceListsRequest.pb( + analytics_data_api.ListAudienceListsRequest() ) transcode.return_value = { "method": "post", @@ -8248,19 +8975,21 @@ def test_get_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.AudienceList.to_json( - analytics_data_api.AudienceList() + req.return_value._content = ( + analytics_data_api.ListAudienceListsResponse.to_json( + analytics_data_api.ListAudienceListsResponse() + ) ) - request = analytics_data_api.GetAudienceListRequest() + request = analytics_data_api.ListAudienceListsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = 
analytics_data_api.AudienceList() + post.return_value = analytics_data_api.ListAudienceListsResponse() - client.get_audience_list( + client.list_audience_lists( request, metadata=[ ("key", "val"), @@ -8272,8 +9001,8 @@ def test_get_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_audience_list_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.GetAudienceListRequest +def test_list_audience_lists_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.ListAudienceListsRequest ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8281,7 +9010,7 @@ def test_get_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8293,10 +9022,10 @@ def test_get_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_audience_list(request) + client.list_audience_lists(request) -def test_get_audience_list_rest_flattened(): +def test_list_audience_lists_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8305,14 +9034,14 @@ def test_get_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.AudienceList() + return_value = analytics_data_api.ListAudienceListsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/audienceLists/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -8320,24 +9049,24 @@ def test_get_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.AudienceList.pb(return_value) + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_audience_list(**mock_args) + client.list_audience_lists(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, args[1], ) -def test_get_audience_list_rest_flattened_error(transport: str = "rest"): +def test_list_audience_lists_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8346,26 +9075,83 @@ def test_get_audience_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_audience_list( - analytics_data_api.GetAudienceListRequest(), - name="name_value", + client.list_audience_lists( + analytics_data_api.ListAudienceListsRequest(), + parent="parent_value", ) -def test_get_audience_list_rest_error(): +def test_list_audience_lists_rest_pager(transport: str = "rest"): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_data_api.ListAudienceListsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = 
client.list_audience_lists(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.AudienceList) for i in results) + + pages = list(client.list_audience_lists(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListAudienceListsRequest, + analytics_data_api.CreateRecurringAudienceListRequest, dict, ], ) -def test_list_audience_lists_rest(request_type): +def test_create_recurring_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8373,32 +9159,123 @@ def test_list_audience_lists_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "properties/sample1"} + request_init["recurring_audience_list"] = { + "name": "name_value", + "audience": "audience_value", + "audience_display_name": "audience_display_name_value", + "dimensions": [{"dimension_name": "dimension_name_value"}], + "active_days_remaining": 2213, + "audience_lists": ["audience_lists_value1", "audience_lists_value2"], + "webhook_notification": { + "uri": "uri_value", + "channel_token": "channel_token_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateRecurringAudienceListRequest.meta.fields[ + "recurring_audience_list" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "recurring_audience_list" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["recurring_audience_list"][field])): + del 
request_init["recurring_audience_list"][field][i][subfield] + else: + del request_init["recurring_audience_list"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListAudienceListsResponse( - next_page_token="next_page_token_value", + return_value = analytics_data_api.RecurringAudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + active_days_remaining=2213, + audience_lists=["audience_lists_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_audience_lists(request) + response = client.create_recurring_audience_list(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAudienceListsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.active_days_remaining == 2213 + assert response.audience_lists == ["audience_lists_value"] -def test_list_audience_lists_rest_use_cached_wrapped_rpc(): +def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8413,7 +9290,8 @@ def test_list_audience_lists_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_audience_lists in client._transport._wrapped_methods + client._transport.create_recurring_audience_list + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -8422,24 +9300,24 @@ def test_list_audience_lists_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_audience_lists + client._transport.create_recurring_audience_list ] = mock_rpc request = {} - client.list_audience_lists(request) + client.create_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_audience_lists(request) + client.create_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_audience_lists_rest_required_fields( - request_type=analytics_data_api.ListAudienceListsRequest, +def test_create_recurring_audience_list_rest_required_fields( + request_type=analytics_data_api.CreateRecurringAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport @@ -8455,7 +9333,7 @@ def test_list_audience_lists_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_audience_lists._get_unset_required_fields(jsonified_request) + ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -8464,14 +9342,7 @@ def test_list_audience_lists_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_audience_lists._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -8485,7 +9356,7 @@ def test_list_audience_lists_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListAudienceListsResponse() + return_value = analytics_data_api.RecurringAudienceList() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8497,47 +9368,50 @@ def test_list_audience_lists_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_audience_lists(request) + response = client.create_recurring_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_audience_lists_rest_unset_required_fields(): +def test_create_recurring_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_audience_lists._get_unset_required_fields({}) + unset_fields = transport.create_recurring_audience_list._get_unset_required_fields( + {} + ) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "pageSize", - "pageToken", + "parent", + "recurringAudienceList", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_audience_lists_rest_interceptors(null_interceptor): +def test_create_recurring_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8550,14 +9424,16 
@@ def test_list_audience_lists_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_list_audience_lists" + transports.AlphaAnalyticsDataRestInterceptor, + "post_create_recurring_audience_list", ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_list_audience_lists" + transports.AlphaAnalyticsDataRestInterceptor, + "pre_create_recurring_audience_list", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.ListAudienceListsRequest.pb( - analytics_data_api.ListAudienceListsRequest() + pb_message = analytics_data_api.CreateRecurringAudienceListRequest.pb( + analytics_data_api.CreateRecurringAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -8569,21 +9445,19 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.ListAudienceListsResponse.to_json( - analytics_data_api.ListAudienceListsResponse() - ) + req.return_value._content = analytics_data_api.RecurringAudienceList.to_json( + analytics_data_api.RecurringAudienceList() ) - request = analytics_data_api.ListAudienceListsRequest() + request = analytics_data_api.CreateRecurringAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.ListAudienceListsResponse() + post.return_value = analytics_data_api.RecurringAudienceList() - client.list_audience_lists( + client.create_recurring_audience_list( request, metadata=[ ("key", "val"), @@ -8595,8 +9469,9 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_audience_lists_rest_bad_request( - transport: str = "rest", 
request_type=analytics_data_api.ListAudienceListsRequest +def test_create_recurring_audience_list_rest_bad_request( + transport: str = "rest", + request_type=analytics_data_api.CreateRecurringAudienceListRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8616,10 +9491,10 @@ def test_list_audience_lists_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_audience_lists(request) + client.create_recurring_audience_list(request) -def test_list_audience_lists_rest_flattened(): +def test_create_recurring_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8628,7 +9503,7 @@ def test_list_audience_lists_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.ListAudienceListsResponse() + return_value = analytics_data_api.RecurringAudienceList() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1"} @@ -8636,6 +9511,9 @@ def test_list_audience_lists_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + recurring_audience_list=analytics_data_api.RecurringAudienceList( + name="name_value" + ), ) mock_args.update(sample_request) @@ -8643,24 +9521,25 @@ def test_list_audience_lists_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_audience_lists(**mock_args) + client.create_recurring_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" + % client.transport._host, args[1], ) -def test_list_audience_lists_rest_flattened_error(transport: str = "rest"): +def test_create_recurring_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8669,173 +9548,36 @@ def test_list_audience_lists_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_audience_lists( - analytics_data_api.ListAudienceListsRequest(), + client.create_recurring_audience_list( + analytics_data_api.CreateRecurringAudienceListRequest(), parent="parent_value", - ) - - -def test_list_audience_lists_rest_pager(transport: str = "rest"): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], + recurring_audience_list=analytics_data_api.RecurringAudienceList( + name="name_value" ), ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_data_api.ListAudienceListsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = 
client.list_audience_lists(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analytics_data_api.AudienceList) for i in results) - - pages = list(client.list_audience_lists(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_data_api.CreateRecurringAudienceListRequest, - dict, - ], -) -def test_create_recurring_audience_list_rest(request_type): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["recurring_audience_list"] = { - "name": "name_value", - "audience": "audience_value", - "audience_display_name": "audience_display_name_value", - "dimensions": [{"dimension_name": "dimension_name_value"}], - "active_days_remaining": 2213, - "audience_lists": ["audience_lists_value1", "audience_lists_value2"], - "webhook_notification": { - "uri": "uri_value", - "channel_token": "channel_token_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_data_api.CreateRecurringAudienceListRequest.meta.fields[ - "recurring_audience_list" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "recurring_audience_list" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_create_recurring_audience_list_rest_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = 
subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["recurring_audience_list"][field])): - del request_init["recurring_audience_list"][field][i][subfield] - else: - del request_init["recurring_audience_list"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.GetRecurringAudienceListRequest, + dict, + ], +) +def test_get_recurring_audience_list_rest(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -8858,7 +9600,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_recurring_audience_list(request) + response = client.get_recurring_audience_list(request) # Establish that the response is the type that we expect. 
assert isinstance(response, analytics_data_api.RecurringAudienceList) @@ -8869,7 +9611,7 @@ def get_message_fields(field): assert response.audience_lists == ["audience_lists_value"] -def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): +def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8884,7 +9626,7 @@ def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_recurring_audience_list + client._transport.get_recurring_audience_list in client._transport._wrapped_methods ) @@ -8894,29 +9636,29 @@ def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_recurring_audience_list + client._transport.get_recurring_audience_list ] = mock_rpc request = {} - client.create_recurring_audience_list(request) + client.get_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_recurring_audience_list(request) + client.get_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_recurring_audience_list_rest_required_fields( - request_type=analytics_data_api.CreateRecurringAudienceListRequest, +def test_get_recurring_audience_list_rest_required_fields( + request_type=analytics_data_api.GetRecurringAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8927,21 +9669,21 @@ def test_create_recurring_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8962,10 +9704,9 @@ def test_create_recurring_audience_list_rest_required_fields( pb_request = 
request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -8978,34 +9719,24 @@ def test_create_recurring_audience_list_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_recurring_audience_list(request) + response = client.get_recurring_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_recurring_audience_list_rest_unset_required_fields(): +def test_get_recurring_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_recurring_audience_list._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "recurringAudienceList", - ) - ) - ) + unset_fields = transport.get_recurring_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_recurring_audience_list_rest_interceptors(null_interceptor): +def test_get_recurring_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9018,16 +9749,14 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "post_create_recurring_audience_list", + transports.AlphaAnalyticsDataRestInterceptor, "post_get_recurring_audience_list" ) as post, 
mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "pre_create_recurring_audience_list", + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_recurring_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.CreateRecurringAudienceListRequest.pb( - analytics_data_api.CreateRecurringAudienceListRequest() + pb_message = analytics_data_api.GetRecurringAudienceListRequest.pb( + analytics_data_api.GetRecurringAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -9043,7 +9772,7 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): analytics_data_api.RecurringAudienceList() ) - request = analytics_data_api.CreateRecurringAudienceListRequest() + request = analytics_data_api.GetRecurringAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -9051,7 +9780,7 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = analytics_data_api.RecurringAudienceList() - client.create_recurring_audience_list( + client.get_recurring_audience_list( request, metadata=[ ("key", "val"), @@ -9063,9 +9792,9 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_recurring_audience_list_rest_bad_request( +def test_get_recurring_audience_list_rest_bad_request( transport: str = "rest", - request_type=analytics_data_api.CreateRecurringAudienceListRequest, + request_type=analytics_data_api.GetRecurringAudienceListRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9073,7 +9802,7 @@ def test_create_recurring_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} request = request_type(**request_init) # Mock the http 
request call within the method and fake a BadRequest error. @@ -9085,10 +9814,10 @@ def test_create_recurring_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_recurring_audience_list(request) + client.get_recurring_audience_list(request) -def test_create_recurring_audience_list_rest_flattened(): +def test_get_recurring_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9100,14 +9829,11 @@ def test_create_recurring_audience_list_rest_flattened(): return_value = analytics_data_api.RecurringAudienceList() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/recurringAudienceLists/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - recurring_audience_list=analytics_data_api.RecurringAudienceList( - name="name_value" - ), + name="name_value", ) mock_args.update(sample_request) @@ -9120,20 +9846,20 @@ def test_create_recurring_audience_list_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_recurring_audience_list(**mock_args) + client.get_recurring_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" + "%s/v1alpha/{name=properties/*/recurringAudienceLists/*}" % client.transport._host, args[1], ) -def test_create_recurring_audience_list_rest_flattened_error(transport: str = "rest"): +def test_get_recurring_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9142,16 +9868,13 @@ def test_create_recurring_audience_list_rest_flattened_error(transport: str = "r # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_recurring_audience_list( - analytics_data_api.CreateRecurringAudienceListRequest(), - parent="parent_value", - recurring_audience_list=analytics_data_api.RecurringAudienceList( - name="name_value" - ), + client.get_recurring_audience_list( + analytics_data_api.GetRecurringAudienceListRequest(), + name="name_value", ) -def test_create_recurring_audience_list_rest_error(): +def test_get_recurring_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9160,52 +9883,46 @@ def test_create_recurring_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetRecurringAudienceListRequest, + analytics_data_api.ListRecurringAudienceListsRequest, dict, ], ) -def test_get_recurring_audience_list_rest(request_type): +def test_list_recurring_audience_lists_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock 
the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RecurringAudienceList( - name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - active_days_remaining=2213, - audience_lists=["audience_lists_value"], + return_value = analytics_data_api.ListRecurringAudienceListsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_recurring_audience_list(request) + response = client.list_recurring_audience_lists(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.RecurringAudienceList) - assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.active_days_remaining == 2213 - assert response.audience_lists == ["audience_lists_value"] + assert isinstance(response, pagers.ListRecurringAudienceListsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): +def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9220,7 +9937,7 @@ def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_recurring_audience_list + client._transport.list_recurring_audience_lists in client._transport._wrapped_methods ) @@ -9230,29 +9947,29 @@ def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_recurring_audience_list + client._transport.list_recurring_audience_lists ] = mock_rpc request = {} - client.get_recurring_audience_list(request) + client.list_recurring_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_recurring_audience_list(request) + client.list_recurring_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_recurring_audience_list_rest_required_fields( - request_type=analytics_data_api.GetRecurringAudienceListRequest, +def test_list_recurring_audience_lists_rest_required_fields( + request_type=analytics_data_api.ListRecurringAudienceListsRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9263,21 +9980,28 @@ def test_get_recurring_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9286,7 +10010,7 @@ def test_get_recurring_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RecurringAudienceList() + return_value = analytics_data_api.ListRecurringAudienceListsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9307,30 +10031,42 @@ def test_get_recurring_audience_list_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_recurring_audience_list(request) + response = client.list_recurring_audience_lists(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_recurring_audience_list_rest_unset_required_fields(): +def test_list_recurring_audience_lists_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.get_recurring_audience_list._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_recurring_audience_lists._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_recurring_audience_list_rest_interceptors(null_interceptor): +def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9343,14 +10079,16 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_get_recurring_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, + "post_list_recurring_audience_lists", ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_get_recurring_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, + "pre_list_recurring_audience_lists", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.GetRecurringAudienceListRequest.pb( - analytics_data_api.GetRecurringAudienceListRequest() + pb_message = analytics_data_api.ListRecurringAudienceListsRequest.pb( + analytics_data_api.ListRecurringAudienceListsRequest() ) transcode.return_value = { "method": "post", @@ -9362,19 +10100,21 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.RecurringAudienceList.to_json( - analytics_data_api.RecurringAudienceList() + req.return_value._content = ( + 
analytics_data_api.ListRecurringAudienceListsResponse.to_json( + analytics_data_api.ListRecurringAudienceListsResponse() + ) ) - request = analytics_data_api.GetRecurringAudienceListRequest() + request = analytics_data_api.ListRecurringAudienceListsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.RecurringAudienceList() + post.return_value = analytics_data_api.ListRecurringAudienceListsResponse() - client.get_recurring_audience_list( + client.list_recurring_audience_lists( request, metadata=[ ("key", "val"), @@ -9386,9 +10126,9 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_recurring_audience_list_rest_bad_request( +def test_list_recurring_audience_lists_rest_bad_request( transport: str = "rest", - request_type=analytics_data_api.GetRecurringAudienceListRequest, + request_type=analytics_data_api.ListRecurringAudienceListsRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9396,7 +10136,7 @@ def test_get_recurring_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -9408,10 +10148,10 @@ def test_get_recurring_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_recurring_audience_list(request) + client.list_recurring_audience_lists(request) -def test_get_recurring_audience_list_rest_flattened(): +def test_list_recurring_audience_lists_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9420,14 +10160,14 @@ def test_get_recurring_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RecurringAudienceList() + return_value = analytics_data_api.ListRecurringAudienceListsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/recurringAudienceLists/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -9435,25 +10175,27 @@ def test_get_recurring_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_recurring_audience_list(**mock_args) + client.list_recurring_audience_lists(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/recurringAudienceLists/*}" + "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" % client.transport._host, args[1], ) -def test_get_recurring_audience_list_rest_flattened_error(transport: str = "rest"): +def test_list_recurring_audience_lists_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9462,61 +10204,119 @@ def test_get_recurring_audience_list_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_recurring_audience_list( - analytics_data_api.GetRecurringAudienceListRequest(), - name="name_value", + client.list_recurring_audience_lists( + analytics_data_api.ListRecurringAudienceListsRequest(), + parent="parent_value", ) -def test_get_recurring_audience_list_rest_error(): +def test_list_recurring_audience_lists_rest_pager(transport: str = "rest"): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_data_api.ListRecurringAudienceListsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_recurring_audience_lists(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, analytics_data_api.RecurringAudienceList) for i in results + ) + + pages = list(client.list_recurring_audience_lists(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListRecurringAudienceListsRequest, + analytics_data_api.GetPropertyQuotasSnapshotRequest, dict, ], ) -def 
test_list_recurring_audience_lists_rest(request_type): +def test_get_property_quotas_snapshot_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/propertyQuotasSnapshot"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListRecurringAudienceListsResponse( - next_page_token="next_page_token_value", + return_value = analytics_data_api.PropertyQuotasSnapshot( + name="name_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( - return_value - ) + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_recurring_audience_lists(request) + response = client.get_property_quotas_snapshot(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRecurringAudienceListsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == "name_value" -def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): +def test_get_property_quotas_snapshot_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9531,7 +10331,7 @@ def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_recurring_audience_lists + client._transport.get_property_quotas_snapshot in client._transport._wrapped_methods ) @@ -9541,29 +10341,29 @@ def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_recurring_audience_lists + client._transport.get_property_quotas_snapshot ] = mock_rpc request = {} - client.list_recurring_audience_lists(request) + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_recurring_audience_lists(request) + client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_recurring_audience_lists_rest_required_fields( - request_type=analytics_data_api.ListRecurringAudienceListsRequest, +def test_get_property_quotas_snapshot_rest_required_fields( + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9574,28 +10374,21 @@ def test_list_recurring_audience_lists_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) + ).get_property_quotas_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).get_property_quotas_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9604,7 +10397,7 @@ def test_list_recurring_audience_lists_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListRecurringAudienceListsResponse() + return_value = analytics_data_api.PropertyQuotasSnapshot() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9625,42 +10418,30 @@ def test_list_recurring_audience_lists_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( - return_value - ) + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_recurring_audience_lists(request) + response = client.get_property_quotas_snapshot(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_recurring_audience_lists_rest_unset_required_fields(): +def test_get_property_quotas_snapshot_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_recurring_audience_lists._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_property_quotas_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): +def test_get_property_quotas_snapshot_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9674,15 +10455,14 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_list_recurring_audience_lists", + "post_get_property_quotas_snapshot", ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "pre_list_recurring_audience_lists", + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_property_quotas_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.ListRecurringAudienceListsRequest.pb( - analytics_data_api.ListRecurringAudienceListsRequest() + pb_message = analytics_data_api.GetPropertyQuotasSnapshotRequest.pb( + analytics_data_api.GetPropertyQuotasSnapshotRequest() ) transcode.return_value = { "method": "post", @@ -9694,21 +10474,19 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.ListRecurringAudienceListsResponse.to_json( - analytics_data_api.ListRecurringAudienceListsResponse() - ) + req.return_value._content = 
analytics_data_api.PropertyQuotasSnapshot.to_json( + analytics_data_api.PropertyQuotasSnapshot() ) - request = analytics_data_api.ListRecurringAudienceListsRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.ListRecurringAudienceListsResponse() + post.return_value = analytics_data_api.PropertyQuotasSnapshot() - client.list_recurring_audience_lists( + client.get_property_quotas_snapshot( request, metadata=[ ("key", "val"), @@ -9720,9 +10498,9 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_recurring_audience_lists_rest_bad_request( +def test_get_property_quotas_snapshot_rest_bad_request( transport: str = "rest", - request_type=analytics_data_api.ListRecurringAudienceListsRequest, + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9730,7 +10508,7 @@ def test_list_recurring_audience_lists_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/propertyQuotasSnapshot"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -9742,10 +10520,10 @@ def test_list_recurring_audience_lists_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_recurring_audience_lists(request) + client.get_property_quotas_snapshot(request) -def test_list_recurring_audience_lists_rest_flattened(): +def test_get_property_quotas_snapshot_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9754,14 +10532,14 @@ def test_list_recurring_audience_lists_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListRecurringAudienceListsResponse() + return_value = analytics_data_api.PropertyQuotasSnapshot() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/propertyQuotasSnapshot"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -9769,27 +10547,25 @@ def test_list_recurring_audience_lists_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( - return_value - ) + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_recurring_audience_lists(**mock_args) + client.get_property_quotas_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" + "%s/v1alpha/{name=properties/*/propertyQuotasSnapshot}" % client.transport._host, args[1], ) -def test_list_recurring_audience_lists_rest_flattened_error(transport: str = "rest"): +def test_get_property_quotas_snapshot_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9798,77 +10574,17 @@ def test_list_recurring_audience_lists_rest_flattened_error(transport: str = "re # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_recurring_audience_lists( - analytics_data_api.ListRecurringAudienceListsRequest(), - parent="parent_value", + client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name="name_value", ) -def test_list_recurring_audience_lists_rest_pager(transport: str = "rest"): +def test_get_property_quotas_snapshot_rest_error(): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_data_api.ListRecurringAudienceListsResponse.to_json(x) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_recurring_audience_lists(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, analytics_data_api.RecurringAudienceList) for i in results - ) - - pages = list(client.list_recurring_audience_lists(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", @@ -11481,6 +12197,7 @@ def test_alpha_analytics_data_base_transport(): "create_recurring_audience_list", "get_recurring_audience_list", 
"list_recurring_audience_lists", + "get_property_quotas_snapshot", "create_report_task", "query_report_task", "get_report_task", @@ -11818,6 +12535,9 @@ def test_alpha_analytics_data_client_transport_session_collision(transport_name) session1 = client1.transport.list_recurring_audience_lists._session session2 = client2.transport.list_recurring_audience_lists._session assert session1 != session2 + session1 = client1.transport.get_property_quotas_snapshot._session + session2 = client2.transport.get_property_quotas_snapshot._session + assert session1 != session2 session1 = client1.transport.create_report_task._session session2 = client2.transport.create_report_task._session assert session1 != session2 @@ -12015,9 +12735,29 @@ def test_parse_audience_list_path(): assert expected == actual -def test_recurring_audience_list_path(): +def test_property_quotas_snapshot_path(): property = "oyster" - recurring_audience_list = "nudibranch" + expected = "properties/{property}/propertyQuotasSnapshot".format( + property=property, + ) + actual = AlphaAnalyticsDataClient.property_quotas_snapshot_path(property) + assert expected == actual + + +def test_parse_property_quotas_snapshot_path(): + expected = { + "property": "nudibranch", + } + path = AlphaAnalyticsDataClient.property_quotas_snapshot_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlphaAnalyticsDataClient.parse_property_quotas_snapshot_path(path) + assert expected == actual + + +def test_recurring_audience_list_path(): + property = "cuttlefish" + recurring_audience_list = "mussel" expected = ( "properties/{property}/recurringAudienceLists/{recurring_audience_list}".format( property=property, @@ -12032,8 +12772,8 @@ def test_recurring_audience_list_path(): def test_parse_recurring_audience_list_path(): expected = { - "property": "cuttlefish", - "recurring_audience_list": "mussel", + "property": "winkle", + "recurring_audience_list": "nautilus", } path = AlphaAnalyticsDataClient.recurring_audience_list_path(**expected) @@ -12043,8 +12783,8 @@ def test_parse_recurring_audience_list_path(): def test_report_task_path(): - property = "winkle" - report_task = "nautilus" + property = "scallop" + report_task = "abalone" expected = "properties/{property}/reportTasks/{report_task}".format( property=property, report_task=report_task, @@ -12055,8 +12795,8 @@ def test_report_task_path(): def test_parse_report_task_path(): expected = { - "property": "scallop", - "report_task": "abalone", + "property": "squid", + "report_task": "clam", } path = AlphaAnalyticsDataClient.report_task_path(**expected) @@ -12066,7 +12806,7 @@ def test_parse_report_task_path(): def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -12076,7 +12816,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "octopus", } path = AlphaAnalyticsDataClient.common_billing_account_path(**expected) @@ -12086,7 +12826,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "whelk" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -12096,7 +12836,7 @@ def test_common_folder_path(): def 
test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "nudibranch", } path = AlphaAnalyticsDataClient.common_folder_path(**expected) @@ -12106,7 +12846,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "oyster" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -12116,7 +12856,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nudibranch", + "organization": "mussel", } path = AlphaAnalyticsDataClient.common_organization_path(**expected) @@ -12126,7 +12866,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "cuttlefish" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -12136,7 +12876,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "nautilus", } path = AlphaAnalyticsDataClient.common_project_path(**expected) @@ -12146,8 +12886,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -12158,8 +12898,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "squid", + "location": "clam", } path = AlphaAnalyticsDataClient.common_location_path(**expected) diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py index 19258ca590fa..c160996cd5ab 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py +++ 
b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py @@ -1336,22 +1336,23 @@ async def test_run_report_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_report - ] = mock_object + ] = mock_rpc request = {} await client.run_report(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1624,22 +1625,23 @@ async def test_run_pivot_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_pivot_report - ] = mock_object + ] = mock_rpc request = {} await client.run_pivot_report(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_pivot_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1917,22 +1919,23 @@ async def test_batch_run_reports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_run_reports - ] = mock_object + ] = mock_rpc request = {} await client.batch_run_reports(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_run_reports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2219,22 +2222,23 @@ async def test_batch_run_pivot_reports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_run_pivot_reports - ] = mock_object + ] = mock_rpc request = {} await client.batch_run_pivot_reports(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_run_pivot_reports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2508,22 +2512,23 @@ async def test_get_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_metadata - ] = mock_object + ] = mock_rpc request = {} await client.get_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2887,22 +2892,23 @@ async def test_run_realtime_report_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_realtime_report - ] = mock_object + ] = mock_rpc request = {} await client.run_realtime_report(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_realtime_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3185,22 +3191,23 @@ async def test_check_compatibility_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_compatibility - ] = mock_object + ] = mock_rpc request = {} await client.check_compatibility(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_compatibility(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3426,8 +3433,9 @@ def test_create_audience_export_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_audience_export(request) @@ -3483,26 +3491,28 @@ async def test_create_audience_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_audience_export - ] = mock_object + ] = mock_rpc request = {} await client.create_audience_export(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_audience_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3882,22 +3892,23 @@ async def test_query_audience_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_audience_export - ] = mock_object + ] = mock_rpc request = {} await client.query_audience_export(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_audience_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4290,22 +4301,23 @@ async def test_get_audience_export_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_audience_export - ] = mock_object + ] = mock_rpc request = {} await client.get_audience_export(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_audience_export(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4694,22 +4706,23 @@ async def test_list_audience_exports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_audience_exports - ] = mock_object + ] = mock_rpc request = {} await client.list_audience_exports(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_audience_exports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-apps-card/google/apps/card/gapic_version.py b/packages/google-apps-card/google/apps/card/gapic_version.py index 937ede8823ef..558c8aab67c5 100644 --- a/packages/google-apps-card/google/apps/card/gapic_version.py +++ b/packages/google-apps-card/google/apps/card/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-card/google/apps/card_v1/gapic_version.py b/packages/google-apps-card/google/apps/card_v1/gapic_version.py index 937ede8823ef..558c8aab67c5 100644 --- a/packages/google-apps-card/google/apps/card_v1/gapic_version.py +++ b/packages/google-apps-card/google/apps/card_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-chat/CHANGELOG.md b/packages/google-apps-chat/CHANGELOG.md index ec2485ff2775..4e560ca6d23f 100644 --- a/packages/google-apps-chat/CHANGELOG.md +++ b/packages/google-apps-chat/CHANGELOG.md @@ -1,5 +1,39 @@ # Changelog +## [0.1.12](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.11...google-apps-chat-v0.1.12) (2024-10-08) + + +### Features + +* Add doc for import mode external users support ([3881914](https://github.com/googleapis/google-cloud-python/commit/3881914b43b47bf2ee187f62447ef9eccc851749)) +* Add doc for permission settings & announcement space support ([3881914](https://github.com/googleapis/google-cloud-python/commit/3881914b43b47bf2ee187f62447ef9eccc851749)) + + +### Documentation + +* Discoverable space docs improvement ([3881914](https://github.com/googleapis/google-cloud-python/commit/3881914b43b47bf2ee187f62447ef9eccc851749)) +* Memberships API dev docs improvement ([3881914](https://github.com/googleapis/google-cloud-python/commit/3881914b43b47bf2ee187f62447ef9eccc851749)) +* Messages API dev docs improvement ([3881914](https://github.com/googleapis/google-cloud-python/commit/3881914b43b47bf2ee187f62447ef9eccc851749)) + +## [0.1.11](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.10...google-apps-chat-v0.1.11) (2024-09-16) + + +### Features + +* If you're a domain administrator or a delegated administrator, you can now include the `useAdminAccess` parameter when you call the Chat API with your administrator privileges with the following methods to manage Chat spaces and memberships in your Workspace organization: ([a20b1e5](https://github.com/googleapis/google-cloud-python/commit/a20b1e508068845c36b1701836ba17a699cb10ac)) + + +### Documentation + +* A comment for field `filter` in message `.google.chat.v1.ListMembershipsRequest` is updated to 
support `!=` operator ([a20b1e5](https://github.com/googleapis/google-cloud-python/commit/a20b1e508068845c36b1701836ba17a699cb10ac)) + +## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.9...google-apps-chat-v0.1.10) (2024-09-05) + + +### Features + +* [google-apps-chat] Add CHAT_SPACE link type support for GA launch ([#13064](https://github.com/googleapis/google-cloud-python/issues/13064)) ([0ee300a](https://github.com/googleapis/google-cloud-python/commit/0ee300a0497968aa2c85969924b37f95f67675f0)) + ## [0.1.9](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.8...google-apps-chat-v0.1.9) (2024-07-30) diff --git a/packages/google-apps-chat/chat-v1-py.tar.gz b/packages/google-apps-chat/chat-v1-py.tar.gz index e69de29bb2d1..496d03ec6ccf 100644 Binary files a/packages/google-apps-chat/chat-v1-py.tar.gz and b/packages/google-apps-chat/chat-v1-py.tar.gz differ diff --git a/packages/google-apps-chat/google/apps/chat/__init__.py b/packages/google-apps-chat/google/apps/chat/__init__.py index b50c0acffe6f..17679ffaa12a 100644 --- a/packages/google-apps-chat/google/apps/chat/__init__.py +++ b/packages/google-apps-chat/google/apps/chat/__init__.py @@ -26,6 +26,7 @@ from google.apps.chat_v1.types.annotation import ( Annotation, AnnotationType, + ChatSpaceLinkData, DriveLinkData, RichLinkMetadata, SlashCommandMetadata, @@ -110,6 +111,8 @@ GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, + SearchSpacesRequest, + SearchSpacesResponse, Space, UpdateSpaceRequest, ) @@ -137,6 +140,7 @@ "ChatServiceAsyncClient", "ActionStatus", "Annotation", + "ChatSpaceLinkData", "DriveLinkData", "RichLinkMetadata", "SlashCommandMetadata", @@ -210,6 +214,8 @@ "GetSpaceRequest", "ListSpacesRequest", "ListSpacesResponse", + "SearchSpacesRequest", + "SearchSpacesResponse", "Space", "UpdateSpaceRequest", "GetSpaceEventRequest", diff --git a/packages/google-apps-chat/google/apps/chat/gapic_version.py 
b/packages/google-apps-chat/google/apps/chat/gapic_version.py index f8ea948a9c30..17bbab4c1877 100644 --- a/packages/google-apps-chat/google/apps/chat/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.1.12" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/__init__.py index 94c3b6f7fef3..d770a6fbb7ff 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/__init__.py +++ b/packages/google-apps-chat/google/apps/chat_v1/__init__.py @@ -23,6 +23,7 @@ from .types.annotation import ( Annotation, AnnotationType, + ChatSpaceLinkData, DriveLinkData, RichLinkMetadata, SlashCommandMetadata, @@ -107,6 +108,8 @@ GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, + SearchSpacesRequest, + SearchSpacesResponse, Space, UpdateSpaceRequest, ) @@ -138,6 +141,7 @@ "AttachmentDataRef", "CardWithId", "ChatServiceClient", + "ChatSpaceLinkData", "CompleteImportSpaceRequest", "CompleteImportSpaceResponse", "ContextualAddOnMarkup", @@ -199,6 +203,8 @@ "ReactionCreatedEventData", "ReactionDeletedEventData", "RichLinkMetadata", + "SearchSpacesRequest", + "SearchSpacesResponse", "SetUpSpaceRequest", "SlashCommand", "SlashCommandMetadata", diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json b/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json index 3cdf90b4e07e..325f347acf03 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json @@ -120,6 +120,11 @@ "list_spaces" ] }, + "SearchSpaces": { + "methods": [ + "search_spaces" + ] + }, "SetUpSpace": { "methods": [ "set_up_space" @@ -265,6 +270,11 @@ "list_spaces" ] }, + "SearchSpaces": { + 
"methods": [ + "search_spaces" + ] + }, "SetUpSpace": { "methods": [ "set_up_space" @@ -410,6 +420,11 @@ "list_spaces" ] }, + "SearchSpaces": { + "methods": [ + "search_spaces" + ] + }, "SetUpSpace": { "methods": [ "set_up_space" diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py index f8ea948a9c30..17bbab4c1877 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.1.12" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py index e8130b807fc3..8e88d84deba6 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -236,9 +235,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ChatServiceClient).get_transport_class, type(ChatServiceClient) - ) + get_transport_class = ChatServiceClient.get_transport_class def __init__( self, @@ -317,19 +314,36 @@ async def create_message( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_message.Message: - r"""Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + r"""Creates a message in a Google Chat space. 
For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. + + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication async client| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. The content of message can only contain text + (``text``). + + |Message sent with user authentication async client| + + The maximum message size, including the message contents, is + 32,000 bytes. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + .. |Message sent with app authentication async client| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication async client| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg .. code-block:: python @@ -467,9 +481,12 @@ async def list_messages( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMessagesAsyncPager: r"""Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. 
If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. .. code-block:: python @@ -1538,6 +1555,103 @@ async def sample_list_spaces(): # Done; return the response. return response + async def search_spaces( + self, + request: Optional[Union[space.SearchSpacesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchSpacesAsyncPager: + r"""Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + async def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.chat_v1.types.SearchSpacesRequest, dict]]): + The request object. Request to search for a list of + spaces based on a query. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.chat_v1.services.chat_service.pagers.SearchSpacesAsyncPager: + Response with a list of spaces + corresponding to the search spaces + request. Iterating over this object + will yield results and resolve + additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, space.SearchSpacesRequest): + request = space.SearchSpacesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.search_spaces + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchSpacesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def get_space( self, request: Optional[Union[space.GetSpaceRequest, dict]] = None, @@ -1665,8 +1779,9 @@ async def create_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_space.Space: - r"""Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + r"""Creates a space with no members. Can be used to create a named + space. 
Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -1674,6 +1789,11 @@ async def create_space( space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. @@ -1693,7 +1813,11 @@ async def sample_create_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request @@ -1704,16 +1828,22 @@ async def sample_create_space(): Args: request (Optional[Union[google.apps.chat_v1.types.CreateSpaceRequest, dict]]): - The request object. A request to create a named space. + The request object. A request to create a named space + with no members. space (:class:`google.apps.chat_v1.types.Space`): Required. The ``displayName`` and ``spaceType`` fields must be populated. Only ``SpaceType.SPACE`` is supported. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space, try a different ``displayName``. An - existing space within the Google Workspace organization - might already use this display name. + If you receive the error message ``ALREADY_EXISTS``, try + a different ``displayName``. An existing space within + the Google Workspace organization might already use this + display name. + + If you're a member of the `Developer Preview + program `__, + ``SpaceType.GROUP_CHAT`` can be used if ``importMode`` + is set to true. The space ``name`` is assigned on the server so anything specified in this field will be ignored. 
@@ -1858,7 +1988,11 @@ async def sample_set_up_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request @@ -1948,7 +2082,11 @@ async def sample_update_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request @@ -1973,68 +2111,73 @@ async def sample_update_space(): Required. The updated field paths, comma separated if there are multiple. - Currently supported field paths: - - - ``display_name`` (Only supports changing the display - name of a space with the ``SPACE`` type, or when also - including the ``space_type`` mask to change a - ``GROUP_CHAT`` space type to ``SPACE``. Trying to - update the display name of a ``GROUP_CHAT`` or a - ``DIRECT_MESSAGE`` space results in an invalid - argument error. If you receive the error message - ``ALREADY_EXISTS`` when updating the ``displayName``, - try a different ``displayName``. An existing space - within the Google Workspace organization might - already use this display name.) - - - ``space_type`` (Only supports changing a - ``GROUP_CHAT`` space type to ``SPACE``. Include - ``display_name`` together with ``space_type`` in the - update mask and ensure that the specified space has a - non-empty display name and the ``SPACE`` space type. - Including the ``space_type`` mask and the ``SPACE`` - type in the specified space when updating the display - name is optional if the existing space already has - the ``SPACE`` type. Trying to update the space type - in other ways results in an invalid argument error). - ``space_type`` is not supported with admin access. 
- - - ``space_details`` - - - ``space_history_state`` (Supports `turning history on - or off for the - space `__ - if `the organization allows users to change their - history - setting `__. - Warning: mutually exclusive with all other field - paths.) ``space_history_state`` is not supported with - admin access. - - - ``access_settings.audience`` (Supports changing the - `access - setting `__ - of who can discover the space, join the space, and - preview the messages in space. If no audience is - specified in the access setting, the space's access - setting is updated to private. Warning: mutually - exclusive with all other field paths.) - ``access_settings.audience`` is not supported with - admin access. - - - Developer Preview: Supports changing the `permission - settings `__ - of a space, supported field paths include: - ``permission_settings.manage_members_and_groups``, - ``permission_settings.modify_space_details``, - ``permission_settings.toggle_history``, - ``permission_settings.use_at_mention_all``, - ``permission_settings.manage_apps``, - ``permission_settings.manage_webhooks``, - ``permission_settings.reply_messages`` (Warning: - mutually exclusive with all other non-permission - settings field paths). ``permission_settings`` is not - supported with admin access. + You can update the following fields for a space: + + ``space_details``: Updates the space's description. + Supports up to 150 characters. + + ``display_name``: Only supports updating the display + name for spaces where ``spaceType`` field is ``SPACE``. + If you receive the error message ``ALREADY_EXISTS``, try + a different value. An existing space within the Google + Workspace organization might already use this display + name. + + ``space_type``: Only supports changing a ``GROUP_CHAT`` + space type to ``SPACE``. Include ``display_name`` + together with ``space_type`` in the update mask and + ensure that the specified space has a non-empty display + name and the ``SPACE`` space type. 
Including the + ``space_type`` mask and the ``SPACE`` type in the + specified space when updating the display name is + optional if the existing space already has the ``SPACE`` + type. Trying to update the space type in other ways + results in an invalid argument error. ``space_type`` is + not supported with ``useAdminAccess``. + + ``space_history_state``: Updates `space history + settings `__ + by turning history on or off for the space. Only + supported if history settings are enabled for the Google + Workspace organization. To update the space history + state, you must omit all other field masks in your + request. ``space_history_state`` is not supported with + ``useAdminAccess``. + + ``access_settings.audience``: Updates the `access + setting `__ + of who can discover the space, join the space, and + preview the messages in named space where ``spaceType`` + field is ``SPACE``. If the existing space has a target + audience, you can remove the audience and restrict space + access by omitting a value for this field mask. To + update access settings for a space, the authenticating + user must be a space manager and omit all other field + masks in your request. You can't update this field if + the space is in `import + mode `__. + To learn more, see `Make a space discoverable to + specific + users `__. + ``access_settings.audience`` is not supported with + ``useAdminAccess``. + + ``permission_settings``: Supports changing the + `permission + settings `__ + of a space. When updating permission settings, you can + only specify ``permissionSettings`` field masks; you + cannot update other field masks at the same time. + ``permissionSettings`` is not supported with + ``useAdminAccess``. 
The supported field masks include: + + - ``permission_settings.manageMembersAndGroups`` + - ``permission_settings.modifySpaceDetails`` + - ``permission_settings.toggleHistory`` + - ``permission_settings.useAtMentionAll`` + - ``permission_settings.manageApps`` + - ``permission_settings.manageWebhooks`` + - ``permission_settings.replyMessages`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2401,46 +2544,25 @@ async def create_membership( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_membership.Membership: - r"""Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + r"""Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. 
For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. .. code-block:: python @@ -3600,6 +3722,9 @@ async def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. 
To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py index fa5e085eb32e..aff7158000f3 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py @@ -850,7 +850,7 @@ def __init__( transport_init: Union[ Type[ChatServiceTransport], Callable[..., ChatServiceTransport] ] = ( - type(self).get_transport_class(transport) + ChatServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ChatServiceTransport], transport) ) @@ -878,19 +878,36 @@ def create_message( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_message.Message: - r"""Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + r"""Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). 
+ + |Message sent with app authentication client| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. The content of message can only contain text + (``text``). + + |Message sent with user authentication client| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication client| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication client| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg .. code-block:: python @@ -1025,9 +1042,12 @@ def list_messages( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMessagesPager: r"""Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. .. code-block:: python @@ -2071,6 +2091,101 @@ def sample_list_spaces(): # Done; return the response. return response + def search_spaces( + self, + request: Optional[Union[space.SearchSpacesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchSpacesPager: + r"""Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.chat_v1.types.SearchSpacesRequest, dict]): + The request object. Request to search for a list of + spaces based on a query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.chat_v1.services.chat_service.pagers.SearchSpacesPager: + Response with a list of spaces + corresponding to the search spaces + request. Iterating over this object + will yield results and resolve + additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, space.SearchSpacesRequest): + request = space.SearchSpacesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_spaces] + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchSpacesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def get_space( self, request: Optional[Union[space.GetSpaceRequest, dict]] = None, @@ -2195,8 +2310,9 @@ def create_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_space.Space: - r"""Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + r"""Creates a space with no members. Can be used to create a named + space. Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -2204,6 +2320,11 @@ def create_space( space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. @@ -2223,7 +2344,11 @@ def sample_create_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request @@ -2234,16 +2359,22 @@ def sample_create_space(): Args: request (Union[google.apps.chat_v1.types.CreateSpaceRequest, dict]): - The request object. A request to create a named space. + The request object. A request to create a named space + with no members. space (google.apps.chat_v1.types.Space): Required. The ``displayName`` and ``spaceType`` fields must be populated. 
Only ``SpaceType.SPACE`` is supported. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space, try a different ``displayName``. An - existing space within the Google Workspace organization - might already use this display name. + If you receive the error message ``ALREADY_EXISTS``, try + a different ``displayName``. An existing space within + the Google Workspace organization might already use this + display name. + + If you're a member of the `Developer Preview + program `__, + ``SpaceType.GROUP_CHAT`` can be used if ``importMode`` + is set to true. The space ``name`` is assigned on the server so anything specified in this field will be ignored. @@ -2385,7 +2516,11 @@ def sample_set_up_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request @@ -2473,7 +2608,11 @@ def sample_update_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request @@ -2498,68 +2637,73 @@ def sample_update_space(): Required. The updated field paths, comma separated if there are multiple. - Currently supported field paths: - - - ``display_name`` (Only supports changing the display - name of a space with the ``SPACE`` type, or when also - including the ``space_type`` mask to change a - ``GROUP_CHAT`` space type to ``SPACE``. Trying to - update the display name of a ``GROUP_CHAT`` or a - ``DIRECT_MESSAGE`` space results in an invalid - argument error. If you receive the error message - ``ALREADY_EXISTS`` when updating the ``displayName``, - try a different ``displayName``. An existing space - within the Google Workspace organization might - already use this display name.) 
- - - ``space_type`` (Only supports changing a - ``GROUP_CHAT`` space type to ``SPACE``. Include - ``display_name`` together with ``space_type`` in the - update mask and ensure that the specified space has a - non-empty display name and the ``SPACE`` space type. - Including the ``space_type`` mask and the ``SPACE`` - type in the specified space when updating the display - name is optional if the existing space already has - the ``SPACE`` type. Trying to update the space type - in other ways results in an invalid argument error). - ``space_type`` is not supported with admin access. - - - ``space_details`` - - - ``space_history_state`` (Supports `turning history on - or off for the - space `__ - if `the organization allows users to change their - history - setting `__. - Warning: mutually exclusive with all other field - paths.) ``space_history_state`` is not supported with - admin access. - - - ``access_settings.audience`` (Supports changing the - `access - setting `__ - of who can discover the space, join the space, and - preview the messages in space. If no audience is - specified in the access setting, the space's access - setting is updated to private. Warning: mutually - exclusive with all other field paths.) - ``access_settings.audience`` is not supported with - admin access. - - - Developer Preview: Supports changing the `permission - settings `__ - of a space, supported field paths include: - ``permission_settings.manage_members_and_groups``, - ``permission_settings.modify_space_details``, - ``permission_settings.toggle_history``, - ``permission_settings.use_at_mention_all``, - ``permission_settings.manage_apps``, - ``permission_settings.manage_webhooks``, - ``permission_settings.reply_messages`` (Warning: - mutually exclusive with all other non-permission - settings field paths). ``permission_settings`` is not - supported with admin access. + You can update the following fields for a space: + + ``space_details``: Updates the space's description. 
+ Supports up to 150 characters. + + ``display_name``: Only supports updating the display + name for spaces where ``spaceType`` field is ``SPACE``. + If you receive the error message ``ALREADY_EXISTS``, try + a different value. An existing space within the Google + Workspace organization might already use this display + name. + + ``space_type``: Only supports changing a ``GROUP_CHAT`` + space type to ``SPACE``. Include ``display_name`` + together with ``space_type`` in the update mask and + ensure that the specified space has a non-empty display + name and the ``SPACE`` space type. Including the + ``space_type`` mask and the ``SPACE`` type in the + specified space when updating the display name is + optional if the existing space already has the ``SPACE`` + type. Trying to update the space type in other ways + results in an invalid argument error. ``space_type`` is + not supported with ``useAdminAccess``. + + ``space_history_state``: Updates `space history + settings `__ + by turning history on or off for the space. Only + supported if history settings are enabled for the Google + Workspace organization. To update the space history + state, you must omit all other field masks in your + request. ``space_history_state`` is not supported with + ``useAdminAccess``. + + ``access_settings.audience``: Updates the `access + setting `__ + of who can discover the space, join the space, and + preview the messages in named space where ``spaceType`` + field is ``SPACE``. If the existing space has a target + audience, you can remove the audience and restrict space + access by omitting a value for this field mask. To + update access settings for a space, the authenticating + user must be a space manager and omit all other field + masks in your request. You can't update this field if + the space is in `import + mode `__. + To learn more, see `Make a space discoverable to + specific + users `__. + ``access_settings.audience`` is not supported with + ``useAdminAccess``. 
+ + ``permission_settings``: Supports changing the + `permission + settings `__ + of a space. When updating permission settings, you can + only specify ``permissionSettings`` field masks; you + cannot update other field masks at the same time. + ``permissionSettings`` is not supported with + ``useAdminAccess``. The supported field masks include: + + - ``permission_settings.manageMembersAndGroups`` + - ``permission_settings.modifySpaceDetails`` + - ``permission_settings.toggleHistory`` + - ``permission_settings.useAtMentionAll`` + - ``permission_settings.manageApps`` + - ``permission_settings.manageWebhooks`` + - ``permission_settings.replyMessages`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2916,46 +3060,25 @@ def create_membership( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_membership.Membership: - r"""Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + r"""Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. 
- To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. .. code-block:: python @@ -4088,6 +4211,9 @@ def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. 
To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py index d7565e952ff8..94763fc39240 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py @@ -497,6 +497,158 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class SearchSpacesPager: + """A pager for iterating through ``search_spaces`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.SearchSpacesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``spaces`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchSpaces`` requests and continue to iterate + through the ``spaces`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.SearchSpacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., space.SearchSpacesResponse], + request: space.SearchSpacesRequest, + response: space.SearchSpacesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.chat_v1.types.SearchSpacesRequest): + The initial request object. + response (google.apps.chat_v1.types.SearchSpacesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = space.SearchSpacesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[space.SearchSpacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[space.Space]: + for page in self.pages: + yield from page.spaces + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchSpacesAsyncPager: + """A pager for iterating through ``search_spaces`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.SearchSpacesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``spaces`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchSpaces`` requests and continue to iterate + through the ``spaces`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.SearchSpacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[space.SearchSpacesResponse]], + request: space.SearchSpacesRequest, + response: space.SearchSpacesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.chat_v1.types.SearchSpacesRequest): + The initial request object. + response (google.apps.chat_v1.types.SearchSpacesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = space.SearchSpacesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[space.SearchSpacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[space.Space]: + async def async_generator(): + async for page in self.pages: + for response in page.spaces: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListReactionsPager: """A pager for iterating through ``list_reactions`` requests. 
diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py index e5b32aaf272b..5c83cc5f462e 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py @@ -304,6 +304,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=30.0, client_info=client_info, ), + self.search_spaces: gapic_v1.method.wrap_method( + self.search_spaces, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), self.get_space: gapic_v1.method.wrap_method( self.get_space, default_retry=retries.Retry( @@ -662,6 +676,15 @@ def list_spaces( ]: raise NotImplementedError() + @property + def search_spaces( + self, + ) -> Callable[ + [space.SearchSpacesRequest], + Union[space.SearchSpacesResponse, Awaitable[space.SearchSpacesResponse]], + ]: + raise NotImplementedError() + @property def get_space( self, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py index 2ef3b8c317bb..d02bc6784c2a 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py @@ -254,19 +254,36 @@ def create_message( ) -> Callable[[gc_message.CreateMessageRequest], gc_message.Message]: r"""Return a callable for the create message method over gRPC. - Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. 
For an example, - see `Send a + Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication gRPC| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. The content of message can only contain text + (``text``). + + |Message sent with user authentication gRPC| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication gRPC| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication gRPC| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg Returns: Callable[[~.CreateMessageRequest], @@ -293,9 +310,12 @@ def list_messages( r"""Return a callable for the list messages method over gRPC. Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. 
If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. Returns: @@ -619,6 +639,36 @@ def list_spaces( ) return self._stubs["list_spaces"] + @property + def search_spaces( + self, + ) -> Callable[[space.SearchSpacesRequest], space.SearchSpacesResponse]: + r"""Return a callable for the search spaces method over gRPC. + + Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + Returns: + Callable[[~.SearchSpacesRequest], + ~.SearchSpacesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_spaces" not in self._stubs: + self._stubs["search_spaces"] = self.grpc_channel.unary_unary( + "/google.chat.v1.ChatService/SearchSpaces", + request_serializer=space.SearchSpacesRequest.serialize, + response_deserializer=space.SearchSpacesResponse.deserialize, + ) + return self._stubs["search_spaces"] + @property def get_space(self) -> Callable[[space.GetSpaceRequest], space.Space]: r"""Return a callable for the get space method over gRPC. @@ -656,8 +706,9 @@ def get_space(self) -> Callable[[space.GetSpaceRequest], space.Space]: def create_space(self) -> Callable[[gc_space.CreateSpaceRequest], gc_space.Space]: r"""Return a callable for the create space method over gRPC. - Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + Creates a space with no members. Can be used to create a named + space. 
Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -665,6 +716,11 @@ def create_space(self) -> Callable[[gc_space.CreateSpaceRequest], gc_space.Space space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. @@ -914,46 +970,25 @@ def create_membership( ) -> Callable[[gc_membership.CreateMembershipRequest], gc_membership.Membership]: r"""Return a callable for the create membership method over gRPC. - Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. 
- - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. Returns: Callable[[~.CreateMembershipRequest], @@ -1241,6 +1276,9 @@ def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. 
To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py index e36b81f08a6c..86137f66eff8 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py @@ -258,19 +258,36 @@ def create_message( ) -> Callable[[gc_message.CreateMessageRequest], Awaitable[gc_message.Message]]: r"""Return a callable for the create message method over gRPC. - Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication async gRPC| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. 
The content of message can only contain text + (``text``). + + |Message sent with user authentication async gRPC| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication async gRPC| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication async gRPC| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg Returns: Callable[[~.CreateMessageRequest], @@ -299,9 +316,12 @@ def list_messages( r"""Return a callable for the list messages method over gRPC. Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. Returns: @@ -629,6 +649,36 @@ def list_spaces( ) return self._stubs["list_spaces"] + @property + def search_spaces( + self, + ) -> Callable[[space.SearchSpacesRequest], Awaitable[space.SearchSpacesResponse]]: + r"""Return a callable for the search spaces method over gRPC. + + Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + Returns: + Callable[[~.SearchSpacesRequest], + Awaitable[~.SearchSpacesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "search_spaces" not in self._stubs: + self._stubs["search_spaces"] = self.grpc_channel.unary_unary( + "/google.chat.v1.ChatService/SearchSpaces", + request_serializer=space.SearchSpacesRequest.serialize, + response_deserializer=space.SearchSpacesResponse.deserialize, + ) + return self._stubs["search_spaces"] + @property def get_space(self) -> Callable[[space.GetSpaceRequest], Awaitable[space.Space]]: r"""Return a callable for the get space method over gRPC. @@ -668,8 +718,9 @@ def create_space( ) -> Callable[[gc_space.CreateSpaceRequest], Awaitable[gc_space.Space]]: r"""Return a callable for the create space method over gRPC. - Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + Creates a space with no members. Can be used to create a named + space. Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -677,6 +728,11 @@ def create_space( space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. @@ -934,46 +990,25 @@ def create_membership( ]: r"""Return a callable for the create membership method over gRPC. - Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. 
When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. 
Returns: Callable[[~.CreateMembershipRequest], @@ -1271,6 +1306,9 @@ def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. To get an event, the authenticated user must be a member of the @@ -1485,6 +1523,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=30.0, client_info=client_info, ), + self.search_spaces: gapic_v1.method_async.wrap_method( + self.search_spaces, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), self.get_space: gapic_v1.method_async.wrap_method( self.get_space, default_retry=retries.AsyncRetry( diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py index 87947ff116c8..d9717f7c33ea 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py @@ -241,6 +241,14 @@ def post_list_spaces(self, response): logging.log(f"Received response: {response}") return response + def pre_search_spaces(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_spaces(self, response): + logging.log(f"Received response: {response}") + return response + def pre_set_up_space(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -744,6 +752,27 @@ def post_list_spaces( """ return response + def pre_search_spaces( + self, request: space.SearchSpacesRequest, metadata: 
Sequence[Tuple[str, str]] + ) -> Tuple[space.SearchSpacesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search_spaces + + Override in a subclass to manipulate the request or metadata + before they are sent to the ChatService server. + """ + return request, metadata + + def post_search_spaces( + self, response: space.SearchSpacesResponse + ) -> space.SearchSpacesResponse: + """Post-rpc interceptor for search_spaces + + Override in a subclass to manipulate the response + after it is returned by the ChatService server but before + it is returned to user code. + """ + return response + def pre_set_up_space( self, request: space_setup.SetUpSpaceRequest, @@ -1380,7 +1409,8 @@ def __call__( Args: request (~.gc_space.CreateSpaceRequest): - The request object. A request to create a named space. + The request object. A request to create a named space + with no members. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2908,6 +2938,97 @@ def __call__( resp = self._interceptor.post_list_spaces(resp) return resp + class _SearchSpaces(ChatServiceRestStub): + def __hash__(self): + return hash("SearchSpaces") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "query": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: space.SearchSpacesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> space.SearchSpacesResponse: + r"""Call the search spaces method over HTTP. + + Args: + request (~.space.SearchSpacesRequest): + The request object. Request to search for a list of + spaces based on a query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.space.SearchSpacesResponse: + Response with a list of spaces + corresponding to the search spaces + request. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/spaces:search", + }, + ] + request, metadata = self._interceptor.pre_search_spaces(request, metadata) + pb_request = space.SearchSpacesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = space.SearchSpacesResponse() + pb_resp = space.SearchSpacesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_spaces(resp) + return resp + class _SetUpSpace(ChatServiceRestStub): def __hash__(self): return hash("SetUpSpace") @@ -3671,6 +3792,14 @@ def list_spaces( # In C++ this would require a dynamic_cast return self._ListSpaces(self._session, self._host, self._interceptor) # type: ignore + @property + def search_spaces( + self, + ) -> Callable[[space.SearchSpacesRequest], space.SearchSpacesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchSpaces(self._session, self._host, self._interceptor) # type: ignore + @property def set_up_space(self) -> Callable[[space_setup.SetUpSpaceRequest], space.Space]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py index e47046db19af..d510a888bd89 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py @@ -17,6 +17,7 @@ from .annotation import ( Annotation, AnnotationType, + ChatSpaceLinkData, DriveLinkData, RichLinkMetadata, SlashCommandMetadata, @@ -101,6 +102,8 @@ GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, + SearchSpacesRequest, + SearchSpacesResponse, Space, UpdateSpaceRequest, ) @@ -123,6 +126,7 @@ __all__ = ( "ActionStatus", "Annotation", + "ChatSpaceLinkData", "DriveLinkData", "RichLinkMetadata", "SlashCommandMetadata", @@ -196,6 +200,8 @@ "GetSpaceRequest", "ListSpacesRequest", "ListSpacesResponse", + "SearchSpacesRequest", + "SearchSpacesResponse", "Space", "UpdateSpaceRequest", "GetSpaceEventRequest", diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/annotation.py b/packages/google-apps-chat/google/apps/chat_v1/types/annotation.py index fa2e62f43671..ee3267d68907 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/annotation.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/annotation.py @@ -31,6 +31,7 @@ "SlashCommandMetadata", "RichLinkMetadata", "DriveLinkData", + "ChatSpaceLinkData", }, ) @@ -247,6 +248,11 @@ class Type(proto.Enum): class RichLinkMetadata(proto.Message): r"""A rich link to a resource. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -257,6 +263,10 @@ class RichLinkMetadata(proto.Message): drive_link_data (google.apps.chat_v1.types.DriveLinkData): Data for a drive link. 
+ This field is a member of `oneof`_ ``data``. + chat_space_link_data (google.apps.chat_v1.types.ChatSpaceLinkData): + Data for a chat space link. + This field is a member of `oneof`_ ``data``. """ @@ -268,9 +278,13 @@ class RichLinkType(proto.Enum): Default value for the enum. Don't use. DRIVE_FILE (1): A Google Drive rich link type. + CHAT_SPACE (2): + A Chat space rich link type. For example, a + space smart chip. """ RICH_LINK_TYPE_UNSPECIFIED = 0 DRIVE_FILE = 1 + CHAT_SPACE = 2 uri: str = proto.Field( proto.STRING, @@ -287,6 +301,12 @@ class RichLinkType(proto.Enum): oneof="data", message="DriveLinkData", ) + chat_space_link_data: "ChatSpaceLinkData" = proto.Field( + proto.MESSAGE, + number=4, + oneof="data", + message="ChatSpaceLinkData", + ) class DriveLinkData(proto.Message): @@ -313,4 +333,36 @@ class DriveLinkData(proto.Message): ) +class ChatSpaceLinkData(proto.Message): + r"""Data for Chat space links. + + Attributes: + space (str): + The space of the linked Chat space resource. + + Format: ``spaces/{space}`` + thread (str): + The thread of the linked Chat space resource. + + Format: ``spaces/{space}/threads/{thread}`` + message (str): + The message of the linked Chat space resource. 
+ + Format: ``spaces/{space}/messages/{message}`` + """ + + space: str = proto.Field( + proto.STRING, + number=1, + ) + thread: str = proto.Field( + proto.STRING, + number=2, + ) + message: str = proto.Field( + proto.STRING, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/membership.py b/packages/google-apps-chat/google/apps/chat_v1/types/membership.py index 925e1d7b28ed..f0e00c470930 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/membership.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/membership.py @@ -208,6 +208,20 @@ class CreateMembershipRequest(proto.Message): relation for itself, it must use the ``chat.memberships.app`` scope, set ``user.type`` to ``BOT``, and set ``user.name`` to ``users/app``. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` `OAuth 2.0 + scope `__. + + Creating app memberships or creating memberships for users + outside the administrator's Google Workspace organization + isn't supported using admin access. """ parent: str = proto.Field( @@ -219,6 +233,10 @@ class CreateMembershipRequest(proto.Message): number=2, message="Membership", ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=5, + ) class UpdateMembershipRequest(proto.Message): @@ -235,6 +253,16 @@ class UpdateMembershipRequest(proto.Message): Currently supported field paths: - ``role`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` `OAuth 2.0 + scope `__. 
""" membership: "Membership" = proto.Field( @@ -247,6 +275,10 @@ class UpdateMembershipRequest(proto.Message): number=2, message=field_mask_pb2.FieldMask, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=3, + ) class ListMembershipsRequest(proto.Message): @@ -289,8 +321,8 @@ class ListMembershipsRequest(proto.Message): ``ROLE_MANAGER``. To filter by type, set ``member.type`` to ``HUMAN`` or - ``BOT``. Developer Preview: You can also filter for - ``member.type`` using the ``!=`` operator. + ``BOT``. You can also filter for ``member.type`` using the + ``!=`` operator. To filter by both role and type, use the ``AND`` operator. To filter by either role or type, use the ``OR`` operator. @@ -338,6 +370,20 @@ class ListMembershipsRequest(proto.Message): Currently requires `user authentication `__. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires either the ``chat.admin.memberships.readonly`` or + ``chat.admin.memberships`` `OAuth 2.0 + scope `__. + + Listing app memberships in a space isn't supported when + using admin access. """ parent: str = proto.Field( @@ -364,6 +410,10 @@ class ListMembershipsRequest(proto.Message): proto.BOOL, number=7, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=8, + ) class ListMembershipsResponse(proto.Message): @@ -414,12 +464,30 @@ class GetMembershipRequest(proto.Message): For example, ``spaces/{space}/members/example@gmail.com`` where ``example@gmail.com`` is the email of the Google Chat user. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. 
+ + Requires the ``chat.admin.memberships`` or + ``chat.admin.memberships.readonly`` `OAuth 2.0 + scopes `__. + + Getting app memberships in a space isn't supported when + using admin access. """ name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=3, + ) class DeleteMembershipRequest(proto.Message): @@ -444,12 +512,29 @@ class DeleteMembershipRequest(proto.Message): Format: ``spaces/{space}/members/{member}`` or ``spaces/{space}/members/app``. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` `OAuth 2.0 + scope `__. + + Deleting app memberships in a space isn't supported using + admin access. """ name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=2, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/message.py b/packages/google-apps-chat/google/apps/chat_v1/types/message.py index 90dda263a1ec..56d10d7b3574 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/message.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/message.py @@ -109,8 +109,8 @@ class Message(proto.Message): user `__, or everyone in the space. - To learn about creating text messages, see `Send a text - message `__. + To learn about creating text messages, see `Send a + message `__. formatted_text (str): Output only. Contains the message ``text`` with markups added to communicate formatting. This field might not @@ -154,8 +154,9 @@ class Message(proto.Message): user `__, the messages can't contain cards. - To learn about cards and how to create them, see `Send card - messages `__. 
+ To learn how to create a message that contains cards, see + `Send a + message `__. `Card builder `__ @@ -213,17 +214,17 @@ class Message(proto.Message): Immutable. Input for creating a message, otherwise output only. The user that can view the message. When set, the message is private and only visible to the specified user - and the Chat app. Link previews and attachments aren't - supported for private messages. + and the Chat app. To include this field in your request, you + must call the Chat API using `app + authentication `__ + and omit the following: - Only Chat apps can send private messages. If your Chat app - `authenticates as a - user `__ - to send a message, the message can't be private and must - omit this field. + - `Attachments `__ + - `Accessory + widgets `__ - For details, see `Send private messages to Google Chat - users `__. + For details, see `Send a message + privately `__. deletion_metadata (google.apps.chat_v1.types.DeletionMetadata): Output only. Information about a deleted message. A message is deleted when ``delete_time`` is set. @@ -428,7 +429,7 @@ class Thread(proto.Message): Attributes: name (str): - Output only. Resource name of the thread. + Resource name of the thread. Example: ``spaces/{space}/threads/{thread}`` thread_key (str): diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/space.py b/packages/google-apps-chat/google/apps/chat_v1/types/space.py index 46f46068321a..694375fc0ebd 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/space.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/space.py @@ -33,6 +33,8 @@ "GetSpaceRequest", "FindDirectMessageRequest", "UpdateSpaceRequest", + "SearchSpacesRequest", + "SearchSpacesResponse", "DeleteSpaceRequest", "CompleteImportSpaceRequest", "CompleteImportSpaceResponse", @@ -44,11 +46,26 @@ class Space(proto.Message): r"""A space in Google Chat. Spaces are conversations between two or more users or 1:1 messages between a user and a Chat app. 
+ This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Resource name of the space. Format: ``spaces/{space}`` + + Where ``{space}`` represents the system-assigned ID for the + space. You can obtain the space ID by calling the + ```spaces.list()`` `__ + method or from the space URL. For example, if the space URL + is + ``https://mail.google.com/mail/u/0/#chat/space/AAAAAAAAA``, + the space ID is ``AAAAAAAAA``. type_ (google.apps.chat_v1.types.Space.Type): Output only. Deprecated: Use ``space_type`` instead. The type of a space. @@ -64,12 +81,12 @@ class Space(proto.Message): instead. Whether messages are threaded in this space. display_name (str): The space's display name. Required when `creating a - space `__. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space or updating the ``displayName``, try a - different ``displayName``. An existing space within the - Google Workspace organization might already use this display - name. + space `__ + with a ``spaceType`` of ``SPACE``. If you receive the error + message ``ALREADY_EXISTS`` when creating a space or updating + the ``displayName``, try a different ``displayName``. An + existing space within the Google Workspace organization + might already use this display name. For direct messages, this field might be empty. @@ -84,15 +101,6 @@ class Space(proto.Message): user account). By default, a space created by a consumer account permits any Google Chat user. - - The space is used to [import data to Google Chat] - (https://developers.google.com/chat/api/guides/import-data-overview) - because import mode spaces must only permit members from - the same Google Workspace organization. 
However, as part - of the `Google Workspace Developer Preview - Program `__, - import mode spaces can permit any Google Chat user so - this field can then be set for import mode spaces. - For existing spaces, this field is output only. space_threading_state (google.apps.chat_v1.types.Space.SpaceThreadingState): Output only. The threading state in the Chat @@ -119,6 +127,9 @@ class Space(proto.Message): Only populated in the output when ``spaceType`` is ``GROUP_CHAT`` or ``SPACE``. + last_active_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp of the last message in + the space. admin_installed (bool): Output only. For direct message (DM) spaces with a Chat app, whether the space was created @@ -129,6 +140,10 @@ class Space(proto.Message): To support admin install, your Chat app must feature direct messaging. + membership_count (google.apps.chat_v1.types.Space.MembershipCount): + Output only. The count of joined memberships grouped by + member type. Populated when the ``space_type`` is ``SPACE``, + ``DIRECT_MESSAGE`` or ``GROUP_CHAT``. access_settings (google.apps.chat_v1.types.Space.AccessSettings): Optional. Specifies the `access setting `__ @@ -137,6 +152,21 @@ class Space(proto.Message): space_uri (str): Output only. The URI for a user to access the space. + predefined_permission_settings (google.apps.chat_v1.types.Space.PredefinedPermissionSettings): + Optional. Input only. Predefined space permission settings, + input only when creating a space. If the field is not set, a + collaboration space is created. After you create the space, + settings are populated in the ``PermissionSettings`` field. + + This field is a member of `oneof`_ ``space_permission_settings``. + permission_settings (google.apps.chat_v1.types.Space.PermissionSettings): + Optional. Space permission settings for + existing spaces. Input for updating exact space + permission settings, where existing permission + settings are replaced. 
Output lists current + permission settings. + + This field is a member of `oneof`_ ``space_permission_settings``. """ class Type(proto.Enum): @@ -203,6 +233,27 @@ class SpaceThreadingState(proto.Enum): GROUPED_MESSAGES = 3 UNTHREADED_MESSAGES = 4 + class PredefinedPermissionSettings(proto.Enum): + r"""Predefined permission settings that you can only specify when + creating a named space. More settings might be added in the future. + For details about permission settings for named spaces, see `Learn + about spaces `__. + + Values: + PREDEFINED_PERMISSION_SETTINGS_UNSPECIFIED (0): + Unspecified. Don't use. + COLLABORATION_SPACE (1): + Setting to make the space a collaboration + space where all members can post messages. + ANNOUNCEMENT_SPACE (2): + Setting to make the space an announcement + space where only space managers can post + messages. + """ + PREDEFINED_PERMISSION_SETTINGS_UNSPECIFIED = 0 + COLLABORATION_SPACE = 1 + ANNOUNCEMENT_SPACE = 2 + class SpaceDetails(proto.Message): r"""Details about the space including description and rules. @@ -228,6 +279,29 @@ class SpaceDetails(proto.Message): number=2, ) + class MembershipCount(proto.Message): + r"""Represents the count of memberships of a space, grouped into + categories. + + Attributes: + joined_direct_human_user_count (int): + Count of human users that have directly + joined the space, not counting users joined by + having membership in a joined group. + joined_group_count (int): + Count of all groups that have directly joined + the space. + """ + + joined_direct_human_user_count: int = proto.Field( + proto.INT32, + number=4, + ) + joined_group_count: int = proto.Field( + proto.INT32, + number=5, + ) + class AccessSettings(proto.Message): r"""Represents the `access setting `__ of the @@ -241,14 +315,20 @@ class AccessSettings(proto.Message): Optional. The resource name of the `target audience `__ who can discover the space, join the space, and preview the - messages in the space. 
For details, see `Make a space - discoverable to a target + messages in the space. If unset, only users or Google Groups + who have been individually invited or added to the space can + access it. For details, see `Make a space discoverable to a + target audience `__. Format: ``audiences/{audience}`` To use the default target audience for the Google Workspace organization, set to ``audiences/default``. + + This field is not populated when using the ``chat.bot`` + scope with `app + authentication `__. """ class AccessState(proto.Enum): @@ -259,12 +339,17 @@ class AccessState(proto.Enum): Access state is unknown or not supported in this API. PRIVATE (1): - Space is discoverable by added or invited - members or groups. + Only users or Google Groups that have been + individually added or invited by other users or + Google Workspace administrators can discover and + access the space. DISCOVERABLE (2): - Space is discoverable by the selected `target - audience `__, - as well as added or invited members or groups. + A space manager has granted a target audience access to the + space. Users or Google Groups that have been individually + added or invited to the space can also discover and access + the space. To learn more, see `Make a space discoverable to + specific + users `__. """ ACCESS_STATE_UNSPECIFIED = 0 PRIVATE = 1 @@ -280,6 +365,125 @@ class AccessState(proto.Enum): number=3, ) + class PermissionSettings(proto.Message): + r"""`Permission + settings `__ that + you can specify when updating an existing named space. + + To set permission settings when creating a space, specify the + ``PredefinedPermissionSettings`` field in your request. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + manage_members_and_groups (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for managing members and groups in a + space. 
+ + This field is a member of `oneof`_ ``_manage_members_and_groups``. + modify_space_details (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for updating space name, avatar, + description and guidelines. + + This field is a member of `oneof`_ ``_modify_space_details``. + toggle_history (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for toggling space history on and + off. + + This field is a member of `oneof`_ ``_toggle_history``. + use_at_mention_all (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for using @all in a space. + + This field is a member of `oneof`_ ``_use_at_mention_all``. + manage_apps (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for managing apps in a space. + + This field is a member of `oneof`_ ``_manage_apps``. + manage_webhooks (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for managing webhooks in a space. + + This field is a member of `oneof`_ ``_manage_webhooks``. + post_messages (google.apps.chat_v1.types.Space.PermissionSetting): + Output only. Setting for posting messages in + a space. + + This field is a member of `oneof`_ ``_post_messages``. + reply_messages (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for replying to messages in a space. + + This field is a member of `oneof`_ ``_reply_messages``. 
+ """ + + manage_members_and_groups: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="Space.PermissionSetting", + ) + modify_space_details: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="Space.PermissionSetting", + ) + toggle_history: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="Space.PermissionSetting", + ) + use_at_mention_all: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="Space.PermissionSetting", + ) + manage_apps: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message="Space.PermissionSetting", + ) + manage_webhooks: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message="Space.PermissionSetting", + ) + post_messages: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message="Space.PermissionSetting", + ) + reply_messages: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message="Space.PermissionSetting", + ) + + class PermissionSetting(proto.Message): + r"""Represents a space permission setting. + + Attributes: + managers_allowed (bool): + Whether spaces managers have this permission. + members_allowed (bool): + Whether non-manager members have this + permission. 
+ """ + + managers_allowed: bool = proto.Field( + proto.BOOL, + number=1, + ) + members_allowed: bool = proto.Field( + proto.BOOL, + number=2, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -334,10 +538,20 @@ class AccessState(proto.Enum): number=17, message=timestamp_pb2.Timestamp, ) + last_active_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=18, + message=timestamp_pb2.Timestamp, + ) admin_installed: bool = proto.Field( proto.BOOL, number=19, ) + membership_count: MembershipCount = proto.Field( + proto.MESSAGE, + number=20, + message=MembershipCount, + ) access_settings: AccessSettings = proto.Field( proto.MESSAGE, number=23, @@ -347,20 +561,37 @@ class AccessState(proto.Enum): proto.STRING, number=25, ) + predefined_permission_settings: PredefinedPermissionSettings = proto.Field( + proto.ENUM, + number=26, + oneof="space_permission_settings", + enum=PredefinedPermissionSettings, + ) + permission_settings: PermissionSettings = proto.Field( + proto.MESSAGE, + number=27, + oneof="space_permission_settings", + message=PermissionSettings, + ) class CreateSpaceRequest(proto.Message): - r"""A request to create a named space. + r"""A request to create a named space with no members. Attributes: space (google.apps.chat_v1.types.Space): Required. The ``displayName`` and ``spaceType`` fields must be populated. Only ``SpaceType.SPACE`` is supported. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space, try a different ``displayName``. An - existing space within the Google Workspace organization - might already use this display name. + If you receive the error message ``ALREADY_EXISTS``, try a + different ``displayName``. An existing space within the + Google Workspace organization might already use this display + name. + + If you're a member of the `Developer Preview + program `__, + ``SpaceType.GROUP_CHAT`` can be used if ``importMode`` is + set to true. 
The space ``name`` is assigned on the server so anything specified in this field will be ignored. @@ -449,8 +680,9 @@ class ListSpacesResponse(proto.Message): Attributes: spaces (MutableSequence[google.apps.chat_v1.types.Space]): - List of spaces in the requested (or first) - page. + List of spaces in the requested (or first) page. Note: The + ``permissionSettings`` field is not returned in the Space + object for list requests. next_page_token (str): You can send a token as ``pageToken`` to retrieve the next page of results. If empty, there are no subsequent pages. @@ -480,12 +712,27 @@ class GetSpaceRequest(proto.Message): ``spaces/{space}``. Format: ``spaces/{space}`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.spaces`` or + ``chat.admin.spaces.readonly`` `OAuth 2.0 + scopes `__. """ name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=2, + ) class FindDirectMessageRequest(proto.Message): @@ -530,67 +777,82 @@ class UpdateSpaceRequest(proto.Message): Required. The updated field paths, comma separated if there are multiple. - Currently supported field paths: - - - ``display_name`` (Only supports changing the display name - of a space with the ``SPACE`` type, or when also - including the ``space_type`` mask to change a - ``GROUP_CHAT`` space type to ``SPACE``. Trying to update - the display name of a ``GROUP_CHAT`` or a - ``DIRECT_MESSAGE`` space results in an invalid argument - error. If you receive the error message - ``ALREADY_EXISTS`` when updating the ``displayName``, try - a different ``displayName``. An existing space within the - Google Workspace organization might already use this - display name.) 
- - - ``space_type`` (Only supports changing a ``GROUP_CHAT`` - space type to ``SPACE``. Include ``display_name`` - together with ``space_type`` in the update mask and - ensure that the specified space has a non-empty display - name and the ``SPACE`` space type. Including the - ``space_type`` mask and the ``SPACE`` type in the - specified space when updating the display name is - optional if the existing space already has the ``SPACE`` - type. Trying to update the space type in other ways - results in an invalid argument error). ``space_type`` is - not supported with admin access. - - - ``space_details`` - - - ``space_history_state`` (Supports `turning history on or - off for the - space `__ - if `the organization allows users to change their history - setting `__. - Warning: mutually exclusive with all other field paths.) - ``space_history_state`` is not supported with admin - access. - - - ``access_settings.audience`` (Supports changing the - `access - setting `__ - of who can discover the space, join the space, and - preview the messages in space. If no audience is - specified in the access setting, the space's access - setting is updated to private. Warning: mutually - exclusive with all other field paths.) - ``access_settings.audience`` is not supported with admin - access. - - - Developer Preview: Supports changing the `permission - settings `__ - of a space, supported field paths include: - ``permission_settings.manage_members_and_groups``, - ``permission_settings.modify_space_details``, - ``permission_settings.toggle_history``, - ``permission_settings.use_at_mention_all``, - ``permission_settings.manage_apps``, - ``permission_settings.manage_webhooks``, - ``permission_settings.reply_messages`` (Warning: mutually - exclusive with all other non-permission settings field - paths). ``permission_settings`` is not supported with - admin access. + You can update the following fields for a space: + + ``space_details``: Updates the space's description. 
Supports + up to 150 characters. + + ``display_name``: Only supports updating the display name + for spaces where ``spaceType`` field is ``SPACE``. If you + receive the error message ``ALREADY_EXISTS``, try a + different value. An existing space within the Google + Workspace organization might already use this display name. + + ``space_type``: Only supports changing a ``GROUP_CHAT`` + space type to ``SPACE``. Include ``display_name`` together + with ``space_type`` in the update mask and ensure that the + specified space has a non-empty display name and the + ``SPACE`` space type. Including the ``space_type`` mask and + the ``SPACE`` type in the specified space when updating the + display name is optional if the existing space already has + the ``SPACE`` type. Trying to update the space type in other + ways results in an invalid argument error. ``space_type`` is + not supported with ``useAdminAccess``. + + ``space_history_state``: Updates `space history + settings `__ + by turning history on or off for the space. Only supported + if history settings are enabled for the Google Workspace + organization. To update the space history state, you must + omit all other field masks in your request. + ``space_history_state`` is not supported with + ``useAdminAccess``. + + ``access_settings.audience``: Updates the `access + setting `__ + of who can discover the space, join the space, and preview + the messages in named space where ``spaceType`` field is + ``SPACE``. If the existing space has a target audience, you + can remove the audience and restrict space access by + omitting a value for this field mask. To update access + settings for a space, the authenticating user must be a + space manager and omit all other field masks in your + request. You can't update this field if the space is in + `import + mode `__. + To learn more, see `Make a space discoverable to specific + users `__. + ``access_settings.audience`` is not supported with + ``useAdminAccess``. 
+ + ``permission_settings``: Supports changing the `permission + settings `__ + of a space. When updating permission settings, you can only + specify ``permissionSettings`` field masks; you cannot + update other field masks at the same time. + ``permissionSettings`` is not supported with + ``useAdminAccess``. The supported field masks include: + + - ``permission_settings.manageMembersAndGroups`` + - ``permission_settings.modifySpaceDetails`` + - ``permission_settings.toggleHistory`` + - ``permission_settings.useAtMentionAll`` + - ``permission_settings.manageApps`` + - ``permission_settings.manageWebhooks`` + - ``permission_settings.replyMessages`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.spaces`` `OAuth 2.0 + scope `__. + + Some ``FieldMask`` values are not supported using admin + access. For details, see the description of ``update_mask``. """ space: "Space" = proto.Field( @@ -603,6 +865,209 @@ class UpdateSpaceRequest(proto.Message): number=2, message=field_mask_pb2.FieldMask, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class SearchSpacesRequest(proto.Message): + r"""Request to search for a list of spaces based on a query. + + Attributes: + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires either the ``chat.admin.spaces.readonly`` or + ``chat.admin.spaces`` `OAuth 2.0 + scope `__. + + This method currently only supports admin access, thus only + ``true`` is accepted for this field. + page_size (int): + The maximum number of spaces to return. The + service may return fewer than this value. 
+ + If unspecified, at most 100 spaces are returned. + + The maximum value is 1000. If you use a value + more than 1000, it's automatically changed to + 1000. + page_token (str): + A token, received from the previous search + spaces call. Provide this parameter to retrieve + the subsequent page. + + When paginating, all other parameters provided + should match the call that provided the page + token. Passing different values to the other + parameters might lead to unexpected results. + query (str): + Required. A search query. + + You can search by using the following parameters: + + - ``create_time`` + - ``customer`` + - ``display_name`` + - ``external_user_allowed`` + - ``last_active_time`` + - ``space_history_state`` + - ``space_type`` + + ``create_time`` and ``last_active_time`` accept a timestamp + in `RFC-3339 `__ + format and the supported comparison operators are: ``=``, + ``<``, ``>``, ``<=``, ``>=``. + + ``customer`` is required and is used to indicate which + customer to fetch spaces from. ``customers/my_customer`` is + the only supported value. + + ``display_name`` only accepts the ``HAS`` (``:``) operator. + The text to match is first tokenized into tokens and each + token is prefix-matched case-insensitively and independently + as a substring anywhere in the space's ``display_name``. For + example, ``Fun Eve`` matches ``Fun event`` or + ``The evening was fun``, but not ``notFun event`` or + ``even``. + + ``external_user_allowed`` accepts either ``true`` or + ``false``. + + ``space_history_state`` only accepts values from the + [``historyState``] + (https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces#Space.HistoryState) + field of a ``space`` resource. + + ``space_type`` is required and the only valid value is + ``SPACE``. + + Across different fields, only ``AND`` operators are + supported. 
A valid example is + ``space_type = "SPACE" AND display_name:"Hello"`` and an + invalid example is + ``space_type = "SPACE" OR display_name:"Hello"``. + + Among the same field, ``space_type`` doesn't support ``AND`` + or ``OR`` operators. ``display_name``, + 'space_history_state', and 'external_user_allowed' only + support ``OR`` operators. ``last_active_time`` and + ``create_time`` support both ``AND`` and ``OR`` operators. + ``AND`` can only be used to represent an interval, such as + ``last_active_time < "2022-01-01T00:00:00+00:00" AND last_active_time > "2023-01-01T00:00:00+00:00"``. + + The following example queries are valid: + + :: + + customer = "customers/my_customer" AND space_type = "SPACE" + + customer = "customers/my_customer" AND space_type = "SPACE" AND + display_name:"Hello World" + + customer = "customers/my_customer" AND space_type = "SPACE" AND + (last_active_time < "2020-01-01T00:00:00+00:00" OR last_active_time > + "2022-01-01T00:00:00+00:00") + + customer = "customers/my_customer" AND space_type = "SPACE" AND + (display_name:"Hello World" OR display_name:"Fun event") AND + (last_active_time > "2020-01-01T00:00:00+00:00" AND last_active_time < + "2022-01-01T00:00:00+00:00") + + customer = "customers/my_customer" AND space_type = "SPACE" AND + (create_time > "2019-01-01T00:00:00+00:00" AND create_time < + "2020-01-01T00:00:00+00:00") AND (external_user_allowed = "true") AND + (space_history_state = "HISTORY_ON" OR space_history_state = "HISTORY_OFF") + order_by (str): + Optional. How the list of spaces is ordered. + + Supported attributes to order by are: + + - ``membership_count.joined_direct_human_user_count`` — + Denotes the count of human users that have directly + joined a space. + - ``last_active_time`` — Denotes the time when last + eligible item is added to any topic of this space. + - ``create_time`` — Denotes the time of the space creation. + + Valid ordering operation values are: + + - ``ASC`` for ascending. Default value. 
+ + - ``DESC`` for descending. + + The supported syntax are: + + - ``membership_count.joined_direct_human_user_count DESC`` + - ``membership_count.joined_direct_human_user_count ASC`` + - ``last_active_time DESC`` + - ``last_active_time ASC`` + - ``create_time DESC`` + - ``create_time ASC`` + """ + + use_admin_access: bool = proto.Field( + proto.BOOL, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + query: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class SearchSpacesResponse(proto.Message): + r"""Response with a list of spaces corresponding to the search + spaces request. + + Attributes: + spaces (MutableSequence[google.apps.chat_v1.types.Space]): + A page of the requested spaces. + next_page_token (str): + A token that can be used to retrieve the next + page. If this field is empty, there are no + subsequent pages. + total_size (int): + The total number of spaces that match the + query, across all pages. If the result is over + 10,000 spaces, this value is an estimate. + """ + + @property + def raw_page(self): + return self + + spaces: MutableSequence["Space"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Space", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) class DeleteSpaceRequest(proto.Message): @@ -613,12 +1078,26 @@ class DeleteSpaceRequest(proto.Message): Required. Resource name of the space to delete. Format: ``spaces/{space}`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.delete`` `OAuth 2.0 + scope `__. 
""" name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=2, + ) class CompleteImportSpaceRequest(proto.Message): diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py b/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py index 96591e800225..17bf07ba27d7 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py @@ -492,8 +492,9 @@ class ListSpaceEventsResponse(proto.Message): Attributes: space_events (MutableSequence[google.apps.chat_v1.types.SpaceEvent]): - Results are returned in chronological order - (oldest event first). + Results are returned in chronological order (oldest event + first). Note: The ``permissionSettings`` field is not + returned in the Space object for list requests. next_page_token (str): Continuation token used to fetch more events. If this field is omitted, there are no diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py index 047b4031cee0..b72fb46759d1 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py @@ -39,7 +39,11 @@ async def sample_create_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py 
b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py index 845ce548ddc0..083a0684ae1e 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py @@ -39,7 +39,11 @@ def sample_create_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py new file mode 100644 index 000000000000..3d25def75a1b --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchSpaces +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_SearchSpaces_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +async def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END chat_v1_generated_ChatService_SearchSpaces_async] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py new file mode 100644 index 000000000000..52e32c45ead5 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchSpaces +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_SearchSpaces_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END chat_v1_generated_ChatService_SearchSpaces_sync] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py index 6870b8744a05..8e2b99a025ad 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py @@ -39,7 +39,11 @@ async def sample_set_up_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + 
space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py index 4d27131a243c..61ad37d3d045 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py @@ -39,7 +39,11 @@ def sample_set_up_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py index 14e0944aaaeb..2c2a593b22a2 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py @@ -39,7 +39,11 @@ async def sample_update_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py index a5158f7a9e07..362d50feb376 100644 --- 
a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py @@ -39,7 +39,11 @@ def sample_update_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index 189e4bbc7470..8e15b6a7b93b 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-chat", - "version": "0.1.9" + "version": "0.1.12" }, "snippets": [ { @@ -728,12 +728,12 @@ "regionTag": "chat_v1_generated_ChatService_CreateSpace_async", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -743,18 +743,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -808,12 +808,12 @@ "regionTag": "chat_v1_generated_ChatService_CreateSpace_sync", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -823,18 +823,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, 
"type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -3551,6 +3551,159 @@ ], "title": "chat_v1_generated_chat_service_list_spaces_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", + "shortName": "ChatServiceAsyncClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.search_spaces", + "method": { + "fullName": "google.chat.v1.ChatService.SearchSpaces", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "SearchSpaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.SearchSpacesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.chat_v1.services.chat_service.pagers.SearchSpacesAsyncPager", + "shortName": "search_spaces" + }, + "description": "Sample for SearchSpaces", + "file": "chat_v1_generated_chat_service_search_spaces_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_SearchSpaces_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_search_spaces_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.chat_v1.ChatServiceClient", + "shortName": "ChatServiceClient" + }, + "fullName": 
"google.apps.chat_v1.ChatServiceClient.search_spaces", + "method": { + "fullName": "google.chat.v1.ChatService.SearchSpaces", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "SearchSpaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.SearchSpacesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.chat_v1.services.chat_service.pagers.SearchSpacesPager", + "shortName": "search_spaces" + }, + "description": "Sample for SearchSpaces", + "file": "chat_v1_generated_chat_service_search_spaces_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_SearchSpaces_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_search_spaces_sync.py" + }, { "canonical": true, "clientMethod": { @@ -3596,12 +3749,12 @@ "regionTag": "chat_v1_generated_ChatService_SetUpSpace_async", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -3611,18 +3764,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -3672,12 +3825,12 @@ "regionTag": 
"chat_v1_generated_ChatService_SetUpSpace_sync", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -3687,18 +3840,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -4264,12 +4417,12 @@ "regionTag": "chat_v1_generated_ChatService_UpdateSpace_async", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4279,18 +4432,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -4348,12 +4501,12 @@ "regionTag": "chat_v1_generated_ChatService_UpdateSpace_sync", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4363,18 +4516,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py b/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py index 6dfa6d4d93f8..18fea55c4198 100644 --- a/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py +++ b/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py @@ -40,31 +40,32 @@ class chatCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 
'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'complete_import_space': ('name', ), - 'create_membership': ('parent', 'membership', ), + 'create_membership': ('parent', 'membership', 'use_admin_access', ), 'create_message': ('parent', 'message', 'thread_key', 'request_id', 'message_reply_option', 'message_id', ), 'create_reaction': ('parent', 'reaction', ), 'create_space': ('space', 'request_id', ), - 'delete_membership': ('name', ), + 'delete_membership': ('name', 'use_admin_access', ), 'delete_message': ('name', 'force', ), 'delete_reaction': ('name', ), - 'delete_space': ('name', ), + 'delete_space': ('name', 'use_admin_access', ), 'find_direct_message': ('name', ), 'get_attachment': ('name', ), - 'get_membership': ('name', ), + 'get_membership': ('name', 'use_admin_access', ), 'get_message': ('name', ), - 'get_space': ('name', ), + 'get_space': ('name', 'use_admin_access', ), 'get_space_event': ('name', ), 'get_space_read_state': ('name', ), 'get_thread_read_state': ('name', ), - 'list_memberships': ('parent', 'page_size', 'page_token', 'filter', 'show_groups', 'show_invited', ), + 'list_memberships': ('parent', 'page_size', 'page_token', 'filter', 'show_groups', 'show_invited', 'use_admin_access', ), 'list_messages': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'show_deleted', ), 'list_reactions': ('parent', 'page_size', 'page_token', 'filter', ), 'list_space_events': ('parent', 'filter', 'page_size', 'page_token', ), 'list_spaces': ('page_size', 'page_token', 'filter', ), + 'search_spaces': ('query', 'use_admin_access', 'page_size', 'page_token', 'order_by', ), 'set_up_space': ('space', 'request_id', 'memberships', ), - 'update_membership': ('membership', 'update_mask', ), + 'update_membership': ('membership', 'update_mask', 'use_admin_access', ), 'update_message': ('message', 'update_mask', 'allow_missing', ), - 'update_space': ('space', 'update_mask', ), + 'update_space': ('space', 'update_mask', 'use_admin_access', ), 
'update_space_read_state': ('space_read_state', 'update_mask', ), 'upload_attachment': ('parent', 'filename', ), } diff --git a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py index 28054fb2de8b..893992dd3ab5 100644 --- a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py +++ b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py @@ -1320,22 +1320,23 @@ async def test_create_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_message - ] = mock_object + ] = mock_rpc request = {} await client.create_message(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1718,22 +1719,23 @@ async def test_list_messages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_messages - ] = mock_object + ] = mock_rpc request = {} await client.list_messages(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_messages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2282,22 +2284,23 @@ async def test_list_memberships_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_memberships - ] = mock_object + ] = mock_rpc request = {} await client.list_memberships(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_memberships(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2846,22 +2849,23 @@ async def test_get_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_membership - ] = mock_object + ] = mock_rpc request = {} await client.get_membership(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3232,22 +3236,23 @@ async def test_get_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_message - ] = mock_object + ] = mock_rpc request = {} await client.get_message(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3618,22 +3623,23 @@ async def test_update_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_message - ] = mock_object + ] = mock_rpc request = {} await client.update_message(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3993,22 +3999,23 @@ async def test_delete_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_message - ] = mock_object + ] = mock_rpc request = {} await client.delete_message(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4363,22 +4370,23 @@ async def test_get_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_attachment - ] = mock_object + ] = mock_rpc request = {} await client.get_attachment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4744,22 +4752,23 @@ async def test_upload_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upload_attachment - ] = mock_object + ] = mock_rpc request = {} await client.upload_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.upload_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5031,27 +5040,448 @@ async def test_list_spaces_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_spaces - ] = mock_object + ] = mock_rpc request = {} await client.list_spaces(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_spaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_spaces_async( + transport: str = "grpc_asyncio", request_type=space.ListSpacesRequest +): + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_spaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.ListSpacesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_spaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = space.ListSpacesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSpacesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_spaces_async_from_dict(): + await test_list_spaces_async(request_type=dict) + + +def test_list_spaces_pager(transport_name: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_spaces), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.ListSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_spaces(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, space.Space) for i in results) + + +def test_list_spaces_pages(transport_name: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_spaces), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.ListSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + pages = list(client.list_spaces(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_spaces_async_pager(): + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_spaces), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.ListSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_spaces( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, space.Space) for i in responses) + + +@pytest.mark.asyncio +async def test_list_spaces_async_pages(): + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_spaces), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.ListSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_spaces(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + space.SearchSpacesRequest, + dict, + ], +) +def test_search_spaces(request_type, transport: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = space.SearchSpacesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + response = client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = space.SearchSpacesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchSpacesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_search_spaces_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.search_spaces() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == space.SearchSpacesRequest() + + +def test_search_spaces_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = space.SearchSpacesRequest( + page_token="page_token_value", + query="query_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.search_spaces(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == space.SearchSpacesRequest( + page_token="page_token_value", + query="query_value", + order_by="order_by_value", + ) + + +def test_search_spaces_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_spaces in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.search_spaces] = mock_rpc + request = {} + client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.search_spaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_search_spaces_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.SearchSpacesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.search_spaces() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == space.SearchSpacesRequest() + + +@pytest.mark.asyncio +async def test_search_spaces_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.search_spaces + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.search_spaces + ] = mock_rpc + + request = {} + await client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.search_spaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_spaces_async( - transport: str = "grpc_asyncio", request_type=space.ListSpacesRequest +async def test_search_spaces_async( + transport: str = "grpc_asyncio", request_type=space.SearchSpacesRequest ): client = ChatServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5063,42 +5493,44 @@ async def test_list_spaces_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_spaces), "__call__") as call: + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - space.ListSpacesResponse( + space.SearchSpacesResponse( next_page_token="next_page_token_value", + total_size=1086, ) ) - response = await client.list_spaces(request) + response = await client.search_spaces(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = space.ListSpacesRequest() + request = space.SearchSpacesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSpacesAsyncPager) + assert isinstance(response, pagers.SearchSpacesAsyncPager) assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 @pytest.mark.asyncio -async def test_list_spaces_async_from_dict(): - await test_list_spaces_async(request_type=dict) +async def test_search_spaces_async_from_dict(): + await test_search_spaces_async(request_type=dict) -def test_list_spaces_pager(transport_name: str = "grpc"): +def test_search_spaces_pager(transport_name: str = "grpc"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_spaces), "__call__") as call: + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -5106,17 +5538,17 @@ def test_list_spaces_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[], next_page_token="def", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), ], next_page_token="ghi", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -5128,7 +5560,7 @@ def test_list_spaces_pager(transport_name: str = "grpc"): expected_metadata = () retry = retries.Retry() timeout = 5 - pager = client.list_spaces(request={}, retry=retry, timeout=timeout) + pager = client.search_spaces(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -5139,17 +5571,17 @@ def test_list_spaces_pager(transport_name: str = "grpc"): assert all(isinstance(i, space.Space) for i in results) -def 
test_list_spaces_pages(transport_name: str = "grpc"): +def test_search_spaces_pages(transport_name: str = "grpc"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_spaces), "__call__") as call: + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -5157,17 +5589,17 @@ def test_list_spaces_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[], next_page_token="def", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), ], next_page_token="ghi", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -5175,24 +5607,24 @@ def test_list_spaces_pages(transport_name: str = "grpc"): ), RuntimeError, ) - pages = list(client.list_spaces(request={}).pages) + pages = list(client.search_spaces(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_spaces_async_pager(): +async def test_search_spaces_async_pager(): client = ChatServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_spaces), "__call__", new_callable=mock.AsyncMock + type(client.transport.search_spaces), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -5200,17 +5632,17 @@ async def test_list_spaces_async_pager(): ], next_page_token="abc", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[], next_page_token="def", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), ], next_page_token="ghi", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -5218,7 +5650,7 @@ async def test_list_spaces_async_pager(): ), RuntimeError, ) - async_pager = await client.list_spaces( + async_pager = await client.search_spaces( request={}, ) assert async_pager.next_page_token == "abc" @@ -5231,18 +5663,18 @@ async def test_list_spaces_async_pager(): @pytest.mark.asyncio -async def test_list_spaces_async_pages(): +async def test_search_spaces_async_pages(): client = ChatServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_spaces), "__call__", new_callable=mock.AsyncMock + type(client.transport.search_spaces), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -5250,17 +5682,17 @@ async def test_list_spaces_async_pages(): ], next_page_token="abc", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[], next_page_token="def", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), ], next_page_token="ghi", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -5272,7 +5704,7 @@ async def test_list_spaces_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_spaces(request={}) + await client.search_spaces(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -5312,6 +5744,7 @@ def test_get_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.get_space(request) @@ -5477,22 +5910,23 @@ async def test_get_space_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_space - ] = mock_object + ] = mock_rpc request = {} await client.get_space(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5731,6 +6165,7 @@ def test_create_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.create_space(request) @@ -5898,22 +6333,23 @@ async def test_create_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_space - ] = mock_object + ] = mock_rpc request = {} await client.create_space(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6093,6 +6529,7 @@ def test_set_up_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.set_up_space(request) @@ -6260,22 +6697,23 @@ async def test_set_up_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_up_space - ] = mock_object + ] = mock_rpc request = {} await client.set_up_space(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_up_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6375,6 +6813,7 @@ def test_update_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.update_space(request) @@ -6538,22 +6977,23 @@ async def test_update_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_space - ] = mock_object + ] = mock_rpc request = {} await client.update_space(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6926,22 +7366,23 @@ async def test_delete_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_space - ] = mock_object + ] = mock_rpc request = {} await client.delete_space(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7289,22 +7730,23 @@ async def test_complete_import_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_import_space - ] = mock_object + ] = mock_rpc request = {} await client.complete_import_space(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_import_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7445,6 +7887,7 @@ def test_find_direct_message(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.find_direct_message(request) @@ -7622,22 +8065,23 @@ async def test_find_direct_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.find_direct_message - ] = mock_object + ] = mock_rpc request = {} await client.find_direct_message(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.find_direct_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7886,22 +8330,23 @@ async def test_create_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_membership - ] = mock_object + ] = mock_rpc request = {} await client.create_membership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8286,22 +8731,23 @@ async def test_update_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_membership - ] = mock_object + ] = mock_rpc request = {} await client.update_membership(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8690,22 +9136,23 @@ async def test_delete_membership_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_membership - ] = mock_object + ] = mock_rpc request = {} await client.delete_membership(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9068,22 +9515,23 @@ async def test_create_reaction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_reaction - ] = mock_object + ] = mock_rpc request = {} await client.create_reaction(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_reaction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9446,22 +9894,23 @@ async def test_list_reactions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_reactions - ] = mock_object + ] = mock_rpc request = {} await client.list_reactions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_reactions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9997,22 +10446,23 @@ async def test_delete_reaction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_reaction - ] = mock_object + ] = mock_rpc request = {} await client.delete_reaction(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_reaction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10364,22 +10814,23 @@ async def test_get_space_read_state_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_space_read_state - ] = mock_object + ] = mock_rpc request = {} await client.get_space_read_state(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_space_read_state(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10748,22 +11199,23 @@ async def test_update_space_read_state_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_space_read_state - ] = mock_object + ] = mock_rpc request = {} await client.update_space_read_state(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_space_read_state(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11146,22 +11598,23 @@ async def test_get_thread_read_state_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_thread_read_state - ] = mock_object + ] = mock_rpc request = {} await client.get_thread_read_state(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_thread_read_state(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11524,22 +11977,23 @@ async def test_get_space_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_space_event - ] = mock_object + ] = mock_rpc request = {} await client.get_space_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_space_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11904,22 +12358,23 @@ async def test_list_space_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_space_events - ] = mock_object + ] = mock_rpc request = {} await client.list_space_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_space_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12629,6 +13084,11 @@ def test_create_message_rest(request_type): "drive_data_ref": {"drive_file_id": "drive_file_id_value"}, "mime_type": "mime_type_value", }, + "chat_space_link_data": { + "space": "space_value", + "thread": "thread_value", + "message": "message_value", + }, }, } ], @@ -12649,9 +13109,28 @@ def test_create_message_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + 
"manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, }, "fallback_text": "fallback_text_value", "action_response": { @@ -13579,6 +14058,7 @@ def test_list_memberships_rest_required_fields( "page_token", "show_groups", "show_invited", + "use_admin_access", ) ) jsonified_request.update(unset_fields) @@ -13642,6 +14122,7 @@ def test_list_memberships_rest_unset_required_fields(): "pageToken", "showGroups", "showInvited", + "useAdminAccess", ) ) & set(("parent",)) @@ -13954,6 +14435,8 @@ def test_get_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).get_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14007,7 +14490,7 @@ def test_get_membership_rest_unset_required_fields(): ) unset_fields = transport.get_membership._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -14762,6 +15245,11 @@ def test_update_message_rest(request_type): "drive_data_ref": {"drive_file_id": "drive_file_id_value"}, "mime_type": "mime_type_value", }, + "chat_space_link_data": { + "space": "space_value", + "thread": "thread_value", + "message": "message_value", + }, }, } ], @@ -14782,9 +15270,28 @@ def test_update_message_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + 
"predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, }, "fallback_text": "fallback_text_value", "action_response": { @@ -15975,7 +16482,170 @@ def test_upload_attachment_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_upload_attachment_rest_interceptors(null_interceptor): +def test_upload_attachment_rest_interceptors(null_interceptor): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ChatServiceRestInterceptor(), + ) + client = ChatServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_upload_attachment" + ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_upload_attachment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = attachment.UploadAttachmentRequest.pb( + attachment.UploadAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = attachment.UploadAttachmentResponse.to_json( + attachment.UploadAttachmentResponse() + ) + + request = attachment.UploadAttachmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = attachment.UploadAttachmentResponse() + + 
client.upload_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_upload_attachment_rest_bad_request( + transport: str = "rest", request_type=attachment.UploadAttachmentRequest +): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.upload_attachment(request) + + +def test_upload_attachment_rest_error(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + space.ListSpacesRequest, + dict, + ], +) +def test_list_spaces_rest(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = space.ListSpacesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = space.ListSpacesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_spaces(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSpacesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_spaces_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_spaces in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_spaces] = mock_rpc + + request = {} + client.list_spaces(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_spaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_spaces_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15988,15 +16658,13 @@ def test_upload_attachment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_upload_attachment" + transports.ChatServiceRestInterceptor, "post_list_spaces" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_upload_attachment" + transports.ChatServiceRestInterceptor, "pre_list_spaces" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = attachment.UploadAttachmentRequest.pb( - attachment.UploadAttachmentRequest() - ) + pb_message = space.ListSpacesRequest.pb(space.ListSpacesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16007,19 +16675,19 @@ def test_upload_attachment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = attachment.UploadAttachmentResponse.to_json( - attachment.UploadAttachmentResponse() + req.return_value._content = space.ListSpacesResponse.to_json( + space.ListSpacesResponse() ) - request = attachment.UploadAttachmentRequest() + request = space.ListSpacesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = attachment.UploadAttachmentResponse() + post.return_value = space.ListSpacesResponse() - client.upload_attachment( + client.list_spaces( request, metadata=[ ("key", "val"), @@ -16031,8 +16699,8 @@ 
def test_upload_attachment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_upload_attachment_rest_bad_request( - transport: str = "rest", request_type=attachment.UploadAttachmentRequest +def test_list_spaces_rest_bad_request( + transport: str = "rest", request_type=space.ListSpacesRequest ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16040,7 +16708,7 @@ def test_upload_attachment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16052,23 +16720,78 @@ def test_upload_attachment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.upload_attachment(request) + client.list_spaces(request) -def test_upload_attachment_rest_error(): +def test_list_spaces_rest_pager(transport: str = "rest"): client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.ListSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(space.ListSpacesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_spaces(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, space.Space) for i in results) + + pages = list(client.list_spaces(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - space.ListSpacesRequest, + space.SearchSpacesRequest, dict, ], ) -def test_list_spaces_rest(request_type): +def test_search_spaces_rest(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16081,27 +16804,29 @@ def test_list_spaces_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = space.ListSpacesResponse( + return_value = space.SearchSpacesResponse( next_page_token="next_page_token_value", + total_size=1086, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.ListSpacesResponse.pb(return_value) + return_value = space.SearchSpacesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_spaces(request) + response = client.search_spaces(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSpacesPager) + assert isinstance(response, pagers.SearchSpacesPager) assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 -def test_list_spaces_rest_use_cached_wrapped_rpc(): +def test_search_spaces_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16115,30 +16840,141 @@ def test_list_spaces_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_spaces in client._transport._wrapped_methods + assert client._transport.search_spaces in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_spaces] = mock_rpc + client._transport._wrapped_methods[client._transport.search_spaces] = mock_rpc request = {} - client.list_spaces(request) + client.search_spaces(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_spaces(request) + client.search_spaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +def test_search_spaces_rest_required_fields(request_type=space.SearchSpacesRequest): + transport_class = transports.ChatServiceRestTransport + + request_init = {} + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "query" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_spaces._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "query" in jsonified_request + assert jsonified_request["query"] == request_init["query"] + + jsonified_request["query"] = "query_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_spaces._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "order_by", + "page_size", + "page_token", + "query", + "use_admin_access", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" + + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = space.SearchSpacesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = space.SearchSpacesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.search_spaces(request) + + expected_params = [ + ( + "query", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_search_spaces_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.search_spaces._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "orderBy", + "pageSize", + "pageToken", + "query", + "useAdminAccess", + ) + ) + & set(("query",)) + ) + + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_spaces_rest_interceptors(null_interceptor): +def test_search_spaces_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16151,13 +16987,13 @@ def test_list_spaces_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as 
transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_list_spaces" + transports.ChatServiceRestInterceptor, "post_search_spaces" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_list_spaces" + transports.ChatServiceRestInterceptor, "pre_search_spaces" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = space.ListSpacesRequest.pb(space.ListSpacesRequest()) + pb_message = space.SearchSpacesRequest.pb(space.SearchSpacesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16168,19 +17004,19 @@ def test_list_spaces_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = space.ListSpacesResponse.to_json( - space.ListSpacesResponse() + req.return_value._content = space.SearchSpacesResponse.to_json( + space.SearchSpacesResponse() ) - request = space.ListSpacesRequest() + request = space.SearchSpacesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space.ListSpacesResponse() + post.return_value = space.SearchSpacesResponse() - client.list_spaces( + client.search_spaces( request, metadata=[ ("key", "val"), @@ -16192,8 +17028,8 @@ def test_list_spaces_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_spaces_rest_bad_request( - transport: str = "rest", request_type=space.ListSpacesRequest +def test_search_spaces_rest_bad_request( + transport: str = "rest", request_type=space.SearchSpacesRequest ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16213,10 +17049,10 @@ def test_list_spaces_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_spaces(request) + client.search_spaces(request) -def test_list_spaces_rest_pager(transport: str = "rest"): +def 
test_search_spaces_rest_pager(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16228,7 +17064,7 @@ def test_list_spaces_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -16236,17 +17072,17 @@ def test_list_spaces_rest_pager(transport: str = "rest"): ], next_page_token="abc", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[], next_page_token="def", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), ], next_page_token="ghi", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -16257,7 +17093,7 @@ def test_list_spaces_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(space.ListSpacesResponse.to_json(x) for x in response) + response = tuple(space.SearchSpacesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -16266,13 +17102,13 @@ def test_list_spaces_rest_pager(transport: str = "rest"): sample_request = {} - pager = client.list_spaces(request=sample_request) + pager = client.search_spaces(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, space.Space) for i in results) - pages = list(client.list_spaces(request=sample_request).pages) + pages = list(client.search_spaces(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -16310,6 +17146,7 @@ def test_get_space_rest(request_type): import_mode=True, admin_installed=True, 
space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -16403,6 +17240,8 @@ def test_get_space_rest_required_fields(request_type=space.GetSpaceRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).get_space._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -16456,7 +17295,7 @@ def test_get_space_rest_unset_required_fields(): ) unset_fields = transport.get_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -16628,9 +17467,28 @@ def test_create_space_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {"seconds": 751, "nanos": 543}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -16717,6 +17575,7 @@ def get_message_fields(field): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -17033,6 +17892,7 @@ def test_set_up_space_rest(request_type): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -17292,9 +18152,28 @@ def test_update_space_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {"seconds": 751, "nanos": 543}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -17381,6 +18260,7 @@ def get_message_fields(field): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -17472,7 +18352,12 @@ def test_update_space_rest_required_fields(request_type=gc_space.UpdateSpaceRequ credentials=ga_credentials.AnonymousCredentials() ).update_space._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "update_mask", + "use_admin_access", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17525,7 +18410,15 @@ def test_update_space_rest_unset_required_fields(): ) unset_fields = transport.update_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("space",))) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "useAdminAccess", + ) + ) + & set(("space",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -17764,6 +18657,8 @@ def test_delete_space_rest_required_fields(request_type=space.DeleteSpaceRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).delete_space._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17814,7 +18709,7 @@ def test_delete_space_rest_unset_required_fields(): ) unset_fields = transport.delete_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -18229,6 +19124,7 @@ def test_find_direct_message_rest(request_type): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -18669,6 +19565,8 @@ def test_create_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).create_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -18724,7 +19622,7 @@ def test_create_membership_rest_unset_required_fields(): unset_fields = transport.create_membership._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("useAdminAccess",)) & set( ( "parent", @@ -19067,7 +19965,12 @@ def test_update_membership_rest_required_fields( credentials=ga_credentials.AnonymousCredentials() ).update_membership._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "update_mask", + "use_admin_access", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19121,7 +20024,12 @@ def test_update_membership_rest_unset_required_fields(): unset_fields = transport.update_membership._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("updateMask",)) + set( + ( + "updateMask", + "useAdminAccess", + ) + ) & set( ( "membership", @@ -19385,6 +20293,8 @@ def test_delete_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).delete_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19438,7 +20348,7 @@ def test_delete_membership_rest_unset_required_fields(): ) unset_fields = transport.delete_membership._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -22491,6 +23401,7 @@ def test_chat_service_base_transport(): "get_attachment", "upload_attachment", "list_spaces", + "search_spaces", "get_space", "create_space", "set_up_space", @@ -22884,6 +23795,9 @@ def test_chat_service_client_transport_session_collision(transport_name): session1 = client1.transport.list_spaces._session session2 = client2.transport.list_spaces._session assert session1 != session2 + session1 = client1.transport.search_spaces._session + session2 = client2.transport.search_spaces._session + assert session1 != session2 session1 = client1.transport.get_space._session session2 = client2.transport.get_space._session assert session1 != 
session2 diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/async_client.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/async_client.py index 78d8fb9cc72b..819b85fa0aaf 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/async_client.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -206,10 +205,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SubscriptionsServiceClient).get_transport_class, - type(SubscriptionsServiceClient), - ) + get_transport_class = SubscriptionsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py index 986f228ec4bf..738eea50f5e0 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py @@ -703,7 +703,7 @@ def __init__( Type[SubscriptionsServiceTransport], Callable[..., SubscriptionsServiceTransport], ] = ( - type(self).get_transport_class(transport) + SubscriptionsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SubscriptionsServiceTransport], transport) ) diff --git a/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json b/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json index f45e3c572dbc..367a176961c1 100644 --- a/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json +++ b/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-events-subscriptions", - "version": "0.1.2" + 
"version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py b/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py index 25cce96bedd4..b142e2d9e8f3 100644 --- a/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py +++ b/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py @@ -1328,8 +1328,9 @@ def test_create_subscription_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_subscription(request) @@ -1385,26 +1386,28 @@ async def test_create_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_subscription - ] = mock_object + ] = mock_rpc request = {} await client.create_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1664,8 +1667,9 @@ def test_delete_subscription_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_subscription(request) @@ -1721,26 +1725,28 @@ async def test_delete_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_subscription - ] = mock_object + ] = mock_rpc request = {} await client.delete_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2126,22 +2132,23 @@ async def test_get_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_subscription - ] = mock_object + ] = mock_rpc request = {} await client.get_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2524,22 +2531,23 @@ async def test_list_subscriptions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_subscriptions - ] = mock_object + ] = mock_rpc request = {} await client.list_subscriptions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_subscriptions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2983,8 +2991,9 @@ def test_update_subscription_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_subscription(request) @@ -3040,26 +3049,28 @@ async def test_update_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_subscription - ] = mock_object + ] = mock_rpc request = {} await client.update_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3393,8 +3404,9 @@ def test_reactivate_subscription_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reactivate_subscription(request) @@ -3450,26 +3462,28 @@ async def test_reactivate_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reactivate_subscription - ] = mock_object + ] = mock_rpc request = {} await client.reactivate_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reactivate_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-apps-meet/google/apps/meet/gapic_version.py b/packages/google-apps-meet/google/apps/meet/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-apps-meet/google/apps/meet/gapic_version.py +++ b/packages/google-apps-meet/google/apps/meet/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py b/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py +++ b/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/async_client.py b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/async_client.py index 3e05db20e0fd..d1eeabee1a10 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/async_client.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -225,10 +224,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConferenceRecordsServiceClient).get_transport_class, - type(ConferenceRecordsServiceClient), - ) + get_transport_class = ConferenceRecordsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py index b99ecbce32bc..4001fe724b57 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py @@ -785,7 +785,7 @@ def __init__( Type[ConferenceRecordsServiceTransport], Callable[..., ConferenceRecordsServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConferenceRecordsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConferenceRecordsServiceTransport], transport) ) diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/async_client.py b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/async_client.py index 5791d5429b37..8ffd92ec7dd0 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/async_client.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SpacesServiceClient).get_transport_class, type(SpacesServiceClient) - ) + get_transport_class = SpacesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py index 585fb27f62a5..7a6bed8a44d2 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py @@ -667,7 +667,7 @@ def __init__( transport_init: Union[ Type[SpacesServiceTransport], Callable[..., SpacesServiceTransport] ] = ( - type(self).get_transport_class(transport) + SpacesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SpacesServiceTransport], transport) ) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py index ec8d212c9160..558c8aab67c5 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py index ac6349067970..a44ca33c3796 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -225,10 +224,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConferenceRecordsServiceClient).get_transport_class, - type(ConferenceRecordsServiceClient), - ) + get_transport_class = ConferenceRecordsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py index 81a836304d99..4764e9d20ae9 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py @@ -785,7 +785,7 @@ def __init__( Type[ConferenceRecordsServiceTransport], Callable[..., ConferenceRecordsServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConferenceRecordsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConferenceRecordsServiceTransport], transport) ) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py 
b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py index f66d5f40977d..3e1421737e28 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SpacesServiceClient).get_transport_class, type(SpacesServiceClient) - ) + get_transport_class = SpacesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py index 8af3974b5ca2..a8c4f208485c 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py @@ -667,7 +667,7 @@ def __init__( transport_init: Union[ Type[SpacesServiceTransport], Callable[..., SpacesServiceTransport] ] = ( - type(self).get_transport_class(transport) + SpacesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SpacesServiceTransport], transport) ) diff --git a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json index 4071af248422..1c795de189b2 100644 --- a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json +++ b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json @@ -8,7 +8,7 @@ ], "language": 
"PYTHON", "name": "google-apps-meet", - "version": "0.1.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json index c956685a5da0..bbee1583d40d 100644 --- a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json +++ b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-meet", - "version": "0.1.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py index 4d7cf4e2f4ce..e141ae8bb275 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py @@ -1379,22 +1379,23 @@ async def test_get_conference_record_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conference_record - ] = mock_object + ] = mock_rpc request = {} await client.get_conference_record(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conference_record(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1770,22 +1771,23 @@ async def test_list_conference_records_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conference_records - ] = mock_object + ] = mock_rpc request = {} await client.list_conference_records(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conference_records(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2192,22 +2194,23 @@ async def test_get_participant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_participant - ] = mock_object + ] = mock_rpc request = {} await client.get_participant(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_participant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2573,22 +2576,23 @@ async def test_list_participants_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_participants - ] = mock_object + ] = mock_rpc request = {} await client.list_participants(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_participants(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3164,22 +3168,23 @@ async def test_get_participant_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_participant_session - ] = mock_object + ] = mock_rpc request = {} await client.get_participant_session(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_participant_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3555,22 +3560,23 @@ async def test_list_participant_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_participant_sessions - ] = mock_object + ] = mock_rpc request = {} await client.list_participant_sessions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_participant_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4136,22 +4142,23 @@ async def test_get_recording_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_recording - ] = mock_object + ] = mock_rpc request = {} await client.get_recording(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_recording(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4500,22 +4507,23 @@ async def test_list_recordings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_recordings - ] = mock_object + ] = mock_rpc request = {} await client.list_recordings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_recordings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5061,22 +5069,23 @@ async def test_get_transcript_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transcript - ] = mock_object + ] = mock_rpc request = {} await client.get_transcript(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transcript(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5427,22 +5436,23 @@ async def test_list_transcripts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transcripts - ] = mock_object + ] = mock_rpc request = {} await client.list_transcripts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transcripts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6006,22 +6016,23 @@ async def test_get_transcript_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transcript_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_transcript_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transcript_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6401,22 +6412,23 @@ async def test_list_transcript_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transcript_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_transcript_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transcript_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py index bba44c0fa971..f9af1772962d 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py @@ -1282,22 +1282,23 @@ async def test_create_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_space - ] = mock_object + ] = mock_rpc request = {} await client.create_space(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1591,22 +1592,23 @@ async def test_get_space_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_space - ] = mock_object + ] = mock_rpc request = {} await client.get_space(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1957,22 +1959,23 @@ async def test_update_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_space - ] = mock_object + ] = mock_rpc request = {} await client.update_space(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2337,22 +2340,23 @@ async def test_end_active_conference_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.end_active_conference - ] = mock_object + ] = mock_rpc request = {} await client.end_active_conference(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.end_active_conference(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py index 3b6c1a68092e..37a89bcdfaba 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py @@ -1376,22 +1376,23 @@ async def test_get_conference_record_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conference_record - ] = mock_object + ] = mock_rpc request = {} await client.get_conference_record(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conference_record(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1767,22 +1768,23 @@ async def test_list_conference_records_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conference_records - ] = mock_object + ] = mock_rpc request = {} await client.list_conference_records(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conference_records(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2189,22 +2191,23 @@ async def test_get_participant_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_participant - ] = mock_object + ] = mock_rpc request = {} await client.get_participant(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_participant(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2570,22 +2573,23 @@ async def test_list_participants_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_participants - ] = mock_object + ] = mock_rpc request = {} await client.list_participants(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_participants(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3161,22 +3165,23 @@ async def test_get_participant_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_participant_session - ] = mock_object + ] = mock_rpc request = {} await client.get_participant_session(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_participant_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3552,22 +3557,23 @@ async def test_list_participant_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_participant_sessions - ] = mock_object + ] = mock_rpc request = {} await client.list_participant_sessions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_participant_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4133,22 +4139,23 @@ async def test_get_recording_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_recording - ] = mock_object + ] = mock_rpc request = {} await client.get_recording(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_recording(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4497,22 +4504,23 @@ async def test_list_recordings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_recordings - ] = mock_object + ] = mock_rpc request = {} await client.list_recordings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_recordings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5058,22 +5066,23 @@ async def test_get_transcript_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transcript - ] = mock_object + ] = mock_rpc request = {} await client.get_transcript(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transcript(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5424,22 +5433,23 @@ async def test_list_transcripts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transcripts - ] = mock_object + ] = mock_rpc request = {} await client.list_transcripts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transcripts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6003,22 +6013,23 @@ async def test_get_transcript_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transcript_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_transcript_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transcript_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6398,22 +6409,23 @@ async def test_list_transcript_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transcript_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_transcript_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transcript_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py index baaa4aa3e894..e3c1edc6f594 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py @@ -1279,22 +1279,23 @@ async def test_create_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_space - ] = mock_object + ] = mock_rpc request = {} await client.create_space(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1588,22 +1589,23 @@ async def test_get_space_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_space - ] = mock_object + ] = mock_rpc request = {} await client.get_space(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1954,22 +1956,23 @@ async def test_update_space_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_space - ] = mock_object + ] = mock_rpc request = {} await client.update_space(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2334,22 +2337,23 @@ async def test_end_active_conference_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.end_active_conference - ] = mock_object + ] = mock_rpc request = {} await client.end_active_conference(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.end_active_conference(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-area120-tables/google/area120/tables/gapic_version.py b/packages/google-area120-tables/google/area120/tables/gapic_version.py index 11e34cec2824..558c8aab67c5 100644 --- a/packages/google-area120-tables/google/area120/tables/gapic_version.py +++ b/packages/google-area120-tables/google/area120/tables/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py index 11e34cec2824..558c8aab67c5 100644 --- a/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/async_client.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/async_client.py index ee5242225d3a..5a27331668c0 100644 --- a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/async_client.py +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,9 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TablesServiceClient).get_transport_class, type(TablesServiceClient) - ) + get_transport_class = TablesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py index d87ea30e4e99..871e6857384e 100644 --- a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py @@ -699,7 +699,7 @@ def __init__( transport_init: Union[ Type[TablesServiceTransport], Callable[..., TablesServiceTransport] ] = ( - type(self).get_transport_class(transport) + TablesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TablesServiceTransport], transport) ) diff --git a/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json b/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json index 16f4052fc1ba..fdfb568d52d6 100644 --- a/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json +++ b/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-area120-tables", - "version": "0.11.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py 
b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py index d46a74ee41c8..580666937e7b 100644 --- a/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py +++ b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py @@ -1287,22 +1287,23 @@ async def test_get_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_table - ] = mock_object + ] = mock_rpc request = {} await client.get_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1649,22 +1650,23 @@ async def test_list_tables_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tables - ] = mock_object + ] = mock_rpc request = {} await client.list_tables(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tables(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2064,22 +2066,23 @@ async def test_get_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workspace - ] = mock_object + ] = mock_rpc request = {} await client.get_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2426,22 +2429,23 @@ async def test_list_workspaces_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workspaces - ] = mock_object + ] = mock_rpc request = {} await client.list_workspaces(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workspaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2836,22 +2840,23 @@ async def test_get_row_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_row - ] = mock_object + ] = mock_rpc request = {} await client.get_row(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_row(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3198,22 +3203,23 @@ async def test_list_rows_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_rows - ] = mock_object + ] = mock_rpc request = {} await client.list_rows(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_rows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3754,22 +3760,23 @@ async def test_create_row_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_row - ] = mock_object + ] = mock_rpc request = {} await client.create_row(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_row(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4129,22 +4136,23 @@ async def test_batch_create_rows_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_rows - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_rows(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_create_rows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4408,22 +4416,23 @@ async def test_update_row_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_row - ] = mock_object + ] = mock_rpc request = {} await client.update_row(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_row(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4783,22 +4792,23 @@ async def test_batch_update_rows_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_rows - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_rows(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_update_rows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5059,22 +5069,23 @@ async def test_delete_row_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_row - ] = mock_object + ] = mock_rpc request = {} await client.delete_row(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_row(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5417,22 +5428,23 @@ async def test_batch_delete_rows_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_rows - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_rows(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_delete_rows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py b/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py index f192c1b4f03b..558c8aab67c5 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py index f192c1b4f03b..558c8aab67c5 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/async_client.py b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/async_client.py index 5ce9fd99a7cf..2adcbbec15b4 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/async_client.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -239,9 +238,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AccessApprovalClient).get_transport_class, type(AccessApprovalClient) - ) + get_transport_class = AccessApprovalClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py index 867e35bd52d9..3a134b51689f 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py @@ -726,7 +726,7 @@ def __init__( transport_init: Union[ Type[AccessApprovalTransport], Callable[..., AccessApprovalTransport] ] = ( - type(self).get_transport_class(transport) + AccessApprovalClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AccessApprovalTransport], transport) ) diff --git 
a/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json b/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json index 794dd0aecd46..b1f693e8b12f 100644 --- a/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json +++ b/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-access-approval", - "version": "1.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py b/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py index 4afcbb8ae2b0..05777554f668 100644 --- a/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py +++ b/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py @@ -1300,22 +1300,23 @@ async def test_list_approval_requests_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_approval_requests - ] = mock_object + ] = mock_rpc request = {} await client.list_approval_requests(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_approval_requests(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1892,22 +1893,23 @@ async def test_get_approval_request_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_approval_request - ] = mock_object + ] = mock_rpc request = {} await client.get_approval_request(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_approval_request(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2285,22 +2287,23 @@ async def test_approve_approval_request_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.approve_approval_request - ] = mock_object + ] = mock_rpc request = {} await client.approve_approval_request(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.approve_approval_request(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2592,22 +2595,23 @@ async def test_dismiss_approval_request_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.dismiss_approval_request - ] = mock_object + ] = mock_rpc request = {} await client.dismiss_approval_request(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.dismiss_approval_request(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2899,22 +2903,23 @@ async def test_invalidate_approval_request_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.invalidate_approval_request - ] = mock_object + ] = mock_rpc request = {} await client.invalidate_approval_request(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.invalidate_approval_request(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3218,22 +3223,23 @@ async def test_get_access_approval_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_access_approval_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_access_approval_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_access_approval_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3627,22 +3633,23 @@ async def test_update_access_approval_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_access_approval_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_access_approval_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_access_approval_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4028,22 +4035,23 @@ async def test_delete_access_approval_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_access_approval_settings - ] = mock_object + ] = mock_rpc request = {} await client.delete_access_approval_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_access_approval_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4410,22 +4418,23 @@ async def test_get_access_approval_service_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_access_approval_service_account - ] = mock_object + ] = mock_rpc request = {} await client.get_access_approval_service_account(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_access_approval_service_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py index 3425e3287cda..558c8aab67c5 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py index ff477afcbd78..0de083efd228 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,10 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AdvisoryNotificationsServiceClient).get_transport_class, - type(AdvisoryNotificationsServiceClient), - ) + get_transport_class = AdvisoryNotificationsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py index 8b819b14687a..1596808d73f2 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py @@ -699,7 +699,7 @@ def __init__( Type[AdvisoryNotificationsServiceTransport], Callable[..., AdvisoryNotificationsServiceTransport], ] = ( - 
type(self).get_transport_class(transport) + AdvisoryNotificationsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., AdvisoryNotificationsServiceTransport], transport diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json index 32a422671417..d786a8bfb8cb 100644 --- a/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-advisorynotifications", - "version": "0.3.10" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py index 5369d76a3f66..d0845265e3ea 100644 --- a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py +++ b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py @@ -1393,22 +1393,23 @@ async def test_list_notifications_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_notifications - ] = mock_object + ] = mock_rpc request = {} await client.list_notifications(request) # Establish that the underlying gRPC stub method 
was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_notifications(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1981,22 +1982,23 @@ async def test_get_notification_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_notification - ] = mock_object + ] = mock_rpc request = {} await client.get_notification(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_notification(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2353,22 +2355,23 @@ async def test_get_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2714,22 +2717,23 @@ async def test_update_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors/gapic_version.py b/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors/gapic_version.py index 51d2795b9d6b..558c8aab67c5 100644 --- a/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors/gapic_version.py +++ b/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors_v1/gapic_version.py b/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors_v1/gapic_version.py index 51d2795b9d6b..558c8aab67c5 100644 --- a/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors_v1/gapic_version.py +++ b/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/CHANGELOG.md b/packages/google-cloud-alloydb/CHANGELOG.md index c2be3ab22ace..7545a07df323 100644 --- a/packages/google-cloud-alloydb/CHANGELOG.md +++ b/packages/google-cloud-alloydb/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## [0.3.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.3.12...google-cloud-alloydb-v0.3.13) (2024-09-03) + + +### Features + +* support for enabling outbound public IP on an instance ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) +* support for getting maintenance schedule of a cluster ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) +* support for getting outbound public IP addresses of an instance ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) +* support for setting maintenance update policy on a cluster ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) + ## 
[0.3.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.3.11...google-cloud-alloydb-v0.3.12) (2024-07-30) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py index ab68833be4be..fb3463bbb3c2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.12" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py index ab68833be4be..fb3463bbb3c2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.12" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py index 0b4f876d17f6..ad4f6077b5cd 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -215,9 +214,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AlloyDBAdminClient).get_transport_class, type(AlloyDBAdminClient) - ) + get_transport_class = AlloyDBAdminClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py index c6ebe8516c05..868bc60993fa 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py @@ -826,7 +826,7 @@ def __init__( transport_init: Union[ Type[AlloyDBAdminTransport], Callable[..., AlloyDBAdminTransport] ] = ( - type(self).get_transport_class(transport) + AlloyDBAdminClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AlloyDBAdminTransport], transport) ) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py index ab68833be4be..fb3463bbb3c2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.12" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py index 5497fbab5978..7166d66ef0e7 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,9 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AlloyDBAdminClient).get_transport_class, type(AlloyDBAdminClient) - ) + get_transport_class = AlloyDBAdminClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py index dadcbdee6bb5..d843322e819e 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py @@ -850,7 +850,7 @@ def __init__( transport_init: Union[ Type[AlloyDBAdminTransport], Callable[..., AlloyDBAdminTransport] ] = ( - type(self).get_transport_class(transport) + AlloyDBAdminClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AlloyDBAdminTransport], transport) ) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py index 648260737ae8..209ee9e5b9ce 100644 --- 
a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py @@ -35,6 +35,8 @@ EncryptionInfo, Instance, InstanceView, + MaintenanceSchedule, + MaintenanceUpdatePolicy, MigrationSource, SslConfig, SupportedDatabaseFlag, @@ -142,6 +144,8 @@ "ListSupportedDatabaseFlagsResponse", "ListUsersRequest", "ListUsersResponse", + "MaintenanceSchedule", + "MaintenanceUpdatePolicy", "MigrationSource", "OperationMetadata", "PromoteClusterRequest", diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py index ab68833be4be..fb3463bbb3c2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.12" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py index dc0acb2eb965..9e4dbdf4845b 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,9 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AlloyDBAdminClient).get_transport_class, type(AlloyDBAdminClient) - ) + get_transport_class = AlloyDBAdminClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py index 7c25c5f52022..4b4aa9d9597b 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py @@ -850,7 +850,7 @@ def __init__( transport_init: Union[ Type[AlloyDBAdminTransport], Callable[..., AlloyDBAdminTransport] ] = ( - type(self).get_transport_class(transport) + AlloyDBAdminClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AlloyDBAdminTransport], transport) ) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py index 69269be33581..262b798ccfc1 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py @@ -29,6 +29,8 @@ EncryptionInfo, Instance, InstanceView, + MaintenanceSchedule, + MaintenanceUpdatePolicy, MigrationSource, SslConfig, SupportedDatabaseFlag, @@ -95,6 +97,8 @@ "EncryptionConfig", "EncryptionInfo", "Instance", + "MaintenanceSchedule", + "MaintenanceUpdatePolicy", "MigrationSource", "SslConfig", "SupportedDatabaseFlag", diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py 
b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py index ca93ce160aae..9cf140f311a4 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py @@ -40,6 +40,8 @@ "ContinuousBackupInfo", "BackupSource", "ContinuousBackupSource", + "MaintenanceUpdatePolicy", + "MaintenanceSchedule", "Cluster", "Instance", "ConnectionInfo", @@ -622,6 +624,69 @@ class ContinuousBackupSource(proto.Message): ) +class MaintenanceUpdatePolicy(proto.Message): + r"""MaintenanceUpdatePolicy defines the policy for system + updates. + + Attributes: + maintenance_windows (MutableSequence[google.cloud.alloydb_v1beta.types.MaintenanceUpdatePolicy.MaintenanceWindow]): + Preferred windows to perform maintenance. + Currently limited to 1. + """ + + class MaintenanceWindow(proto.Message): + r"""MaintenanceWindow specifies a preferred day and time for + maintenance. + + Attributes: + day (google.type.dayofweek_pb2.DayOfWeek): + Preferred day of the week for maintenance, + e.g. MONDAY, TUESDAY, etc. + start_time (google.type.timeofday_pb2.TimeOfDay): + Preferred time to start the maintenance + operation on the specified day. Maintenance will + start within 1 hour of this time. + """ + + day: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + + maintenance_windows: MutableSequence[MaintenanceWindow] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=MaintenanceWindow, + ) + + +class MaintenanceSchedule(proto.Message): + r"""MaintenanceSchedule stores the maintenance schedule generated + from the MaintenanceUpdatePolicy, once a maintenance rollout is + triggered, if MaintenanceWindow is set, and if there is no + conflicting DenyPeriod. 
The schedule is cleared once the update + takes place. This field cannot be manually changed; modify the + MaintenanceUpdatePolicy instead. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The scheduled start time for the + maintenance. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + + class Cluster(proto.Message): r"""A cluster is a collection of regional AlloyDB resources. It can include a primary instance and one or more read pool @@ -693,7 +758,7 @@ class Cluster(proto.Message): cluster resources are created and from which they are accessible via Private IP. The network must belong to the same project as the cluster. It is specified in the form: - "projects/{project}/global/networks/{network_id}". This is + ``projects/{project}/global/networks/{network_id}``. This is required to create a cluster. Deprecated, use network_config.network instead. etag (str): @@ -752,6 +817,13 @@ class Cluster(proto.Message): specific to PRIMARY cluster. satisfies_pzs (bool): Output only. Reserved for future use. + maintenance_update_policy (google.cloud.alloydb_v1beta.types.MaintenanceUpdatePolicy): + Optional. The maintenance update policy + determines when to allow or deny updates. + maintenance_schedule (google.cloud.alloydb_v1beta.types.MaintenanceSchedule): + Output only. The maintenance schedule for the + cluster, generated for a specific rollout if a + maintenance window is set. """ class State(proto.Enum): @@ -830,7 +902,7 @@ class NetworkConfig(proto.Message): cluster resources are created and from which they are accessible via Private IP. The network must belong to the same project as the cluster. It is specified in the form: - "projects/{project_number}/global/networks/{network_id}". + ``projects/{project_number}/global/networks/{network_id}``. This is required to create a cluster. allocated_ip_range (str): Optional. 
Name of the allocated IP range for the private IP @@ -1014,6 +1086,16 @@ class PrimaryConfig(proto.Message): proto.BOOL, number=30, ) + maintenance_update_policy: "MaintenanceUpdatePolicy" = proto.Field( + proto.MESSAGE, + number=32, + message="MaintenanceUpdatePolicy", + ) + maintenance_schedule: "MaintenanceSchedule" = proto.Field( + proto.MESSAGE, + number=37, + message="MaintenanceSchedule", + ) class Instance(proto.Message): @@ -1147,6 +1229,9 @@ class Instance(proto.Message): network_config (google.cloud.alloydb_v1beta.types.Instance.InstanceNetworkConfig): Optional. Instance level network configuration. + outbound_public_ip_addresses (MutableSequence[str]): + Output only. All outbound public IP addresses + configured for the instance. """ class State(proto.Enum): @@ -1514,6 +1599,10 @@ class InstanceNetworkConfig(proto.Message): enable_public_ip (bool): Optional. Enabling public ip for the instance. + enable_outbound_public_ip (bool): + Optional. Enabling an outbound public IP + address to support a database server sending + requests out into the internet. 
""" class AuthorizedNetwork(proto.Message): @@ -1542,6 +1631,10 @@ class AuthorizedNetwork(proto.Message): proto.BOOL, number=2, ) + enable_outbound_public_ip: bool = proto.Field( + proto.BOOL, + number=3, + ) name: str = proto.Field( proto.STRING, @@ -1669,6 +1762,10 @@ class AuthorizedNetwork(proto.Message): number=29, message=InstanceNetworkConfig, ) + outbound_public_ip_addresses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=34, + ) class ConnectionInfo(proto.Message): diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json index d3ea7218d351..70a94ca97046 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.12" + "version": "0.3.13" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json index 56f45a7164c9..1ed9939262fa 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.12" + "version": "0.3.13" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json index 
f2e7b2fbe03f..8aac8040c177 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.12" + "version": "0.3.13" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py index f937e65bcc12..3714063ac2bb 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py @@ -1287,22 +1287,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1871,22 +1872,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2195,8 +2197,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -2250,26 +2253,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2591,8 +2596,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -2646,26 +2652,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2981,8 +2989,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -3036,26 +3045,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3349,8 +3360,9 @@ def test_promote_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.promote_cluster(request) @@ -3404,26 +3416,28 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.promote_cluster - ] = mock_object + ] = mock_rpc request = {} await client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.promote_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3717,8 +3731,9 @@ def test_restore_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_cluster(request) @@ -3772,26 +3787,28 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_cluster - ] = mock_object + ] = mock_rpc request = {} await client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4014,8 +4031,9 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_secondary_cluster(request) @@ -4071,26 +4089,28 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secondary_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_secondary_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_secondary_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4487,22 +4507,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5074,22 +5095,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5400,8 +5422,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -5455,26 +5478,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5799,8 +5824,9 @@ def test_create_secondary_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_secondary_instance(request) @@ -5856,26 +5882,28 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secondary_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_secondary_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6208,8 +6236,9 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_instances(request) @@ -6265,26 +6294,28 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_instances - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6498,8 +6529,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -6553,26 +6585,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6876,8 +6910,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -6931,26 +6966,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7250,8 +7287,9 @@ def test_failover_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.failover_instance(request) @@ -7307,26 +7345,28 @@ async def test_failover_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.failover_instance - ] = mock_object + ] = mock_rpc request = {} await client.failover_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.failover_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7628,8 +7668,9 @@ def test_inject_fault_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.inject_fault(request) @@ -7683,26 +7724,28 @@ async def test_inject_fault_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.inject_fault - ] = mock_object + ] = mock_rpc request = {} await client.inject_fault(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.inject_fault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8006,8 +8049,9 @@ def test_restart_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restart_instance(request) @@ -8061,26 +8105,28 @@ async def test_restart_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restart_instance - ] = mock_object + ] = mock_rpc request = {} await client.restart_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restart_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8435,22 +8481,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9026,22 +9073,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9356,8 +9404,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -9411,26 +9460,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9740,8 +9791,9 @@ def test_update_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_backup(request) @@ -9795,26 +9847,28 @@ async def test_update_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup - ] = mock_object + ] = mock_rpc request = {} await client.update_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10118,8 +10172,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -10173,26 +10228,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10553,22 +10610,23 @@ async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_supported_database_flags - ] = mock_object + ] = mock_rpc request = {} await client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_supported_database_flags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11152,22 +11210,23 @@ async def test_generate_client_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_client_certificate - ] = mock_object + ] = mock_rpc request = {} await client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_client_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11549,22 +11608,23 @@ async def test_get_connection_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection_info - ] = mock_object + ] = mock_rpc request = {} await client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11934,22 +11994,23 @@ async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_users - ] = mock_object + ] = mock_rpc request = {} await client.list_users(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_users(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12501,22 +12562,23 @@ async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_user - ] = mock_object + ] = mock_rpc request = {} await client.get_user(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12880,22 +12942,23 @@ async def test_create_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_user - ] = mock_object + ] = mock_rpc request = {} await client.create_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13275,22 +13338,23 @@ async def test_update_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_user - ] = mock_object + ] = mock_rpc request = {} await client.update_user(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13646,22 +13710,23 @@ async def test_delete_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_user - ] = mock_object + ] = mock_rpc request = {} await client.delete_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py index 79cd2240746d..631b1b909ac2 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py @@ -1287,22 +1287,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1877,22 +1878,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2205,8 +2207,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -2260,26 +2263,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2601,8 +2606,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -2656,26 +2662,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2991,8 +2999,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -3046,26 +3055,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3359,8 +3370,9 @@ def test_promote_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.promote_cluster(request) @@ -3414,26 +3426,28 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.promote_cluster - ] = mock_object + ] = mock_rpc request = {} await client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.promote_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3727,8 +3741,9 @@ def test_restore_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_cluster(request) @@ -3782,26 +3797,28 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_cluster - ] = mock_object + ] = mock_rpc request = {} await client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4024,8 +4041,9 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_secondary_cluster(request) @@ -4081,26 +4099,28 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secondary_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_secondary_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_secondary_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4497,22 +4517,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5093,22 +5114,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5425,8 +5447,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -5480,26 +5503,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5824,8 +5849,9 @@ def test_create_secondary_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_secondary_instance(request) @@ -5881,26 +5907,28 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secondary_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_secondary_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6233,8 +6261,9 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_instances(request) @@ -6290,26 +6319,28 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_instances - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6523,8 +6554,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -6578,26 +6610,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6901,8 +6935,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -6956,26 +6991,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7275,8 +7312,9 @@ def test_failover_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.failover_instance(request) @@ -7332,26 +7370,28 @@ async def test_failover_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.failover_instance - ] = mock_object + ] = mock_rpc request = {} await client.failover_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.failover_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7653,8 +7693,9 @@ def test_inject_fault_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.inject_fault(request) @@ -7708,26 +7749,28 @@ async def test_inject_fault_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.inject_fault - ] = mock_object + ] = mock_rpc request = {} await client.inject_fault(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.inject_fault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8031,8 +8074,9 @@ def test_restart_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restart_instance(request) @@ -8086,26 +8130,28 @@ async def test_restart_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restart_instance - ] = mock_object + ] = mock_rpc request = {} await client.restart_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restart_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8460,22 +8506,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9057,22 +9104,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9391,8 +9439,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -9446,26 +9495,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9775,8 +9826,9 @@ def test_update_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_backup(request) @@ -9830,26 +9882,28 @@ async def test_update_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup - ] = mock_object + ] = mock_rpc request = {} await client.update_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10153,8 +10207,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -10208,26 +10263,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10588,22 +10645,23 @@ async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_supported_database_flags - ] = mock_object + ] = mock_rpc request = {} await client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_supported_database_flags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11192,22 +11250,23 @@ async def test_generate_client_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_client_certificate - ] = mock_object + ] = mock_rpc request = {} await client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_client_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11600,22 +11659,23 @@ async def test_get_connection_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection_info - ] = mock_object + ] = mock_rpc request = {} await client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11991,22 +12051,23 @@ async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_users - ] = mock_object + ] = mock_rpc request = {} await client.list_users(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_users(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12558,22 +12619,23 @@ async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_user - ] = mock_object + ] = mock_rpc request = {} await client.get_user(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12937,22 +12999,23 @@ async def test_create_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_user - ] = mock_object + ] = mock_rpc request = {} await client.create_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13332,22 +13395,23 @@ async def test_update_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_user - ] = mock_object + ] = mock_rpc request = {} await client.update_user(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13703,22 +13767,23 @@ async def test_delete_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_user - ] = mock_object + ] = mock_rpc request = {} await client.delete_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14062,22 +14127,23 @@ async def test_list_databases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_databases - ] = mock_object + ] = mock_rpc request = {} await client.list_databases(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_databases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py index b5b6c9bdcb8f..ab12494b9268 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py @@ -1287,22 +1287,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1874,22 +1875,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2200,8 +2202,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -2255,26 +2258,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2596,8 +2601,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -2651,26 +2657,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2986,8 +2994,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -3041,26 +3050,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3354,8 +3365,9 @@ def test_promote_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.promote_cluster(request) @@ -3409,26 +3421,28 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.promote_cluster - ] = mock_object + ] = mock_rpc request = {} await client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.promote_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3722,8 +3736,9 @@ def test_restore_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_cluster(request) @@ -3777,26 +3792,28 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_cluster - ] = mock_object + ] = mock_rpc request = {} await client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4019,8 +4036,9 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_secondary_cluster(request) @@ -4076,26 +4094,28 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secondary_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_secondary_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_secondary_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4492,22 +4512,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4921,6 +4942,7 @@ def test_get_instance(request_type, transport: str = "grpc"): reconciling=True, etag="etag_value", satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) response = client.get_instance(request) @@ -4944,6 +4966,9 @@ def test_get_instance(request_type, transport: str = "grpc"): assert response.reconciling is True assert response.etag == "etag_value" assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] def test_get_instance_empty_call(): @@ -5054,6 +5079,7 @@ async def test_get_instance_empty_call_async(): reconciling=True, etag="etag_value", satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) ) response = await client.get_instance() @@ -5085,22 +5111,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped 
function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5133,6 +5160,7 @@ async def test_get_instance_async( reconciling=True, etag="etag_value", satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) ) response = await client.get_instance(request) @@ -5157,6 +5185,9 @@ async def test_get_instance_async( assert response.reconciling is True assert response.etag == "etag_value" assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] @pytest.mark.asyncio @@ -5415,8 +5446,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -5470,26 +5502,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5814,8 +5848,9 @@ def test_create_secondary_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_secondary_instance(request) @@ -5871,26 +5906,28 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_secondary_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_secondary_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6223,8 +6260,9 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_instances(request) @@ -6280,26 +6318,28 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_instances - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6513,8 +6553,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -6568,26 +6609,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6891,8 +6934,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -6946,26 +6990,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7265,8 +7311,9 @@ def test_failover_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.failover_instance(request) @@ -7322,26 +7369,28 @@ async def test_failover_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.failover_instance - ] = mock_object + ] = mock_rpc request = {} await client.failover_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.failover_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7643,8 +7692,9 @@ def test_inject_fault_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.inject_fault(request) @@ -7698,26 +7748,28 @@ async def test_inject_fault_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.inject_fault - ] = mock_object + ] = mock_rpc request = {} await client.inject_fault(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.inject_fault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8021,8 +8073,9 @@ def test_restart_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restart_instance(request) @@ -8076,26 +8129,28 @@ async def test_restart_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restart_instance - ] = mock_object + ] = mock_rpc request = {} await client.restart_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restart_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8450,22 +8505,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9044,22 +9100,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9376,8 +9433,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -9431,26 +9489,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9760,8 +9820,9 @@ def test_update_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_backup(request) @@ -9815,26 +9876,28 @@ async def test_update_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup - ] = mock_object + ] = mock_rpc request = {} await client.update_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10138,8 +10201,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -10193,26 +10257,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10573,22 +10639,23 @@ async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_supported_database_flags - ] = mock_object + ] = mock_rpc request = {} await client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_supported_database_flags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11177,22 +11244,23 @@ async def test_generate_client_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_client_certificate - ] = mock_object + ] = mock_rpc request = {} await client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_client_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11585,22 +11653,23 @@ async def test_get_connection_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection_info - ] = mock_object + ] = mock_rpc request = {} await client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11976,22 +12045,23 @@ async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_users - ] = mock_object + ] = mock_rpc request = {} await client.list_users(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_users(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12543,22 +12613,23 @@ async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_user - ] = mock_object + ] = mock_rpc request = {} await client.get_user(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12922,22 +12993,23 @@ async def test_create_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_user - ] = mock_object + ] = mock_rpc request = {} await client.create_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13317,22 +13389,23 @@ async def test_update_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_user - ] = mock_object + ] = mock_rpc request = {} await client.update_user(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13688,22 +13761,23 @@ async def test_delete_user_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_user - ] = mock_object + ] = mock_rpc request = {} await client.delete_user(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14047,22 +14121,23 @@ async def test_list_databases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_databases - ] = mock_object + ] = mock_rpc request = {} await client.list_databases(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_databases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15219,6 +15294,10 @@ def test_create_cluster_rest(request_type): ] }, "satisfies_pzs": True, + "maintenance_update_policy": { + "maintenance_windows": [{"day": 1, "start_time": {}}] + }, + "maintenance_schedule": {"start_time": {}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -15700,6 +15779,10 @@ def test_update_cluster_rest(request_type): ] }, "satisfies_pzs": True, + "maintenance_update_policy": { + "maintenance_windows": [{"day": 1, "start_time": {}}] + }, + "maintenance_schedule": {"start_time": {}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -17027,6 +17110,10 @@ def test_create_secondary_cluster_rest(request_type): ] }, "satisfies_pzs": True, + "maintenance_update_policy": { + "maintenance_windows": [{"day": 1, "start_time": {}}] + }, + "maintenance_schedule": {"start_time": {}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -17843,6 +17930,7 @@ def test_get_instance_rest(request_type): reconciling=True, etag="etag_value", satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) # Wrap the value into a proper Response obj @@ -17870,6 +17958,9 @@ def test_get_instance_rest(request_type): assert response.reconciling is True assert response.etag == "etag_value" assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] def test_get_instance_rest_use_cached_wrapped_rpc(): @@ -18217,7 +18308,12 @@ def test_create_instance_rest(request_type): "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -18693,7 +18789,12 @@ def test_create_secondary_instance_rest(request_type): "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -19183,7 +19284,12 @@ def test_batch_create_instances_rest(request_type): {"cidr_range": "cidr_range_value"} ], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], }, "request_id": "request_id_value", "validate_only": True, @@ -19588,7 +19694,12 @@ def test_update_instance_rest(request_type): "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py b/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/async_client.py b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/async_client.py index a2335db549af..1a2af4b47dea 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/async_client.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -209,9 +208,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ApiGatewayServiceClient).get_transport_class, type(ApiGatewayServiceClient) - ) + get_transport_class = ApiGatewayServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py index 037b0a4b9d8a..d3ad4c8f4101 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py @@ -766,7 +766,7 @@ def __init__( Type[ApiGatewayServiceTransport], Callable[..., ApiGatewayServiceTransport], ] = ( - type(self).get_transport_class(transport) + ApiGatewayServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ApiGatewayServiceTransport], transport) ) diff --git a/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json b/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json index 1496842e4c28..fdf981856ea3 100644 --- a/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json +++ b/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-api-gateway", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py 
b/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py index c46ffa458645..dbaa24996aed 100644 --- a/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py +++ b/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py @@ -1341,22 +1341,23 @@ async def test_list_gateways_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_gateways - ] = mock_object + ] = mock_rpc request = {} await client.list_gateways(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_gateways(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1913,22 +1914,23 @@ async def test_get_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_gateway - ] = mock_object + ] = mock_rpc request = {} await client.get_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2227,8 +2229,9 @@ def test_create_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_gateway(request) @@ -2282,26 +2285,28 @@ async def test_create_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_gateway - ] = mock_object + ] = mock_rpc request = {} await client.create_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2607,8 +2612,9 @@ def test_update_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_gateway(request) @@ -2662,26 +2668,28 @@ async def test_update_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_gateway - ] = mock_object + ] = mock_rpc request = {} await client.update_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2981,8 +2989,9 @@ def test_delete_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_gateway(request) @@ -3036,26 +3045,28 @@ async def test_delete_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_gateway - ] = mock_object + ] = mock_rpc request = {} await client.delete_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3408,22 +3419,23 @@ async def test_list_apis_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_apis - ] = mock_object + ] = mock_rpc request = {} await client.list_apis(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_apis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3975,22 +3987,23 @@ async def test_get_api_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api - ] = mock_object + ] = mock_rpc request = {} await client.get_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4287,8 +4300,9 @@ def test_create_api_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_api(request) @@ -4340,26 +4354,28 @@ async def test_create_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api - ] = mock_object + ] = mock_rpc request = {} await client.create_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4665,8 +4681,9 @@ def test_update_api_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_api(request) @@ -4718,26 +4735,28 @@ async def test_update_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_api - ] = mock_object + ] = mock_rpc request = {} await client.update_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5037,8 +5056,9 @@ def test_delete_api_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_api(request) @@ -5090,26 +5110,28 @@ async def test_delete_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api - ] = mock_object + ] = mock_rpc request = {} await client.delete_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5466,22 +5488,23 @@ async def test_list_api_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_api_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_api_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_api_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6038,22 +6061,23 @@ async def test_get_api_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api_config - ] = mock_object + ] = mock_rpc request = {} await client.get_api_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6364,8 +6388,9 @@ def test_create_api_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_api_config(request) @@ -6421,26 +6446,28 @@ async def test_create_api_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api_config - ] = mock_object + ] = mock_rpc request = {} await client.create_api_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_api_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6764,8 +6791,9 @@ def test_update_api_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_api_config(request) @@ -6821,26 +6849,28 @@ async def test_update_api_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_api_config - ] = mock_object + ] = mock_rpc request = {} await client.update_api_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_api_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7158,8 +7188,9 @@ def test_delete_api_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_api_config(request) @@ -7215,26 +7246,28 @@ async def test_delete_api_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_api_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_api_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py b/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py index 0d21cc226e8f..558c8aab67c5 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.5.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py index 0d21cc226e8f..558c8aab67c5 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/async_client.py b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/async_client.py index dee1a213b742..4195ea44ee16 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/async_client.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,9 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ApiKeysClient).get_transport_class, type(ApiKeysClient) - ) + get_transport_class = ApiKeysClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py index a500d1d53251..747e9bb8dde2 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py @@ -662,7 +662,7 @@ def __init__( transport_init: Union[ Type[ApiKeysTransport], Callable[..., ApiKeysTransport] ] = ( - type(self).get_transport_class(transport) + ApiKeysClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ApiKeysTransport], transport) ) diff --git a/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json b/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json index 88554b7a7f43..3d8902cac4a6 100644 --- a/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json +++ b/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-api-keys", - "version": "0.5.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py b/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py index 94fbff52eb57..19a306f11630 100644 --- a/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py 
+++ b/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py @@ -1178,8 +1178,9 @@ def test_create_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_key(request) @@ -1231,26 +1232,28 @@ async def test_create_key_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_key - ] = mock_object + ] = mock_rpc request = {} await client.create_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1616,22 +1619,23 @@ async def test_list_keys_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_keys - ] = mock_object + ] = mock_rpc request = {} await client.list_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2184,22 +2188,23 @@ async def test_get_key_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_key - ] = mock_object + ] = mock_rpc request = {} await client.get_key(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2552,22 +2557,23 @@ async def test_get_key_string_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_key_string - ] = mock_object + ] = mock_rpc request = {} await client.get_key_string(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_key_string(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2856,8 +2862,9 @@ def test_update_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_key(request) @@ -2909,26 +2916,28 @@ async def test_update_key_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_key - ] = mock_object + ] = mock_rpc request = {} await client.update_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3230,8 +3239,9 @@ def test_delete_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_key(request) @@ -3283,26 +3293,28 @@ async def test_delete_key_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_key - ] = mock_object + ] = mock_rpc request = {} await client.delete_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3592,8 +3604,9 @@ def test_undelete_key_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undelete_key(request) @@ -3647,26 +3660,28 @@ async def test_undelete_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undelete_key - ] = mock_object + ] = mock_rpc request = {} await client.undelete_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undelete_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3931,22 +3946,23 @@ async def test_lookup_key_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_key - ] = mock_object + ] = mock_rpc request = {} await client.lookup_key(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/async_client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/async_client.py index a76f610ddc4b..692a5b47d977 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/async_client.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConnectionServiceClient).get_transport_class, type(ConnectionServiceClient) - ) + get_transport_class = ConnectionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py index cf735e09880d..8b0713d63ed3 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py @@ -660,7 +660,7 @@ def __init__( Type[ConnectionServiceTransport], Callable[..., ConnectionServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConnectionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConnectionServiceTransport], transport) ) diff --git 
a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/async_client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/async_client.py index 6824a486e07a..a9d118e68dcb 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/async_client.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -183,9 +182,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TetherClient).get_transport_class, type(TetherClient) - ) + get_transport_class = TetherClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py index 9be50b0e18c2..7b651a854222 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py @@ -638,7 +638,7 @@ def __init__( transport_init: Union[ Type[TetherTransport], Callable[..., TetherTransport] ] = ( - type(self).get_transport_class(transport) + TetherClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TetherTransport], transport) ) diff --git a/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json b/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json index fb979b656282..b8c1a4c55ebc 100644 --- 
a/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json +++ b/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apigee-connect", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py index 0c846df35117..ee82ef2f59e9 100644 --- a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py +++ b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py @@ -1285,22 +1285,23 @@ async def test_list_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_connections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py index 56abbe13b325..c3857955a069 100644 --- a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py +++ b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py @@ -1118,22 +1118,23 @@ async def test_egress_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.egress - ] = mock_object + ] = mock_rpc request = [{}] await client.egress(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.egress(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py index 02b228845902..558c8aab67c5 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py index 02b228845902..558c8aab67c5 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/async_client.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/async_client.py index df2b9af6b019..b4c9eddd7e6a 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/async_client.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ProvisioningClient).get_transport_class, type(ProvisioningClient) - ) + get_transport_class = ProvisioningClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py index 46523fab45a9..dc2a49426692 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py @@ -665,7 +665,7 @@ def __init__( transport_init: Union[ Type[ProvisioningTransport], Callable[..., ProvisioningTransport] ] = ( - type(self).get_transport_class(transport) + ProvisioningClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProvisioningTransport], transport) ) diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/async_client.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/async_client.py index c562dc69e398..b9cb56512196 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/async_client.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RegistryClient).get_transport_class, type(RegistryClient) - ) + get_transport_class = RegistryClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py index 15e7bfbae605..210cea34e08a 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py @@ -762,7 +762,7 @@ def __init__( transport_init: Union[ Type[RegistryTransport], Callable[..., RegistryTransport] ] = ( - type(self).get_transport_class(transport) + RegistryClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegistryTransport], transport) ) diff --git a/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json b/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json index a325ed82ef5c..399743edd3de 100644 --- a/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json +++ b/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apigee-registry", - "version": "0.6.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py 
b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py index f5ef900a382f..b0cb0a3aa8a3 100644 --- a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py +++ b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py @@ -1219,8 +1219,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -1274,26 +1275,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1604,8 +1607,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -1659,26 +1663,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2031,22 +2037,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py index 3f718f87e226..28ed860fab7b 100644 --- a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py +++ b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py @@ -1236,22 +1236,23 @@ async def test_list_apis_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_apis - ] = mock_object + ] = mock_rpc request = {} await client.list_apis(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_apis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1807,22 +1808,23 @@ async def test_get_api_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api - ] = mock_object + ] = mock_rpc request = {} await client.get_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2192,22 +2194,23 @@ async def test_create_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api - ] = mock_object + ] = mock_rpc request = {} await client.create_api(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2591,22 +2594,23 @@ async def test_update_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_api - ] = mock_object + ] = mock_rpc request = {} await client.update_api(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2962,22 +2966,23 @@ async def test_delete_api_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api - ] = mock_object + ] = mock_rpc request = {} await client.delete_api(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_api(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3331,22 +3336,23 @@ async def test_list_api_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_api_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_api_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_api_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3917,22 +3923,23 @@ async def test_get_api_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api_version - ] = mock_object + ] = mock_rpc request = {} await client.get_api_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4310,22 +4317,23 @@ async def test_create_api_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api_version - ] = mock_object + ] = mock_rpc request = {} await client.create_api_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_api_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4728,22 +4736,23 @@ async def test_update_api_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_api_version - ] = mock_object + ] = mock_rpc request = {} await client.update_api_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_api_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5124,22 +5133,23 @@ async def test_delete_api_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_api_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_api_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5494,22 +5504,23 @@ async def test_list_api_specs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_api_specs - ] = mock_object + ] = mock_rpc request = {} await client.list_api_specs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_api_specs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6076,22 +6087,23 @@ async def test_get_api_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api_spec - ] = mock_object + ] = mock_rpc request = {} await client.get_api_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6472,22 +6484,23 @@ async def test_get_api_spec_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api_spec_contents - ] = mock_object + ] = mock_rpc request = {} await client.get_api_spec_contents(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api_spec_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6875,22 +6888,23 @@ async def test_create_api_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api_spec - ] = mock_object + ] = mock_rpc request = {} await client.create_api_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_api_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7295,22 +7309,23 @@ async def test_update_api_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_api_spec - ] = mock_object + ] = mock_rpc request = {} await client.update_api_spec(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_api_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7678,22 +7693,23 @@ async def test_delete_api_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api_spec - ] = mock_object + ] = mock_rpc request = {} await client.delete_api_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_api_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8072,22 +8088,23 @@ async def test_tag_api_spec_revision_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.tag_api_spec_revision - ] = mock_object + ] = mock_rpc request = {} await client.tag_api_spec_revision(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.tag_api_spec_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8392,22 +8409,23 @@ async def test_list_api_spec_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_api_spec_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_api_spec_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_api_spec_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8919,22 +8937,23 @@ async def test_rollback_api_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_api_spec - ] = mock_object + ] = mock_rpc request = {} await client.rollback_api_spec(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback_api_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9261,22 +9280,23 @@ async def test_delete_api_spec_revision_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api_spec_revision - ] = mock_object + ] = mock_rpc request = {} await client.delete_api_spec_revision(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_api_spec_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9668,22 +9688,23 @@ async def test_list_api_deployments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_api_deployments - ] = mock_object + ] = mock_rpc request = {} await client.list_api_deployments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_api_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10281,22 +10302,23 @@ async def test_get_api_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_api_deployment - ] = mock_object + ] = mock_rpc request = {} await client.get_api_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_api_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10711,22 +10733,23 @@ async def test_create_api_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_api_deployment - ] = mock_object + ] = mock_rpc request = {} await client.create_api_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_api_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11155,22 +11178,23 @@ async def test_update_api_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_api_deployment - ] = mock_object + ] = mock_rpc request = {} await client.update_api_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_api_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11562,22 +11586,23 @@ async def test_delete_api_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api_deployment - ] = mock_object + ] = mock_rpc request = {} await client.delete_api_deployment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_api_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11967,22 +11992,23 @@ async def test_tag_api_deployment_revision_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.tag_api_deployment_revision - ] = mock_object + ] = mock_rpc request = {} await client.tag_api_deployment_revision(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.tag_api_deployment_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12287,22 +12313,23 @@ async def test_list_api_deployment_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_api_deployment_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_api_deployment_revisions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_api_deployment_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12819,22 +12846,23 @@ async def test_rollback_api_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_api_deployment - ] = mock_object + ] = mock_rpc request = {} await client.rollback_api_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback_api_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13161,22 +13189,23 @@ async def test_delete_api_deployment_revision_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_api_deployment_revision - ] = mock_object + ] = mock_rpc request = {} await client.delete_api_deployment_revision(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_api_deployment_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13556,22 +13585,23 @@ async def test_list_artifacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_artifacts - ] = mock_object + ] = mock_rpc request = {} await client.list_artifacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14126,22 +14156,23 @@ async def test_get_artifact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_artifact - ] = mock_object + ] = mock_rpc request = {} await client.get_artifact(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_artifact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14514,22 +14545,23 @@ async def test_get_artifact_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_artifact_contents - ] = mock_object + ] = mock_rpc request = {} await client.get_artifact_contents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_artifact_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14905,22 +14937,23 @@ async def test_create_artifact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_artifact - ] = mock_object + ] = mock_rpc request = {} await client.create_artifact(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_artifact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15307,22 +15340,23 @@ async def test_replace_artifact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.replace_artifact - ] = mock_object + ] = mock_rpc request = {} await client.replace_artifact(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.replace_artifact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15673,22 +15707,23 @@ async def test_delete_artifact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_artifact - ] = mock_object + ] = mock_rpc request = {} await client.delete_artifact(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_artifact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-apihub/.OwlBot.yaml b/packages/google-cloud-apihub/.OwlBot.yaml new file mode 100644 index 000000000000..a4fe33a60a86 --- /dev/null +++ b/packages/google-cloud-apihub/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +deep-copy-regex: + - source: /google/cloud/apihub/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-apihub/$1 +api-name: google-cloud-apihub diff --git a/packages/google-cloud-apihub/.coveragerc b/packages/google-cloud-apihub/.coveragerc new file mode 100644 index 000000000000..962c8dd579b8 --- /dev/null +++ b/packages/google-cloud-apihub/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/apihub/__init__.py + google/cloud/apihub/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-apihub/.flake8 b/packages/google-cloud-apihub/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-apihub/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-apihub/.gitignore b/packages/google-cloud-apihub/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-apihub/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-apihub/.repo-metadata.json b/packages/google-cloud-apihub/.repo-metadata.json new file mode 100644 index 000000000000..4fde262e68c2 --- /dev/null +++ b/packages/google-cloud-apihub/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-cloud-apihub", + "name_pretty": "API Hub API", + "api_description": "API hub lets you consolidate and organize information about all of the APIs of interest to your organization. API hub lets you capture critical information about APIs that allows developers to discover and evaluate them easily and leverage the work of other teams wherever possible. 
API platform teams can use API hub to have visibility into and manage their portfolio of APIs.", + "product_documentation": "https://cloud.google.com/apigee/docs/apihub/what-is-api-hub", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-apihub/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1447560", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-apihub", + "api_id": "apihub.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "apihub" +} diff --git a/packages/google-cloud-apihub/CHANGELOG.md b/packages/google-cloud-apihub/CHANGELOG.md new file mode 100644 index 000000000000..6b8fb34aaf0b --- /dev/null +++ b/packages/google-cloud-apihub/CHANGELOG.md @@ -0,0 +1,21 @@ +# Changelog + +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apihub-v0.1.0...google-cloud-apihub-v0.2.0) (2024-09-04) + + +### ⚠ BREAKING CHANGES + +* [google-cloud-apihub] remove gRPC support for client libraries ([#13055](https://github.com/googleapis/google-cloud-python/issues/13055)) + +### Bug Fixes + +* [google-cloud-apihub] remove gRPC support for client libraries ([#13055](https://github.com/googleapis/google-cloud-python/issues/13055)) ([3762ff4](https://github.com/googleapis/google-cloud-python/commit/3762ff40e51466bc516939a31732300c8e20211a)) + +## 0.1.0 (2024-08-08) + + +### Features + +* add initial files for google.cloud.apihub.v1 ([#12993](https://github.com/googleapis/google-cloud-python/issues/12993)) ([2ac4597](https://github.com/googleapis/google-cloud-python/commit/2ac4597188c70a922479bf48adf2a88d850bc534)) + +## Changelog diff --git a/packages/google-cloud-apihub/CODE_OF_CONDUCT.md b/packages/google-cloud-apihub/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ 
b/packages/google-cloud-apihub/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. 
+ +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-apihub/CONTRIBUTING.rst b/packages/google-cloud-apihub/CONTRIBUTING.rst new file mode 100644 index 000000000000..dbc72c8e69fd --- /dev/null +++ b/packages/google-cloud-apihub/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. 
+ +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? 
+***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. 
+ For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. 
+ +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-apihub + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-apihub/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. 
+ +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-apihub/LICENSE b/packages/google-cloud-apihub/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-apihub/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-apihub/MANIFEST.in b/packages/google-cloud-apihub/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-apihub/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-apihub/README.rst b/packages/google-cloud-apihub/README.rst new file mode 100644 index 000000000000..b99839094b9e --- /dev/null +++ b/packages/google-cloud-apihub/README.rst @@ -0,0 +1,108 @@ +Python Client for API Hub API +============================= + +|preview| |pypi| |versions| + +`API Hub API`_: API hub lets you consolidate and organize information about all of the APIs of interest to your organization. API hub lets you capture critical information about APIs that allows developers to discover and evaluate them easily and leverage the work of other teams wherever possible. API platform teams can use API hub to have visibility into and manage their portfolio of APIs. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-apihub.svg + :target: https://pypi.org/project/google-cloud-apihub/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-apihub.svg + :target: https://pypi.org/project/google-cloud-apihub/ +.. _API Hub API: https://cloud.google.com/apigee/docs/apihub/what-is-api-hub +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-apihub/latest/summary_overview +.. _Product Documentation: https://cloud.google.com/apigee/docs/apihub/what-is-api-hub + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the API Hub API.`_ +4. 
`Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the API Hub API.: https://cloud.google.com/apigee/docs/apihub/what-is-api-hub +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + pip install google-cloud-apihub + + +Windows +^^^^^^^ + +.. 
code-block:: console + + py -m venv <your-env> + .\<your-env>\Scripts\activate + pip install google-cloud-apihub + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for API Hub API + to see other available methods on the client. +- Read the `API Hub API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _API Hub API Product documentation: https://cloud.google.com/apigee/docs/apihub/what-is-api-hub +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-apihub/docs/CHANGELOG.md b/packages/google-cloud-apihub/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-apihub/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-apihub/docs/README.rst b/packages/google-cloud-apihub/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-apihub/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-apihub/docs/_static/custom.css b/packages/google-cloud-apihub/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-apihub/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-apihub/docs/_templates/layout.html b/packages/google-cloud-apihub/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null 
+++ b/packages/google-cloud-apihub/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-apihub/docs/apihub_v1/api_hub.rst b/packages/google-cloud-apihub/docs/apihub_v1/api_hub.rst new file mode 100644 index 000000000000..defa77a24580 --- /dev/null +++ b/packages/google-cloud-apihub/docs/apihub_v1/api_hub.rst @@ -0,0 +1,10 @@ +ApiHub +------------------------ + +.. automodule:: google.cloud.apihub_v1.services.api_hub + :members: + :inherited-members: + +.. automodule:: google.cloud.apihub_v1.services.api_hub.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-apihub/docs/apihub_v1/api_hub_dependencies.rst b/packages/google-cloud-apihub/docs/apihub_v1/api_hub_dependencies.rst new file mode 100644 index 000000000000..11a49a200a37 --- /dev/null +++ b/packages/google-cloud-apihub/docs/apihub_v1/api_hub_dependencies.rst @@ -0,0 +1,10 @@ +ApiHubDependencies +------------------------------------ + +.. automodule:: google.cloud.apihub_v1.services.api_hub_dependencies + :members: + :inherited-members: + +.. automodule:: google.cloud.apihub_v1.services.api_hub_dependencies.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-apihub/docs/apihub_v1/api_hub_plugin.rst b/packages/google-cloud-apihub/docs/apihub_v1/api_hub_plugin.rst new file mode 100644 index 000000000000..8754bfe52594 --- /dev/null +++ b/packages/google-cloud-apihub/docs/apihub_v1/api_hub_plugin.rst @@ -0,0 +1,6 @@ +ApiHubPlugin +------------------------------ + +.. 
automodule:: google.cloud.apihub_v1.services.api_hub_plugin + :members: + :inherited-members: diff --git a/packages/google-cloud-apihub/docs/apihub_v1/host_project_registration_service.rst b/packages/google-cloud-apihub/docs/apihub_v1/host_project_registration_service.rst new file mode 100644 index 000000000000..fccf30f3f0b2 --- /dev/null +++ b/packages/google-cloud-apihub/docs/apihub_v1/host_project_registration_service.rst @@ -0,0 +1,10 @@ +HostProjectRegistrationService +------------------------------------------------ + +.. automodule:: google.cloud.apihub_v1.services.host_project_registration_service + :members: + :inherited-members: + +.. automodule:: google.cloud.apihub_v1.services.host_project_registration_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-apihub/docs/apihub_v1/linting_service.rst b/packages/google-cloud-apihub/docs/apihub_v1/linting_service.rst new file mode 100644 index 000000000000..8b672826e91b --- /dev/null +++ b/packages/google-cloud-apihub/docs/apihub_v1/linting_service.rst @@ -0,0 +1,6 @@ +LintingService +-------------------------------- + +.. automodule:: google.cloud.apihub_v1.services.linting_service + :members: + :inherited-members: diff --git a/packages/google-cloud-apihub/docs/apihub_v1/provisioning.rst b/packages/google-cloud-apihub/docs/apihub_v1/provisioning.rst new file mode 100644 index 000000000000..039ebb4fac84 --- /dev/null +++ b/packages/google-cloud-apihub/docs/apihub_v1/provisioning.rst @@ -0,0 +1,6 @@ +Provisioning +------------------------------ + +.. 
automodule:: google.cloud.apihub_v1.services.provisioning + :members: + :inherited-members: diff --git a/packages/google-cloud-apihub/docs/apihub_v1/runtime_project_attachment_service.rst b/packages/google-cloud-apihub/docs/apihub_v1/runtime_project_attachment_service.rst new file mode 100644 index 000000000000..808ed005fd1b --- /dev/null +++ b/packages/google-cloud-apihub/docs/apihub_v1/runtime_project_attachment_service.rst @@ -0,0 +1,10 @@ +RuntimeProjectAttachmentService +------------------------------------------------- + +.. automodule:: google.cloud.apihub_v1.services.runtime_project_attachment_service + :members: + :inherited-members: + +.. automodule:: google.cloud.apihub_v1.services.runtime_project_attachment_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-apihub/docs/apihub_v1/services_.rst b/packages/google-cloud-apihub/docs/apihub_v1/services_.rst new file mode 100644 index 000000000000..24ac6a8986a7 --- /dev/null +++ b/packages/google-cloud-apihub/docs/apihub_v1/services_.rst @@ -0,0 +1,12 @@ +Services for Google Cloud Apihub v1 API +======================================= +.. toctree:: + :maxdepth: 2 + + api_hub + api_hub_dependencies + api_hub_plugin + host_project_registration_service + linting_service + provisioning + runtime_project_attachment_service diff --git a/packages/google-cloud-apihub/docs/apihub_v1/types_.rst b/packages/google-cloud-apihub/docs/apihub_v1/types_.rst new file mode 100644 index 000000000000..bb5663b94836 --- /dev/null +++ b/packages/google-cloud-apihub/docs/apihub_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Apihub v1 API +==================================== + +.. 
automodule:: google.cloud.apihub_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-apihub/docs/conf.py b/packages/google-cloud-apihub/docs/conf.py new file mode 100644 index 000000000000..939e0b6666a0 --- /dev/null +++ b/packages/google-cloud-apihub/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-apihub documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. 
+needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-apihub" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-apihub", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-apihub-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-apihub.tex", + "google-cloud-apihub Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-apihub", + "google-cloud-apihub Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-apihub", + "google-cloud-apihub Documentation", + author, + "google-cloud-apihub", + "google-cloud-apihub Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-apihub/docs/index.rst b/packages/google-cloud-apihub/docs/index.rst new file mode 100644 index 000000000000..0a0dd1b9e07b --- /dev/null +++ b/packages/google-cloud-apihub/docs/index.rst @@ -0,0 +1,28 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + apihub_v1/services_ + apihub_v1/types_ + + +Changelog +--------- + +For a list of all ``google-cloud-apihub`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-apihub/docs/multiprocessing.rst b/packages/google-cloud-apihub/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-apihub/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-apihub/docs/summary_overview.md b/packages/google-cloud-apihub/docs/summary_overview.md new file mode 100644 index 000000000000..9acfbab3dac3 --- /dev/null +++ b/packages/google-cloud-apihub/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# API Hub API API + +Overview of the APIs available for API Hub API API. + +## All entries + +Classes, methods and properties & attributes for +API Hub API API. 
+ +[classes](https://cloud.google.com/python/docs/reference/google-cloud-apihub/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/google-cloud-apihub/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/google-cloud-apihub/latest/summary_property.html) diff --git a/packages/google-cloud-apihub/google/cloud/apihub/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub/__init__.py new file mode 100644 index 000000000000..72b5c1f8fbe5 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub/__init__.py @@ -0,0 +1,273 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.apihub import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.apihub_v1.services.api_hub.client import ApiHubClient +from google.cloud.apihub_v1.services.api_hub_dependencies.client import ( + ApiHubDependenciesClient, +) +from google.cloud.apihub_v1.services.api_hub_plugin.client import ApiHubPluginClient +from google.cloud.apihub_v1.services.host_project_registration_service.client import ( + HostProjectRegistrationServiceClient, +) +from google.cloud.apihub_v1.services.linting_service.client import LintingServiceClient +from google.cloud.apihub_v1.services.provisioning.client import ProvisioningClient +from google.cloud.apihub_v1.services.runtime_project_attachment_service.client import ( + RuntimeProjectAttachmentServiceClient, +) +from google.cloud.apihub_v1.types.apihub_service import ( + ApiHubResource, + CreateApiRequest, + CreateAttributeRequest, + CreateDependencyRequest, + CreateDeploymentRequest, + CreateExternalApiRequest, + CreateSpecRequest, + CreateVersionRequest, + DeleteApiRequest, + DeleteAttributeRequest, + DeleteDependencyRequest, + DeleteDeploymentRequest, + DeleteExternalApiRequest, + DeleteSpecRequest, + DeleteVersionRequest, + GetApiOperationRequest, + GetApiRequest, + GetAttributeRequest, + GetDefinitionRequest, + GetDependencyRequest, + GetDeploymentRequest, + GetExternalApiRequest, + GetSpecContentsRequest, + GetSpecRequest, + GetVersionRequest, + ListApiOperationsRequest, + ListApiOperationsResponse, + ListApisRequest, + ListApisResponse, + ListAttributesRequest, + ListAttributesResponse, + ListDependenciesRequest, + ListDependenciesResponse, + ListDeploymentsRequest, + ListDeploymentsResponse, + ListExternalApisRequest, + ListExternalApisResponse, + ListSpecsRequest, + ListSpecsResponse, + ListVersionsRequest, + ListVersionsResponse, + SearchResourcesRequest, + SearchResourcesResponse, + SearchResult, + UpdateApiRequest, + UpdateAttributeRequest, + 
UpdateDependencyRequest, + UpdateDeploymentRequest, + UpdateExternalApiRequest, + UpdateSpecRequest, + UpdateVersionRequest, +) +from google.cloud.apihub_v1.types.common_fields import ( + Api, + ApiHubInstance, + ApiOperation, + Attribute, + AttributeValues, + Definition, + Dependency, + DependencyEntityReference, + DependencyErrorDetail, + Deployment, + Documentation, + ExternalApi, + HttpOperation, + Issue, + Linter, + LintResponse, + LintState, + OpenApiSpecDetails, + OperationDetails, + OperationMetadata, + Owner, + Path, + Point, + Range, + Schema, + Severity, + Spec, + SpecContents, + SpecDetails, + Version, +) +from google.cloud.apihub_v1.types.host_project_registration_service import ( + CreateHostProjectRegistrationRequest, + GetHostProjectRegistrationRequest, + HostProjectRegistration, + ListHostProjectRegistrationsRequest, + ListHostProjectRegistrationsResponse, +) +from google.cloud.apihub_v1.types.linting_service import ( + GetStyleGuideContentsRequest, + GetStyleGuideRequest, + LintSpecRequest, + StyleGuide, + StyleGuideContents, + UpdateStyleGuideRequest, +) +from google.cloud.apihub_v1.types.plugin_service import ( + DisablePluginRequest, + EnablePluginRequest, + GetPluginRequest, + Plugin, +) +from google.cloud.apihub_v1.types.provisioning_service import ( + CreateApiHubInstanceRequest, + GetApiHubInstanceRequest, + LookupApiHubInstanceRequest, + LookupApiHubInstanceResponse, +) +from google.cloud.apihub_v1.types.runtime_project_attachment_service import ( + CreateRuntimeProjectAttachmentRequest, + DeleteRuntimeProjectAttachmentRequest, + GetRuntimeProjectAttachmentRequest, + ListRuntimeProjectAttachmentsRequest, + ListRuntimeProjectAttachmentsResponse, + LookupRuntimeProjectAttachmentRequest, + LookupRuntimeProjectAttachmentResponse, + RuntimeProjectAttachment, +) + +__all__ = ( + "ApiHubClient", + "ApiHubDependenciesClient", + "ApiHubPluginClient", + "HostProjectRegistrationServiceClient", + "LintingServiceClient", + "ProvisioningClient", + 
"RuntimeProjectAttachmentServiceClient", + "ApiHubResource", + "CreateApiRequest", + "CreateAttributeRequest", + "CreateDependencyRequest", + "CreateDeploymentRequest", + "CreateExternalApiRequest", + "CreateSpecRequest", + "CreateVersionRequest", + "DeleteApiRequest", + "DeleteAttributeRequest", + "DeleteDependencyRequest", + "DeleteDeploymentRequest", + "DeleteExternalApiRequest", + "DeleteSpecRequest", + "DeleteVersionRequest", + "GetApiOperationRequest", + "GetApiRequest", + "GetAttributeRequest", + "GetDefinitionRequest", + "GetDependencyRequest", + "GetDeploymentRequest", + "GetExternalApiRequest", + "GetSpecContentsRequest", + "GetSpecRequest", + "GetVersionRequest", + "ListApiOperationsRequest", + "ListApiOperationsResponse", + "ListApisRequest", + "ListApisResponse", + "ListAttributesRequest", + "ListAttributesResponse", + "ListDependenciesRequest", + "ListDependenciesResponse", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "ListExternalApisRequest", + "ListExternalApisResponse", + "ListSpecsRequest", + "ListSpecsResponse", + "ListVersionsRequest", + "ListVersionsResponse", + "SearchResourcesRequest", + "SearchResourcesResponse", + "SearchResult", + "UpdateApiRequest", + "UpdateAttributeRequest", + "UpdateDependencyRequest", + "UpdateDeploymentRequest", + "UpdateExternalApiRequest", + "UpdateSpecRequest", + "UpdateVersionRequest", + "Api", + "ApiHubInstance", + "ApiOperation", + "Attribute", + "AttributeValues", + "Definition", + "Dependency", + "DependencyEntityReference", + "DependencyErrorDetail", + "Deployment", + "Documentation", + "ExternalApi", + "HttpOperation", + "Issue", + "LintResponse", + "OpenApiSpecDetails", + "OperationDetails", + "OperationMetadata", + "Owner", + "Path", + "Point", + "Range", + "Schema", + "Spec", + "SpecContents", + "SpecDetails", + "Version", + "Linter", + "LintState", + "Severity", + "CreateHostProjectRegistrationRequest", + "GetHostProjectRegistrationRequest", + "HostProjectRegistration", + 
"ListHostProjectRegistrationsRequest", + "ListHostProjectRegistrationsResponse", + "GetStyleGuideContentsRequest", + "GetStyleGuideRequest", + "LintSpecRequest", + "StyleGuide", + "StyleGuideContents", + "UpdateStyleGuideRequest", + "DisablePluginRequest", + "EnablePluginRequest", + "GetPluginRequest", + "Plugin", + "CreateApiHubInstanceRequest", + "GetApiHubInstanceRequest", + "LookupApiHubInstanceRequest", + "LookupApiHubInstanceResponse", + "CreateRuntimeProjectAttachmentRequest", + "DeleteRuntimeProjectAttachmentRequest", + "GetRuntimeProjectAttachmentRequest", + "ListRuntimeProjectAttachmentsRequest", + "ListRuntimeProjectAttachmentsResponse", + "LookupRuntimeProjectAttachmentRequest", + "LookupRuntimeProjectAttachmentResponse", + "RuntimeProjectAttachment", +) diff --git a/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py new file mode 100644 index 000000000000..364164ddb134 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/google/cloud/apihub/py.typed b/packages/google-cloud-apihub/google/cloud/apihub/py.typed new file mode 100644 index 000000000000..20262ea57b51 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-apihub package uses inline types. diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py new file mode 100644 index 000000000000..ddde89662be7 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.apihub_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.api_hub import ApiHubClient +from .services.api_hub_dependencies import ApiHubDependenciesClient +from .services.api_hub_plugin import ApiHubPluginClient +from .services.host_project_registration_service import ( + HostProjectRegistrationServiceClient, +) +from .services.linting_service import LintingServiceClient +from .services.provisioning import ProvisioningClient +from .services.runtime_project_attachment_service import ( + RuntimeProjectAttachmentServiceClient, +) +from .types.apihub_service import ( + ApiHubResource, + CreateApiRequest, + CreateAttributeRequest, + CreateDependencyRequest, + CreateDeploymentRequest, + CreateExternalApiRequest, + CreateSpecRequest, + CreateVersionRequest, + DeleteApiRequest, + DeleteAttributeRequest, + DeleteDependencyRequest, + DeleteDeploymentRequest, + DeleteExternalApiRequest, + DeleteSpecRequest, + DeleteVersionRequest, + GetApiOperationRequest, + GetApiRequest, + GetAttributeRequest, + GetDefinitionRequest, + GetDependencyRequest, + GetDeploymentRequest, + GetExternalApiRequest, + GetSpecContentsRequest, + GetSpecRequest, + GetVersionRequest, + ListApiOperationsRequest, + ListApiOperationsResponse, + ListApisRequest, + ListApisResponse, + ListAttributesRequest, + ListAttributesResponse, + ListDependenciesRequest, + ListDependenciesResponse, + ListDeploymentsRequest, + ListDeploymentsResponse, + ListExternalApisRequest, + ListExternalApisResponse, + ListSpecsRequest, + ListSpecsResponse, + ListVersionsRequest, + ListVersionsResponse, + SearchResourcesRequest, + SearchResourcesResponse, + SearchResult, + UpdateApiRequest, + UpdateAttributeRequest, + UpdateDependencyRequest, + UpdateDeploymentRequest, + UpdateExternalApiRequest, + UpdateSpecRequest, + UpdateVersionRequest, +) +from .types.common_fields import ( + Api, + ApiHubInstance, + ApiOperation, + Attribute, + AttributeValues, + 
Definition, + Dependency, + DependencyEntityReference, + DependencyErrorDetail, + Deployment, + Documentation, + ExternalApi, + HttpOperation, + Issue, + Linter, + LintResponse, + LintState, + OpenApiSpecDetails, + OperationDetails, + OperationMetadata, + Owner, + Path, + Point, + Range, + Schema, + Severity, + Spec, + SpecContents, + SpecDetails, + Version, +) +from .types.host_project_registration_service import ( + CreateHostProjectRegistrationRequest, + GetHostProjectRegistrationRequest, + HostProjectRegistration, + ListHostProjectRegistrationsRequest, + ListHostProjectRegistrationsResponse, +) +from .types.linting_service import ( + GetStyleGuideContentsRequest, + GetStyleGuideRequest, + LintSpecRequest, + StyleGuide, + StyleGuideContents, + UpdateStyleGuideRequest, +) +from .types.plugin_service import ( + DisablePluginRequest, + EnablePluginRequest, + GetPluginRequest, + Plugin, +) +from .types.provisioning_service import ( + CreateApiHubInstanceRequest, + GetApiHubInstanceRequest, + LookupApiHubInstanceRequest, + LookupApiHubInstanceResponse, +) +from .types.runtime_project_attachment_service import ( + CreateRuntimeProjectAttachmentRequest, + DeleteRuntimeProjectAttachmentRequest, + GetRuntimeProjectAttachmentRequest, + ListRuntimeProjectAttachmentsRequest, + ListRuntimeProjectAttachmentsResponse, + LookupRuntimeProjectAttachmentRequest, + LookupRuntimeProjectAttachmentResponse, + RuntimeProjectAttachment, +) + +__all__ = ( + "Api", + "ApiHubClient", + "ApiHubDependenciesClient", + "ApiHubInstance", + "ApiHubPluginClient", + "ApiHubResource", + "ApiOperation", + "Attribute", + "AttributeValues", + "CreateApiHubInstanceRequest", + "CreateApiRequest", + "CreateAttributeRequest", + "CreateDependencyRequest", + "CreateDeploymentRequest", + "CreateExternalApiRequest", + "CreateHostProjectRegistrationRequest", + "CreateRuntimeProjectAttachmentRequest", + "CreateSpecRequest", + "CreateVersionRequest", + "Definition", + "DeleteApiRequest", + 
"DeleteAttributeRequest", + "DeleteDependencyRequest", + "DeleteDeploymentRequest", + "DeleteExternalApiRequest", + "DeleteRuntimeProjectAttachmentRequest", + "DeleteSpecRequest", + "DeleteVersionRequest", + "Dependency", + "DependencyEntityReference", + "DependencyErrorDetail", + "Deployment", + "DisablePluginRequest", + "Documentation", + "EnablePluginRequest", + "ExternalApi", + "GetApiHubInstanceRequest", + "GetApiOperationRequest", + "GetApiRequest", + "GetAttributeRequest", + "GetDefinitionRequest", + "GetDependencyRequest", + "GetDeploymentRequest", + "GetExternalApiRequest", + "GetHostProjectRegistrationRequest", + "GetPluginRequest", + "GetRuntimeProjectAttachmentRequest", + "GetSpecContentsRequest", + "GetSpecRequest", + "GetStyleGuideContentsRequest", + "GetStyleGuideRequest", + "GetVersionRequest", + "HostProjectRegistration", + "HostProjectRegistrationServiceClient", + "HttpOperation", + "Issue", + "LintResponse", + "LintSpecRequest", + "LintState", + "Linter", + "LintingServiceClient", + "ListApiOperationsRequest", + "ListApiOperationsResponse", + "ListApisRequest", + "ListApisResponse", + "ListAttributesRequest", + "ListAttributesResponse", + "ListDependenciesRequest", + "ListDependenciesResponse", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "ListExternalApisRequest", + "ListExternalApisResponse", + "ListHostProjectRegistrationsRequest", + "ListHostProjectRegistrationsResponse", + "ListRuntimeProjectAttachmentsRequest", + "ListRuntimeProjectAttachmentsResponse", + "ListSpecsRequest", + "ListSpecsResponse", + "ListVersionsRequest", + "ListVersionsResponse", + "LookupApiHubInstanceRequest", + "LookupApiHubInstanceResponse", + "LookupRuntimeProjectAttachmentRequest", + "LookupRuntimeProjectAttachmentResponse", + "OpenApiSpecDetails", + "OperationDetails", + "OperationMetadata", + "Owner", + "Path", + "Plugin", + "Point", + "ProvisioningClient", + "Range", + "RuntimeProjectAttachment", + "RuntimeProjectAttachmentServiceClient", + "Schema", 
+ "SearchResourcesRequest", + "SearchResourcesResponse", + "SearchResult", + "Severity", + "Spec", + "SpecContents", + "SpecDetails", + "StyleGuide", + "StyleGuideContents", + "UpdateApiRequest", + "UpdateAttributeRequest", + "UpdateDependencyRequest", + "UpdateDeploymentRequest", + "UpdateExternalApiRequest", + "UpdateSpecRequest", + "UpdateStyleGuideRequest", + "UpdateVersionRequest", + "Version", +) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json new file mode 100644 index 000000000000..1585fa5ee448 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json @@ -0,0 +1,362 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.apihub_v1", + "protoPackage": "google.cloud.apihub.v1", + "schema": "1.0", + "services": { + "ApiHub": { + "clients": { + "rest": { + "libraryClient": "ApiHubClient", + "rpcs": { + "CreateApi": { + "methods": [ + "create_api" + ] + }, + "CreateAttribute": { + "methods": [ + "create_attribute" + ] + }, + "CreateDeployment": { + "methods": [ + "create_deployment" + ] + }, + "CreateExternalApi": { + "methods": [ + "create_external_api" + ] + }, + "CreateSpec": { + "methods": [ + "create_spec" + ] + }, + "CreateVersion": { + "methods": [ + "create_version" + ] + }, + "DeleteApi": { + "methods": [ + "delete_api" + ] + }, + "DeleteAttribute": { + "methods": [ + "delete_attribute" + ] + }, + "DeleteDeployment": { + "methods": [ + "delete_deployment" + ] + }, + "DeleteExternalApi": { + "methods": [ + "delete_external_api" + ] + }, + "DeleteSpec": { + "methods": [ + "delete_spec" + ] + }, + "DeleteVersion": { + "methods": [ + "delete_version" + ] + }, + "GetApi": { + "methods": [ + "get_api" + ] + }, + "GetApiOperation": { + "methods": [ + "get_api_operation" + ] + }, + "GetAttribute": { + "methods": [ + 
"get_attribute" + ] + }, + "GetDefinition": { + "methods": [ + "get_definition" + ] + }, + "GetDeployment": { + "methods": [ + "get_deployment" + ] + }, + "GetExternalApi": { + "methods": [ + "get_external_api" + ] + }, + "GetSpec": { + "methods": [ + "get_spec" + ] + }, + "GetSpecContents": { + "methods": [ + "get_spec_contents" + ] + }, + "GetVersion": { + "methods": [ + "get_version" + ] + }, + "ListApiOperations": { + "methods": [ + "list_api_operations" + ] + }, + "ListApis": { + "methods": [ + "list_apis" + ] + }, + "ListAttributes": { + "methods": [ + "list_attributes" + ] + }, + "ListDeployments": { + "methods": [ + "list_deployments" + ] + }, + "ListExternalApis": { + "methods": [ + "list_external_apis" + ] + }, + "ListSpecs": { + "methods": [ + "list_specs" + ] + }, + "ListVersions": { + "methods": [ + "list_versions" + ] + }, + "SearchResources": { + "methods": [ + "search_resources" + ] + }, + "UpdateApi": { + "methods": [ + "update_api" + ] + }, + "UpdateAttribute": { + "methods": [ + "update_attribute" + ] + }, + "UpdateDeployment": { + "methods": [ + "update_deployment" + ] + }, + "UpdateExternalApi": { + "methods": [ + "update_external_api" + ] + }, + "UpdateSpec": { + "methods": [ + "update_spec" + ] + }, + "UpdateVersion": { + "methods": [ + "update_version" + ] + } + } + } + } + }, + "ApiHubDependencies": { + "clients": { + "rest": { + "libraryClient": "ApiHubDependenciesClient", + "rpcs": { + "CreateDependency": { + "methods": [ + "create_dependency" + ] + }, + "DeleteDependency": { + "methods": [ + "delete_dependency" + ] + }, + "GetDependency": { + "methods": [ + "get_dependency" + ] + }, + "ListDependencies": { + "methods": [ + "list_dependencies" + ] + }, + "UpdateDependency": { + "methods": [ + "update_dependency" + ] + } + } + } + } + }, + "ApiHubPlugin": { + "clients": { + "rest": { + "libraryClient": "ApiHubPluginClient", + "rpcs": { + "DisablePlugin": { + "methods": [ + "disable_plugin" + ] + }, + "EnablePlugin": { + "methods": [ + 
"enable_plugin" + ] + }, + "GetPlugin": { + "methods": [ + "get_plugin" + ] + } + } + } + } + }, + "HostProjectRegistrationService": { + "clients": { + "rest": { + "libraryClient": "HostProjectRegistrationServiceClient", + "rpcs": { + "CreateHostProjectRegistration": { + "methods": [ + "create_host_project_registration" + ] + }, + "GetHostProjectRegistration": { + "methods": [ + "get_host_project_registration" + ] + }, + "ListHostProjectRegistrations": { + "methods": [ + "list_host_project_registrations" + ] + } + } + } + } + }, + "LintingService": { + "clients": { + "rest": { + "libraryClient": "LintingServiceClient", + "rpcs": { + "GetStyleGuide": { + "methods": [ + "get_style_guide" + ] + }, + "GetStyleGuideContents": { + "methods": [ + "get_style_guide_contents" + ] + }, + "LintSpec": { + "methods": [ + "lint_spec" + ] + }, + "UpdateStyleGuide": { + "methods": [ + "update_style_guide" + ] + } + } + } + } + }, + "Provisioning": { + "clients": { + "rest": { + "libraryClient": "ProvisioningClient", + "rpcs": { + "CreateApiHubInstance": { + "methods": [ + "create_api_hub_instance" + ] + }, + "GetApiHubInstance": { + "methods": [ + "get_api_hub_instance" + ] + }, + "LookupApiHubInstance": { + "methods": [ + "lookup_api_hub_instance" + ] + } + } + } + } + }, + "RuntimeProjectAttachmentService": { + "clients": { + "rest": { + "libraryClient": "RuntimeProjectAttachmentServiceClient", + "rpcs": { + "CreateRuntimeProjectAttachment": { + "methods": [ + "create_runtime_project_attachment" + ] + }, + "DeleteRuntimeProjectAttachment": { + "methods": [ + "delete_runtime_project_attachment" + ] + }, + "GetRuntimeProjectAttachment": { + "methods": [ + "get_runtime_project_attachment" + ] + }, + "ListRuntimeProjectAttachments": { + "methods": [ + "list_runtime_project_attachments" + ] + }, + "LookupRuntimeProjectAttachment": { + "methods": [ + "lookup_runtime_project_attachment" + ] + } + } + } + } + } + } +} diff --git 
a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py new file mode 100644 index 000000000000..364164ddb134 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/py.typed b/packages/google-cloud-apihub/google/cloud/apihub_v1/py.typed new file mode 100644 index 000000000000..20262ea57b51 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-apihub package uses inline types. diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py similarity index 89% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/__init__.py rename to packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py index 70d33daa80f4..0d50a3548806 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py @@ -13,6 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import TeamServiceClient +from .client import ApiHubClient -__all__ = ("TeamServiceClient",) +__all__ = ("ApiHubClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/async_client.py new file mode 100644 index 000000000000..9d1b494ddccf --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/async_client.py @@ -0,0 +1,4964 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.apihub_v1.services.api_hub import pagers +from google.cloud.apihub_v1.types import apihub_service, common_fields + +from .client import ApiHubClient +from .transports.base import DEFAULT_CLIENT_INFO, ApiHubTransport +from .transports.grpc_asyncio import ApiHubGrpcAsyncIOTransport + + +class ApiHubAsyncClient: + """This service provides all methods related to the API hub.""" + + _client: ApiHubClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. 
Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = ApiHubClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ApiHubClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ApiHubClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ApiHubClient._DEFAULT_UNIVERSE + + api_path = staticmethod(ApiHubClient.api_path) + parse_api_path = staticmethod(ApiHubClient.parse_api_path) + api_operation_path = staticmethod(ApiHubClient.api_operation_path) + parse_api_operation_path = staticmethod(ApiHubClient.parse_api_operation_path) + attribute_path = staticmethod(ApiHubClient.attribute_path) + parse_attribute_path = staticmethod(ApiHubClient.parse_attribute_path) + definition_path = staticmethod(ApiHubClient.definition_path) + parse_definition_path = staticmethod(ApiHubClient.parse_definition_path) + deployment_path = staticmethod(ApiHubClient.deployment_path) + parse_deployment_path = staticmethod(ApiHubClient.parse_deployment_path) + external_api_path = staticmethod(ApiHubClient.external_api_path) + parse_external_api_path = staticmethod(ApiHubClient.parse_external_api_path) + spec_path = staticmethod(ApiHubClient.spec_path) + parse_spec_path = staticmethod(ApiHubClient.parse_spec_path) + version_path = staticmethod(ApiHubClient.version_path) + parse_version_path = staticmethod(ApiHubClient.parse_version_path) + common_billing_account_path = staticmethod(ApiHubClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod( + ApiHubClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ApiHubClient.common_folder_path) + parse_common_folder_path = staticmethod(ApiHubClient.parse_common_folder_path) + common_organization_path = staticmethod(ApiHubClient.common_organization_path) + parse_common_organization_path = staticmethod( + ApiHubClient.parse_common_organization_path + ) + common_project_path = staticmethod(ApiHubClient.common_project_path) + parse_common_project_path = 
staticmethod(ApiHubClient.parse_common_project_path) + common_location_path = staticmethod(ApiHubClient.common_location_path) + parse_common_location_path = staticmethod(ApiHubClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ApiHubAsyncClient: The constructed client. + """ + return ApiHubClient.from_service_account_info.__func__(ApiHubAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ApiHubAsyncClient: The constructed client. + """ + return ApiHubClient.from_service_account_file.__func__(ApiHubAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ApiHubClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ApiHubTransport: + """Returns the transport used by the client instance. + + Returns: + ApiHubTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = ApiHubClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ApiHubTransport, Callable[..., ApiHubTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the api hub async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ApiHubTransport,Callable[..., ApiHubTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ApiHubTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ApiHubClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_api( + self, + request: Optional[Union[apihub_service.CreateApiRequest, dict]] = None, + *, + parent: Optional[str] = None, + api: Optional[common_fields.Api] = None, + api_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Api: + r"""Create an API resource in the API hub. + Once an API resource is created, versions can be added + to it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_create_api(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + api = apihub_v1.Api() + api.display_name = "display_name_value" + + request = apihub_v1.CreateApiRequest( + parent="parent_value", + api=api, + ) + + # Make the request + response = await client.create_api(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.CreateApiRequest, dict]]): + The request object. The [CreateApi][google.cloud.apihub.v1.ApiHub.CreateApi] + method's request. + parent (:class:`str`): + Required. The parent resource for the API resource. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + api (:class:`google.cloud.apihub_v1.types.Api`): + Required. The API resource to create. + This corresponds to the ``api`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + api_id (:class:`str`): + Optional. The ID to use for the API resource, which will + become the final component of the API's resource name. + This field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another API resource in the API hub. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid + characters are /[a-z][A-Z][0-9]-_/. + + This corresponds to the ``api_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Api: + An API resource in the API Hub. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, api, api_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.CreateApiRequest): + request = apihub_service.CreateApiRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if api is not None: + request.api = api + if api_id is not None: + request.api_id = api_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_api + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_api( + self, + request: Optional[Union[apihub_service.GetApiRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Api: + r"""Get API resource details including the API versions + contained in it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_api(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetApiRequest( + name="name_value", + ) + + # Make the request + response = await client.get_api(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetApiRequest, dict]]): + The request object. The [GetApi][google.cloud.apihub.v1.ApiHub.GetApi] + method's request. + name (:class:`str`): + Required. The name of the API resource to retrieve. + Format: + ``projects/{project}/locations/{location}/apis/{api}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Api: + An API resource in the API Hub. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetApiRequest): + request = apihub_service.GetApiRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_api] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_apis( + self, + request: Optional[Union[apihub_service.ListApisRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListApisAsyncPager: + r"""List API resources in the API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_list_apis(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.ListApisRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_apis(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.ListApisRequest, dict]]): + The request object. The [ListApis][google.cloud.apihub.v1.ApiHub.ListApis] + method's request. + parent (:class:`str`): + Required. The parent, which owns this collection of API + resources. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListApisAsyncPager: + The [ListApis][google.cloud.apihub.v1.ApiHub.ListApis] + method's response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListApisRequest): + request = apihub_service.ListApisRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_apis + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListApisAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_api( + self, + request: Optional[Union[apihub_service.UpdateApiRequest, dict]] = None, + *, + api: Optional[common_fields.Api] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Api: + r"""Update an API resource in the API hub. 
The following fields in + the [API][] can be updated: + + - [display_name][google.cloud.apihub.v1.Api.display_name] + - [description][google.cloud.apihub.v1.Api.description] + - [owner][google.cloud.apihub.v1.Api.owner] + - [documentation][google.cloud.apihub.v1.Api.documentation] + - [target_user][google.cloud.apihub.v1.Api.target_user] + - [team][google.cloud.apihub.v1.Api.team] + - [business_unit][google.cloud.apihub.v1.Api.business_unit] + - [maturity_level][google.cloud.apihub.v1.Api.maturity_level] + - [attributes][google.cloud.apihub.v1.Api.attributes] + + The + [update_mask][google.cloud.apihub.v1.UpdateApiRequest.update_mask] + should be used to specify the fields being updated. + + Updating the owner field requires complete owner message and + updates both owner and email fields. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_update_api(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + api = apihub_v1.Api() + api.display_name = "display_name_value" + + request = apihub_v1.UpdateApiRequest( + api=api, + ) + + # Make the request + response = await client.update_api(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.UpdateApiRequest, dict]]): + The request object. The [UpdateApi][google.cloud.apihub.v1.ApiHub.UpdateApi] + method's request. + api (:class:`google.cloud.apihub_v1.types.Api`): + Required. The API resource to update. 
+ + The API resource's ``name`` field is used to identify + the API resource to update. Format: + ``projects/{project}/locations/{location}/apis/{api}`` + + This corresponds to the ``api`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Api: + An API resource in the API Hub. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([api, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.UpdateApiRequest): + request = apihub_service.UpdateApiRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if api is not None: + request.api = api + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_api + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("api.name", request.api.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_api( + self, + request: Optional[Union[apihub_service.DeleteApiRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete an API resource in the API hub. API can only + be deleted if all underlying versions are deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_delete_api(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteApiRequest( + name="name_value", + ) + + # Make the request + await client.delete_api(request=request) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.DeleteApiRequest, dict]]): + The request object. The [DeleteApi][google.cloud.apihub.v1.ApiHub.DeleteApi] + method's request. + name (:class:`str`): + Required. The name of the API resource to delete. 
+ Format: + ``projects/{project}/locations/{location}/apis/{api}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.DeleteApiRequest): + request = apihub_service.DeleteApiRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_api + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_version( + self, + request: Optional[Union[apihub_service.CreateVersionRequest, dict]] = None, + *, + parent: Optional[str] = None, + version: Optional[common_fields.Version] = None, + version_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Version: + r"""Create an API version for an API resource in the API + hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_create_version(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + version = apihub_v1.Version() + version.display_name = "display_name_value" + + request = apihub_v1.CreateVersionRequest( + parent="parent_value", + version=version, + ) + + # Make the request + response = await client.create_version(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.CreateVersionRequest, dict]]): + The request object. The + [CreateVersion][google.cloud.apihub.v1.ApiHub.CreateVersion] + method's request. + parent (:class:`str`): + Required. The parent resource for API version. Format: + ``projects/{project}/locations/{location}/apis/{api}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ version (:class:`google.cloud.apihub_v1.types.Version`): + Required. The version to create. + This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + version_id (:class:`str`): + Optional. The ID to use for the API version, which will + become the final component of the version's resource + name. This field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another version in the API resource. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid + characters are /[a-z][A-Z][0-9]-_/. + + This corresponds to the ``version_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Version: + Represents a version of the API + resource in API hub. This is also + referred to as the API version. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, version, version_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, apihub_service.CreateVersionRequest): + request = apihub_service.CreateVersionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if version is not None: + request.version = version + if version_id is not None: + request.version_id = version_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_version + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_version( + self, + request: Optional[Union[apihub_service.GetVersionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Version: + r"""Get details about the API version of an API resource. + This will include information about the specs and + operations present in the API version as well as the + deployments linked to it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_version(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetVersionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_version(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetVersionRequest, dict]]): + The request object. The + [GetVersion][google.cloud.apihub.v1.ApiHub.GetVersion] + method's request. + name (:class:`str`): + Required. The name of the API version to retrieve. + Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Version: + Represents a version of the API + resource in API hub. This is also + referred to as the API version. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, apihub_service.GetVersionRequest): + request = apihub_service.GetVersionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_version + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_versions( + self, + request: Optional[Union[apihub_service.ListVersionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVersionsAsyncPager: + r"""List API versions of an API resource in the API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_list_versions(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.ListVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_versions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.ListVersionsRequest, dict]]): + The request object. The + [ListVersions][google.cloud.apihub.v1.ApiHub.ListVersions] + method's request. + parent (:class:`str`): + Required. The parent which owns this collection of API + versions i.e., the API resource Format: + ``projects/{project}/locations/{location}/apis/{api}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListVersionsAsyncPager: + The [ListVersions][google.cloud.apihub.v1.ApiHub.ListVersions] method's + response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListVersionsRequest): + request = apihub_service.ListVersionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_versions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListVersionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_version( + self, + request: Optional[Union[apihub_service.UpdateVersionRequest, dict]] = None, + *, + version: Optional[common_fields.Version] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Version: + r"""Update API version. 
The following fields in the + [version][google.cloud.apihub.v1.Version] can be updated + currently: + + - [display_name][google.cloud.apihub.v1.Version.display_name] + - [description][google.cloud.apihub.v1.Version.description] + - [documentation][google.cloud.apihub.v1.Version.documentation] + - [deployments][google.cloud.apihub.v1.Version.deployments] + - [lifecycle][google.cloud.apihub.v1.Version.lifecycle] + - [compliance][google.cloud.apihub.v1.Version.compliance] + - [accreditation][google.cloud.apihub.v1.Version.accreditation] + - [attributes][google.cloud.apihub.v1.Version.attributes] + + The + [update_mask][google.cloud.apihub.v1.UpdateVersionRequest.update_mask] + should be used to specify the fields being updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_update_version(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + version = apihub_v1.Version() + version.display_name = "display_name_value" + + request = apihub_v1.UpdateVersionRequest( + version=version, + ) + + # Make the request + response = await client.update_version(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.UpdateVersionRequest, dict]]): + The request object. The + [UpdateVersion][google.cloud.apihub.v1.ApiHub.UpdateVersion] + method's request. + version (:class:`google.cloud.apihub_v1.types.Version`): + Required. The API version to update. + + The version's ``name`` field is used to identify the API + version to update. 
Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + + This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Version: + Represents a version of the API + resource in API hub. This is also + referred to as the API version. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([version, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.UpdateVersionRequest): + request = apihub_service.UpdateVersionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if version is not None: + request.version = version + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_version + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("version.name", request.version.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_version( + self, + request: Optional[Union[apihub_service.DeleteVersionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete an API version. Version can only be deleted if + all underlying specs, operations, definitions and linked + deployments are deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_delete_version(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteVersionRequest( + name="name_value", + ) + + # Make the request + await client.delete_version(request=request) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.DeleteVersionRequest, dict]]): + The request object. The + [DeleteVersion][google.cloud.apihub.v1.ApiHub.DeleteVersion] + method's request. 
+ name (:class:`str`): + Required. The name of the version to delete. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.DeleteVersionRequest): + request = apihub_service.DeleteVersionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_version + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_spec( + self, + request: Optional[Union[apihub_service.CreateSpecRequest, dict]] = None, + *, + parent: Optional[str] = None, + spec: Optional[common_fields.Spec] = None, + spec_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Spec: + r"""Add a spec to an API version in the API hub. Multiple specs can + be added to an API version. Note, while adding a spec, at least + one of ``contents`` or ``source_uri`` must be provided. If + ``contents`` is provided, then ``spec_type`` must also be + provided. + + On adding a spec with contents to the version, the operations + present in it will be added to the version.Note that the file + contents in the spec should be of the same type as defined in + the + ``projects/{project}/locations/{location}/attributes/system-spec-type`` + attribute associated with spec resource. Note that specs of + various types can be uploaded, however parsing of details is + supported for OpenAPI spec currently. + + In order to access the information parsed from the spec, use the + [GetSpec][google.cloud.apihub.v1.ApiHub.GetSpec] method. In + order to access the raw contents for a particular spec, use the + [GetSpecContents][google.cloud.apihub.v1.ApiHub.GetSpecContents] + method. In order to access the operations parsed from the spec, + use the + [ListAPIOperations][google.cloud.apihub.v1.ApiHub.ListApiOperations] + method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_create_spec(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + spec = apihub_v1.Spec() + spec.display_name = "display_name_value" + spec.spec_type.enum_values.values.id = "id_value" + spec.spec_type.enum_values.values.display_name = "display_name_value" + + request = apihub_v1.CreateSpecRequest( + parent="parent_value", + spec=spec, + ) + + # Make the request + response = await client.create_spec(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.CreateSpecRequest, dict]]): + The request object. The + [CreateSpec][google.cloud.apihub.v1.ApiHub.CreateSpec] + method's request. + parent (:class:`str`): + Required. The parent resource for Spec. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + spec (:class:`google.cloud.apihub_v1.types.Spec`): + Required. The spec to create. + This corresponds to the ``spec`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + spec_id (:class:`str`): + Optional. The ID to use for the spec, which will become + the final component of the spec's resource name. This + field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another spec in the API resource. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid + characters are /[a-z][A-Z][0-9]-_/. 
+ + This corresponds to the ``spec_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Spec: + Represents a spec associated with an + API version in the API Hub. Note that + specs of various types can be uploaded, + however parsing of details is supported + for OpenAPI spec currently. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, spec, spec_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.CreateSpecRequest): + request = apihub_service.CreateSpecRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if spec is not None: + request.spec = spec + if spec_id is not None: + request.spec_id = spec_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_spec( + self, + request: Optional[Union[apihub_service.GetSpecRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Spec: + r"""Get details about the information parsed from a spec. Note that + this method does not return the raw spec contents. Use + [GetSpecContents][google.cloud.apihub.v1.ApiHub.GetSpecContents] + method to retrieve the same. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_spec(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetSpecRequest( + name="name_value", + ) + + # Make the request + response = await client.get_spec(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetSpecRequest, dict]]): + The request object. The [GetSpec][google.cloud.apihub.v1.ApiHub.GetSpec] + method's request. + name (:class:`str`): + Required. The name of the spec to retrieve. 
Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Spec: + Represents a spec associated with an + API version in the API Hub. Note that + specs of various types can be uploaded, + however parsing of details is supported + for OpenAPI spec currently. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetSpecRequest): + request = apihub_service.GetSpecRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_spec] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_spec_contents( + self, + request: Optional[Union[apihub_service.GetSpecContentsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.SpecContents: + r"""Get spec contents. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_spec_contents(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetSpecContentsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_spec_contents(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetSpecContentsRequest, dict]]): + The request object. The + [GetSpecContents][google.cloud.apihub.v1.ApiHub.GetSpecContents] + method's request. + name (:class:`str`): + Required. The name of the spec whose contents need to be + retrieved. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.SpecContents: + The spec contents. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetSpecContentsRequest): + request = apihub_service.GetSpecContentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_spec_contents + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_specs( + self, + request: Optional[Union[apihub_service.ListSpecsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSpecsAsyncPager: + r"""List specs corresponding to a particular API + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_list_specs(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.ListSpecsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_specs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.ListSpecsRequest, dict]]): + The request object. The [ListSpecs][ListSpecs] method's request. + parent (:class:`str`): + Required. The parent, which owns this collection of + specs. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListSpecsAsyncPager: + The [ListSpecs][google.cloud.apihub.v1.ApiHub.ListSpecs] + method's response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListSpecsRequest): + request = apihub_service.ListSpecsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_specs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSpecsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_spec( + self, + request: Optional[Union[apihub_service.UpdateSpecRequest, dict]] = None, + *, + spec: Optional[common_fields.Spec] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Spec: + r"""Update spec. The following fields in the + [spec][google.cloud.apihub.v1.Spec] can be updated: + + - [display_name][google.cloud.apihub.v1.Spec.display_name] + - [source_uri][google.cloud.apihub.v1.Spec.source_uri] + - [lint_response][google.cloud.apihub.v1.Spec.lint_response] + - [attributes][google.cloud.apihub.v1.Spec.attributes] + - [contents][google.cloud.apihub.v1.Spec.contents] + - [spec_type][google.cloud.apihub.v1.Spec.spec_type] + + In case of an OAS spec, updating spec contents can lead to: + + 1. Creation, deletion and update of operations. + 2. Creation, deletion and update of definitions. + 3. Update of other info parsed out from the new spec. + + In case of contents or source_uri being present in update mask, + spec_type must also be present. Also, spec_type can not be + present in update mask if contents or source_uri is not present. + + The + [update_mask][google.cloud.apihub.v1.UpdateSpecRequest.update_mask] + should be used to specify the fields being updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_update_spec(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + spec = apihub_v1.Spec() + spec.display_name = "display_name_value" + spec.spec_type.enum_values.values.id = "id_value" + spec.spec_type.enum_values.values.display_name = "display_name_value" + + request = apihub_v1.UpdateSpecRequest( + spec=spec, + ) + + # Make the request + response = await client.update_spec(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.UpdateSpecRequest, dict]]): + The request object. The + [UpdateSpec][google.cloud.apihub.v1.ApiHub.UpdateSpec] + method's request. + spec (:class:`google.cloud.apihub_v1.types.Spec`): + Required. The spec to update. + + The spec's ``name`` field is used to identify the spec + to update. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + + This corresponds to the ``spec`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Spec: + Represents a spec associated with an + API version in the API Hub. 
Note that + specs of various types can be uploaded, + however parsing of details is supported + for OpenAPI spec currently. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([spec, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.UpdateSpecRequest): + request = apihub_service.UpdateSpecRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if spec is not None: + request.spec = spec + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("spec.name", request.spec.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_spec( + self, + request: Optional[Union[apihub_service.DeleteSpecRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a spec. 
+ Deleting a spec will also delete the associated + operations from the version. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_delete_spec(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteSpecRequest( + name="name_value", + ) + + # Make the request + await client.delete_spec(request=request) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.DeleteSpecRequest, dict]]): + The request object. The + [DeleteSpec][google.cloud.apihub.v1.ApiHub.DeleteSpec] + method's request. + name (:class:`str`): + Required. The name of the spec to delete. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.DeleteSpecRequest): + request = apihub_service.DeleteSpecRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_api_operation( + self, + request: Optional[Union[apihub_service.GetApiOperationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ApiOperation: + r"""Get details about a particular operation in API + version. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_api_operation(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetApiOperationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_api_operation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetApiOperationRequest, dict]]): + The request object. The + [GetApiOperation][google.cloud.apihub.v1.ApiHub.GetApiOperation] + method's request. + name (:class:`str`): + Required. The name of the operation to retrieve. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.ApiOperation: + Represents an operation contained in + an API version in the API Hub. An + operation is added/updated/deleted in an + API version when a new spec is added or + an existing spec is updated/deleted in a + version. Currently, an operation will be + created only corresponding to OpenAPI + spec as parsing is supported for OpenAPI + spec. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetApiOperationRequest): + request = apihub_service.GetApiOperationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_api_operation + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_api_operations( + self, + request: Optional[Union[apihub_service.ListApiOperationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListApiOperationsAsyncPager: + r"""List operations in an API version. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_list_api_operations(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.ListApiOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_api_operations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.ListApiOperationsRequest, dict]]): + The request object. The + [ListApiOperations][google.cloud.apihub.v1.ApiHub.ListApiOperations] + method's request. + parent (:class:`str`): + Required. The parent which owns this collection of + operations i.e., the API version. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListApiOperationsAsyncPager: + The [ListApiOperations][google.cloud.apihub.v1.ApiHub.ListApiOperations] + method's response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListApiOperationsRequest): + request = apihub_service.ListApiOperationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_api_operations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListApiOperationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_definition( + self, + request: Optional[Union[apihub_service.GetDefinitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Definition: + r"""Get details about a definition in an API version. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_definition(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetDefinitionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_definition(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetDefinitionRequest, dict]]): + The request object. The + [GetDefinition][google.cloud.apihub.v1.ApiHub.GetDefinition] + method's request. + name (:class:`str`): + Required. The name of the definition to retrieve. + Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/definitions/{definition}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Definition: + Represents a definition for example schema, request, response definitions + contained in an API version. A definition is + added/updated/deleted in an API version when a new + spec is added or an existing spec is updated/deleted + in a version. Currently, definition will be created + only corresponding to OpenAPI spec as parsing is + supported for OpenAPI spec. 
Also, within OpenAPI + spec, only schema object is supported. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetDefinitionRequest): + request = apihub_service.GetDefinitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_definition + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_deployment( + self, + request: Optional[Union[apihub_service.CreateDeploymentRequest, dict]] = None, + *, + parent: Optional[str] = None, + deployment: Optional[common_fields.Deployment] = None, + deployment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Deployment: + r"""Create a deployment resource in the API hub. 
+ Once a deployment resource is created, it can be + associated with API versions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_create_deployment(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + deployment = apihub_v1.Deployment() + deployment.display_name = "display_name_value" + deployment.deployment_type.enum_values.values.id = "id_value" + deployment.deployment_type.enum_values.values.display_name = "display_name_value" + deployment.resource_uri = "resource_uri_value" + deployment.endpoints = ['endpoints_value1', 'endpoints_value2'] + + request = apihub_v1.CreateDeploymentRequest( + parent="parent_value", + deployment=deployment, + ) + + # Make the request + response = await client.create_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.CreateDeploymentRequest, dict]]): + The request object. The + [CreateDeployment][google.cloud.apihub.v1.ApiHub.CreateDeployment] + method's request. + parent (:class:`str`): + Required. The parent resource for the deployment + resource. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment (:class:`google.cloud.apihub_v1.types.Deployment`): + Required. The deployment resource to + create. + + This corresponds to the ``deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ deployment_id (:class:`str`): + Optional. The ID to use for the deployment resource, + which will become the final component of the + deployment's resource name. This field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another deployment resource in the API hub. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid + characters are /[a-z][A-Z][0-9]-_/. + + This corresponds to the ``deployment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Deployment: + Details of the deployment where APIs + are hosted. A deployment could represent + an Apigee proxy, API gateway, other + Google Cloud services or non-Google + Cloud services as well. A deployment + entity is a root level entity in the API + hub and exists independent of any API. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deployment, deployment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, apihub_service.CreateDeploymentRequest): + request = apihub_service.CreateDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deployment is not None: + request.deployment = deployment + if deployment_id is not None: + request.deployment_id = deployment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deployment( + self, + request: Optional[Union[apihub_service.GetDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Deployment: + r"""Get details about a deployment and the API versions + linked to it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_deployment(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetDeploymentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetDeploymentRequest, dict]]): + The request object. The + [GetDeployment][google.cloud.apihub.v1.ApiHub.GetDeployment] + method's request. + name (:class:`str`): + Required. The name of the deployment resource to + retrieve. Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Deployment: + Details of the deployment where APIs + are hosted. A deployment could represent + an Apigee proxy, API gateway, other + Google Cloud services or non-Google + Cloud services as well. A deployment + entity is a root level entity in the API + hub and exists independent of any API. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetDeploymentRequest): + request = apihub_service.GetDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_deployments( + self, + request: Optional[Union[apihub_service.ListDeploymentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeploymentsAsyncPager: + r"""List deployment resources in the API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_list_deployments(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.ListDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.ListDeploymentsRequest, dict]]): + The request object. The + [ListDeployments][google.cloud.apihub.v1.ApiHub.ListDeployments] + method's request. + parent (:class:`str`): + Required. The parent, which owns this collection of + deployment resources. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListDeploymentsAsyncPager: + The [ListDeployments][google.cloud.apihub.v1.ApiHub.ListDeployments] method's + response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListDeploymentsRequest): + request = apihub_service.ListDeploymentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_deployments + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeploymentsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_deployment( + self, + request: Optional[Union[apihub_service.UpdateDeploymentRequest, dict]] = None, + *, + deployment: Optional[common_fields.Deployment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Deployment: + r"""Update a deployment resource in the API hub. The following + fields in the [deployment + resource][google.cloud.apihub.v1.Deployment] can be updated: + + - [display_name][google.cloud.apihub.v1.Deployment.display_name] + - [description][google.cloud.apihub.v1.Deployment.description] + - [documentation][google.cloud.apihub.v1.Deployment.documentation] + - [deployment_type][google.cloud.apihub.v1.Deployment.deployment_type] + - [resource_uri][google.cloud.apihub.v1.Deployment.resource_uri] + - [endpoints][google.cloud.apihub.v1.Deployment.endpoints] + - [slo][google.cloud.apihub.v1.Deployment.slo] + - [environment][google.cloud.apihub.v1.Deployment.environment] + - [attributes][google.cloud.apihub.v1.Deployment.attributes] + + The + [update_mask][google.cloud.apihub.v1.UpdateDeploymentRequest.update_mask] + should be used to specify the fields being updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_update_deployment(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + deployment = apihub_v1.Deployment() + deployment.display_name = "display_name_value" + deployment.deployment_type.enum_values.values.id = "id_value" + deployment.deployment_type.enum_values.values.display_name = "display_name_value" + deployment.resource_uri = "resource_uri_value" + deployment.endpoints = ['endpoints_value1', 'endpoints_value2'] + + request = apihub_v1.UpdateDeploymentRequest( + deployment=deployment, + ) + + # Make the request + response = await client.update_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.UpdateDeploymentRequest, dict]]): + The request object. The + [UpdateDeployment][google.cloud.apihub.v1.ApiHub.UpdateDeployment] + method's request. + deployment (:class:`google.cloud.apihub_v1.types.Deployment`): + Required. The deployment resource to update. + + The deployment resource's ``name`` field is used to + identify the deployment resource to update. Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + + This corresponds to the ``deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Deployment: + Details of the deployment where APIs + are hosted. A deployment could represent + an Apigee proxy, API gateway, other + Google Cloud services or non-Google + Cloud services as well. A deployment + entity is a root level entity in the API + hub and exists independent of any API. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deployment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.UpdateDeploymentRequest): + request = apihub_service.UpdateDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deployment is not None: + request.deployment = deployment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deployment.name", request.deployment.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_deployment( + self, + request: Optional[Union[apihub_service.DeleteDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a deployment resource in the API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_delete_deployment(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteDeploymentRequest( + name="name_value", + ) + + # Make the request + await client.delete_deployment(request=request) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.DeleteDeploymentRequest, dict]]): + The request object. The + [DeleteDeployment][google.cloud.apihub.v1.ApiHub.DeleteDeployment] + method's request. + name (:class:`str`): + Required. The name of the deployment resource to delete. + Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.DeleteDeploymentRequest): + request = apihub_service.DeleteDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_attribute( + self, + request: Optional[Union[apihub_service.CreateAttributeRequest, dict]] = None, + *, + parent: Optional[str] = None, + attribute: Optional[common_fields.Attribute] = None, + attribute_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Attribute: + r"""Create a user defined attribute. + + Certain pre defined attributes are already created by the API + hub. 
These attributes will have type as ``SYSTEM_DEFINED`` and + can be listed via + [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] + method. Allowed values for the same can be updated via + [UpdateAttribute][google.cloud.apihub.v1.ApiHub.UpdateAttribute] + method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_create_attribute(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + attribute = apihub_v1.Attribute() + attribute.display_name = "display_name_value" + attribute.scope = "PLUGIN" + attribute.data_type = "STRING" + + request = apihub_v1.CreateAttributeRequest( + parent="parent_value", + attribute=attribute, + ) + + # Make the request + response = await client.create_attribute(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.CreateAttributeRequest, dict]]): + The request object. The + [CreateAttribute][google.cloud.apihub.v1.ApiHub.CreateAttribute] + method's request. + parent (:class:`str`): + Required. The parent resource for Attribute. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + attribute (:class:`google.cloud.apihub_v1.types.Attribute`): + Required. The attribute to create. + This corresponds to the ``attribute`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + attribute_id (:class:`str`): + Optional. 
The ID to use for the attribute, which will + become the final component of the attribute's resource + name. This field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another attribute resource in the API hub. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid + characters are /[a-z][A-Z][0-9]-_/. + + This corresponds to the ``attribute_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Attribute: + An attribute in the API Hub. + An attribute is a name value pair which + can be attached to different resources + in the API hub based on the scope of the + attribute. Attributes can either be + pre-defined by the API Hub or created by + users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, attribute, attribute_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.CreateAttributeRequest): + request = apihub_service.CreateAttributeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if attribute is not None: + request.attribute = attribute + if attribute_id is not None: + request.attribute_id = attribute_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_attribute + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_attribute( + self, + request: Optional[Union[apihub_service.GetAttributeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Attribute: + r"""Get details about the attribute. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_attribute(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetAttributeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_attribute(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetAttributeRequest, dict]]): + The request object. The + [GetAttribute][google.cloud.apihub.v1.ApiHub.GetAttribute] + method's request. + name (:class:`str`): + Required. The name of the attribute to retrieve. Format: + ``projects/{project}/locations/{location}/attributes/{attribute}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Attribute: + An attribute in the API Hub. + An attribute is a name value pair which + can be attached to different resources + in the API hub based on the scope of the + attribute. Attributes can either be + pre-defined by the API Hub or created by + users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetAttributeRequest): + request = apihub_service.GetAttributeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_attribute + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_attribute( + self, + request: Optional[Union[apihub_service.UpdateAttributeRequest, dict]] = None, + *, + attribute: Optional[common_fields.Attribute] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Attribute: + r"""Update the attribute. The following fields in the [Attribute + resource][google.cloud.apihub.v1.Attribute] can be updated: + + - [display_name][google.cloud.apihub.v1.Attribute.display_name] + The display name can be updated for user defined attributes + only. + - [description][google.cloud.apihub.v1.Attribute.description] + The description can be updated for user defined attributes + only. 
+ - [allowed_values][google.cloud.apihub.v1.Attribute.allowed_values] + To update the list of allowed values, clients need to use the + fetched list of allowed values and add or remove values to or + from the same list. The mutable allowed values can be updated + for both user defined and System defined attributes. The + immutable allowed values cannot be updated or deleted. The + updated list of allowed values cannot be empty. If an allowed + value that is already used by some resource's attribute is + deleted, then the association between the resource and the + attribute value will also be deleted. + - [cardinality][google.cloud.apihub.v1.Attribute.cardinality] + The cardinality can be updated for user defined attributes + only. Cardinality can only be increased during an update. + + The + [update_mask][google.cloud.apihub.v1.UpdateAttributeRequest.update_mask] + should be used to specify the fields being updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_update_attribute(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + attribute = apihub_v1.Attribute() + attribute.display_name = "display_name_value" + attribute.scope = "PLUGIN" + attribute.data_type = "STRING" + + request = apihub_v1.UpdateAttributeRequest( + attribute=attribute, + ) + + # Make the request + response = await client.update_attribute(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.UpdateAttributeRequest, dict]]): + The request object. 
The + [UpdateAttribute][google.cloud.apihub.v1.ApiHub.UpdateAttribute] + method's request. + attribute (:class:`google.cloud.apihub_v1.types.Attribute`): + Required. The attribute to update. + + The attribute's ``name`` field is used to identify the + attribute to update. Format: + ``projects/{project}/locations/{location}/attributes/{attribute}`` + + This corresponds to the ``attribute`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Attribute: + An attribute in the API Hub. + An attribute is a name value pair which + can be attached to different resources + in the API hub based on the scope of the + attribute. Attributes can either be + pre-defined by the API Hub or created by + users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([attribute, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, apihub_service.UpdateAttributeRequest): + request = apihub_service.UpdateAttributeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if attribute is not None: + request.attribute = attribute + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_attribute + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("attribute.name", request.attribute.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_attribute( + self, + request: Optional[Union[apihub_service.DeleteAttributeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete an attribute. + + Note: System defined attributes cannot be deleted. All + associations of the attribute being deleted with any API + hub resource will also get deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_delete_attribute(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteAttributeRequest( + name="name_value", + ) + + # Make the request + await client.delete_attribute(request=request) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.DeleteAttributeRequest, dict]]): + The request object. The + [DeleteAttribute][google.cloud.apihub.v1.ApiHub.DeleteAttribute] + method's request. + name (:class:`str`): + Required. The name of the attribute to delete. Format: + ``projects/{project}/locations/{location}/attributes/{attribute}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.DeleteAttributeRequest): + request = apihub_service.DeleteAttributeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_attribute + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_attributes( + self, + request: Optional[Union[apihub_service.ListAttributesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAttributesAsyncPager: + r"""List all attributes. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_list_attributes(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.ListAttributesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_attributes(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.ListAttributesRequest, dict]]): + The request object. 
The + [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] + method's request. + parent (:class:`str`): + Required. The parent resource for Attribute. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListAttributesAsyncPager: + The [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] method's + response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListAttributesRequest): + request = apihub_service.ListAttributesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_attributes + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAttributesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def search_resources( + self, + request: Optional[Union[apihub_service.SearchResourcesRequest, dict]] = None, + *, + location: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchResourcesAsyncPager: + r"""Search across API-Hub resources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_search_resources(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.SearchResourcesRequest( + location="location_value", + query="query_value", + ) + + # Make the request + page_result = client.search_resources(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.SearchResourcesRequest, dict]]): + The request object. The + [SearchResources][google.cloud.apihub.v1.ApiHub.SearchResources] + method's request. + location (:class:`str`): + Required. The resource name of the location which will + be of the type + ``projects/{project_id}/locations/{location_id}``. This + field is used to identify the instance of API-Hub in + which resources should be searched. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (:class:`str`): + Required. The free text search query. + This query can contain keywords which + could be related to any detail of the + API-Hub resources such display names, + descriptions, attributes etc. + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.SearchResourcesAsyncPager: + Response for the + [SearchResources][google.cloud.apihub.v1.ApiHub.SearchResources] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, query]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.SearchResourcesRequest): + request = apihub_service.SearchResourcesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if location is not None: + request.location = location + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.search_resources + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.SearchResourcesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_external_api( + self, + request: Optional[Union[apihub_service.CreateExternalApiRequest, dict]] = None, + *, + parent: Optional[str] = None, + external_api: Optional[common_fields.ExternalApi] = None, + external_api_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ExternalApi: + r"""Create an External API resource in the API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_create_external_api(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + external_api = apihub_v1.ExternalApi() + external_api.display_name = "display_name_value" + + request = apihub_v1.CreateExternalApiRequest( + parent="parent_value", + external_api=external_api, + ) + + # Make the request + response = await client.create_external_api(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.CreateExternalApiRequest, dict]]): + The request object. The + [CreateExternalApi][google.cloud.apihub.v1.ApiHub.CreateExternalApi] + method's request. + parent (:class:`str`): + Required. The parent resource for the External API + resource. 
Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_api (:class:`google.cloud.apihub_v1.types.ExternalApi`): + Required. The External API resource + to create. + + This corresponds to the ``external_api`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_api_id (:class:`str`): + Optional. The ID to use for the External API resource, + which will become the final component of the External + API's resource name. This field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another External API resource in the API hub. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid + characters are /[a-z][A-Z][0-9]-_/. + + This corresponds to the ``external_api_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.ExternalApi: + An external API represents an API + being provided by external sources. This + can be used to model third-party APIs + and can be used to define dependencies. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, external_api, external_api_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.CreateExternalApiRequest): + request = apihub_service.CreateExternalApiRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if external_api is not None: + request.external_api = external_api + if external_api_id is not None: + request.external_api_id = external_api_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_external_api + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_external_api( + self, + request: Optional[Union[apihub_service.GetExternalApiRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ExternalApi: + r"""Get details about an External API resource in the API + hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_external_api(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetExternalApiRequest( + name="name_value", + ) + + # Make the request + response = await client.get_external_api(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetExternalApiRequest, dict]]): + The request object. The + [GetExternalApi][google.cloud.apihub.v1.ApiHub.GetExternalApi] + method's request. + name (:class:`str`): + Required. The name of the External API resource to + retrieve. Format: + ``projects/{project}/locations/{location}/externalApis/{externalApi}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.ExternalApi: + An external API represents an API + being provided by external sources. This + can be used to model third-party APIs + and can be used to define dependencies. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetExternalApiRequest): + request = apihub_service.GetExternalApiRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_external_api + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_external_api( + self, + request: Optional[Union[apihub_service.UpdateExternalApiRequest, dict]] = None, + *, + external_api: Optional[common_fields.ExternalApi] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ExternalApi: + r"""Update an External API resource in the API hub. 
The following + fields can be updated: + + - [display_name][google.cloud.apihub.v1.ExternalApi.display_name] + - [description][google.cloud.apihub.v1.ExternalApi.description] + - [documentation][google.cloud.apihub.v1.ExternalApi.documentation] + - [endpoints][google.cloud.apihub.v1.ExternalApi.endpoints] + - [paths][google.cloud.apihub.v1.ExternalApi.paths] + + The + [update_mask][google.cloud.apihub.v1.UpdateExternalApiRequest.update_mask] + should be used to specify the fields being updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_update_external_api(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + external_api = apihub_v1.ExternalApi() + external_api.display_name = "display_name_value" + + request = apihub_v1.UpdateExternalApiRequest( + external_api=external_api, + ) + + # Make the request + response = await client.update_external_api(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.UpdateExternalApiRequest, dict]]): + The request object. The + [UpdateExternalApi][google.cloud.apihub.v1.ApiHub.UpdateExternalApi] + method's request. + external_api (:class:`google.cloud.apihub_v1.types.ExternalApi`): + Required. The External API resource to update. + + The External API resource's ``name`` field is used to + identify the External API resource to update. 
Format: + ``projects/{project}/locations/{location}/externalApis/{externalApi}`` + + This corresponds to the ``external_api`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.ExternalApi: + An external API represents an API + being provided by external sources. This + can be used to model third-party APIs + and can be used to define dependencies. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([external_api, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.UpdateExternalApiRequest): + request = apihub_service.UpdateExternalApiRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if external_api is not None: + request.external_api = external_api + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_external_api + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("external_api.name", request.external_api.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_external_api( + self, + request: Optional[Union[apihub_service.DeleteExternalApiRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete an External API resource in the API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_delete_external_api(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteExternalApiRequest( + name="name_value", + ) + + # Make the request + await client.delete_external_api(request=request) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.DeleteExternalApiRequest, dict]]): + The request object. The + [DeleteExternalApi][google.cloud.apihub.v1.ApiHub.DeleteExternalApi] + method's request. + name (:class:`str`): + Required. 
The name of the External API resource to + delete. Format: + ``projects/{project}/locations/{location}/externalApis/{externalApi}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.DeleteExternalApiRequest): + request = apihub_service.DeleteExternalApiRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_external_api + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_external_apis( + self, + request: Optional[Union[apihub_service.ListExternalApisRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExternalApisAsyncPager: + r"""List External API resources in the API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_list_external_apis(): + # Create a client + client = apihub_v1.ApiHubAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.ListExternalApisRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_external_apis(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.ListExternalApisRequest, dict]]): + The request object. The + [ListExternalApis][google.cloud.apihub.v1.ApiHub.ListExternalApis] + method's request. + parent (:class:`str`): + Required. The parent, which owns this collection of + External API resources. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListExternalApisAsyncPager: + The [ListExternalApis][google.cloud.apihub.v1.ApiHub.ListExternalApis] + method's response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListExternalApisRequest): + request = apihub_service.ListExternalApisRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_external_apis + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListExternalApisAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "ApiHubAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ApiHubAsyncClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py new file mode 100644 index 000000000000..77ddc5472962 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py @@ -0,0 +1,5450 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.apihub_v1.services.api_hub import pagers +from google.cloud.apihub_v1.types import apihub_service, common_fields + +from .transports.base import DEFAULT_CLIENT_INFO, ApiHubTransport +from .transports.rest import ApiHubRestTransport + + +class ApiHubClientMeta(type): + """Metaclass for the ApiHub client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubTransport]] + _transport_registry["rest"] = ApiHubRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ApiHubTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ApiHubClient(metaclass=ApiHubClientMeta): + """This service provides all methods related to the API hub.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "apihub.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "apihub.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ApiHubClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ApiHubClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ApiHubTransport: + """Returns the transport used by the client instance. + + Returns: + ApiHubTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def api_path( + project: str, + location: str, + api: str, + ) -> str: + """Returns a fully-qualified api string.""" + return "projects/{project}/locations/{location}/apis/{api}".format( + project=project, + location=location, + api=api, + ) + + @staticmethod + def parse_api_path(path: str) -> Dict[str, str]: + """Parses a api path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/apis/(?P<api>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def api_operation_path( + project: str, + location: str, + api: str, + version: str, + operation: str, + ) -> str: + """Returns a fully-qualified api_operation string.""" + return "projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}".format( + project=project, + location=location, + api=api, + version=version, + operation=operation, + ) + + @staticmethod + def parse_api_operation_path(path: str) -> Dict[str, str]: + """Parses a api_operation path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/apis/(?P<api>.+?)/versions/(?P<version>.+?)/operations/(?P<operation>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def attribute_path( + project: str, + location: str, + attribute: str, + ) -> str: + """Returns a fully-qualified attribute string.""" + return "projects/{project}/locations/{location}/attributes/{attribute}".format( + project=project, + location=location, + attribute=attribute, + ) + + @staticmethod + def parse_attribute_path(path: str) -> Dict[str, str]: + """Parses a attribute path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/attributes/(?P<attribute>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def definition_path( + project: str, + location: str, + api: str, + version: str, + definition: str, + ) -> str: + """Returns a fully-qualified definition string.""" + return 
"projects/{project}/locations/{location}/apis/{api}/versions/{version}/definitions/{definition}".format( + project=project, + location=location, + api=api, + version=version, + definition=definition, + ) + + @staticmethod + def parse_definition_path(path: str) -> Dict[str, str]: + """Parses a definition path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/apis/(?P<api>.+?)/versions/(?P<version>.+?)/definitions/(?P<definition>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def deployment_path( + project: str, + location: str, + deployment: str, + ) -> str: + """Returns a fully-qualified deployment string.""" + return ( + "projects/{project}/locations/{location}/deployments/{deployment}".format( + project=project, + location=location, + deployment=deployment, + ) + ) + + @staticmethod + def parse_deployment_path(path: str) -> Dict[str, str]: + """Parses a deployment path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/deployments/(?P<deployment>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def external_api_path( + project: str, + location: str, + external_api: str, + ) -> str: + """Returns a fully-qualified external_api string.""" + return "projects/{project}/locations/{location}/externalApis/{external_api}".format( + project=project, + location=location, + external_api=external_api, + ) + + @staticmethod + def parse_external_api_path(path: str) -> Dict[str, str]: + """Parses a external_api path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/externalApis/(?P<external_api>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def spec_path( + project: str, + location: str, + api: str, + version: str, + spec: str, + ) -> str: + """Returns a fully-qualified spec string.""" + return "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( + project=project, + location=location, + api=api, + 
version=version, + spec=spec, + ) + + @staticmethod + def parse_spec_path(path: str) -> Dict[str, str]: + """Parses a spec path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/apis/(?P<api>.+?)/versions/(?P<version>.+?)/specs/(?P<spec>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def version_path( + project: str, + location: str, + api: str, + version: str, + ) -> str: + """Returns a fully-qualified version string.""" + return "projects/{project}/locations/{location}/apis/{api}/versions/{version}".format( + project=project, + location=location, + api=api, + version=version, + ) + + @staticmethod + def parse_version_path(path: str) -> Dict[str, str]: + """Parses a version path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/apis/(?P<api>.+?)/versions/(?P<version>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + 
@staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ApiHubClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ApiHubClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ApiHubClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. 
+ + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ApiHubClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ApiHubClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ApiHubClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ApiHubTransport, Callable[..., ApiHubTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the api hub client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ApiHubTransport,Callable[..., ApiHubTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ApiHubTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ApiHubClient._read_environment_variables() + self._client_cert_source = ApiHubClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ApiHubClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ApiHubTransport) + if transport_provided: + # transport is a ApiHubTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ApiHubTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or ApiHubClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ApiHubTransport], Callable[..., ApiHubTransport] + ] = ( + ApiHubClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ApiHubTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_api( + self, + request: Optional[Union[apihub_service.CreateApiRequest, dict]] = None, + *, + parent: Optional[str] = None, + api: Optional[common_fields.Api] = None, + api_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Api: + r"""Create an API resource in the API hub. + Once an API resource is created, versions can be added + to it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_create_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + api = apihub_v1.Api() + api.display_name = "display_name_value" + + request = apihub_v1.CreateApiRequest( + parent="parent_value", + api=api, + ) + + # Make the request + response = client.create_api(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.CreateApiRequest, dict]): + The request object. The [CreateApi][google.cloud.apihub.v1.ApiHub.CreateApi] + method's request. + parent (str): + Required. The parent resource for the API resource. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + api (google.cloud.apihub_v1.types.Api): + Required. The API resource to create. + This corresponds to the ``api`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + api_id (str): + Optional. The ID to use for the API resource, which will + become the final component of the API's resource name. + This field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another API resource in the API hub. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid + characters are /[a-z][A-Z][0-9]-_/. + + This corresponds to the ``api_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Api: + An API resource in the API Hub. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, api, api_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.CreateApiRequest): + request = apihub_service.CreateApiRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if api is not None: + request.api = api + if api_id is not None: + request.api_id = api_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_api] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_api( + self, + request: Optional[Union[apihub_service.GetApiRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Api: + r"""Get API resource details including the API versions + contained in it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetApiRequest( + name="name_value", + ) + + # Make the request + response = client.get_api(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetApiRequest, dict]): + The request object. The [GetApi][google.cloud.apihub.v1.ApiHub.GetApi] + method's request. + name (str): + Required. The name of the API resource to retrieve. + Format: + ``projects/{project}/locations/{location}/apis/{api}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Api: + An API resource in the API Hub. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetApiRequest): + request = apihub_service.GetApiRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_api] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_apis( + self, + request: Optional[Union[apihub_service.ListApisRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListApisPager: + r"""List API resources in the API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_list_apis(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.ListApisRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_apis(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.ListApisRequest, dict]): + The request object. The [ListApis][google.cloud.apihub.v1.ApiHub.ListApis] + method's request. + parent (str): + Required. The parent, which owns this collection of API + resources. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListApisPager: + The [ListApis][google.cloud.apihub.v1.ApiHub.ListApis] + method's response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListApisRequest): + request = apihub_service.ListApisRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_apis] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListApisPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_api( + self, + request: Optional[Union[apihub_service.UpdateApiRequest, dict]] = None, + *, + api: Optional[common_fields.Api] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Api: + r"""Update an API resource in the API hub. 
The following fields in + the [API][] can be updated: + + - [display_name][google.cloud.apihub.v1.Api.display_name] + - [description][google.cloud.apihub.v1.Api.description] + - [owner][google.cloud.apihub.v1.Api.owner] + - [documentation][google.cloud.apihub.v1.Api.documentation] + - [target_user][google.cloud.apihub.v1.Api.target_user] + - [team][google.cloud.apihub.v1.Api.team] + - [business_unit][google.cloud.apihub.v1.Api.business_unit] + - [maturity_level][google.cloud.apihub.v1.Api.maturity_level] + - [attributes][google.cloud.apihub.v1.Api.attributes] + + The + [update_mask][google.cloud.apihub.v1.UpdateApiRequest.update_mask] + should be used to specify the fields being updated. + + Updating the owner field requires complete owner message and + updates both owner and email fields. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_update_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + api = apihub_v1.Api() + api.display_name = "display_name_value" + + request = apihub_v1.UpdateApiRequest( + api=api, + ) + + # Make the request + response = client.update_api(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.UpdateApiRequest, dict]): + The request object. The [UpdateApi][google.cloud.apihub.v1.ApiHub.UpdateApi] + method's request. + api (google.cloud.apihub_v1.types.Api): + Required. The API resource to update. + + The API resource's ``name`` field is used to identify + the API resource to update. 
Format: + ``projects/{project}/locations/{location}/apis/{api}`` + + This corresponds to the ``api`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Api: + An API resource in the API Hub. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([api, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.UpdateApiRequest): + request = apihub_service.UpdateApiRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if api is not None: + request.api = api + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_api] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("api.name", request.api.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_api( + self, + request: Optional[Union[apihub_service.DeleteApiRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete an API resource in the API hub. API can only + be deleted if all underlying versions are deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_delete_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteApiRequest( + name="name_value", + ) + + # Make the request + client.delete_api(request=request) + + Args: + request (Union[google.cloud.apihub_v1.types.DeleteApiRequest, dict]): + The request object. The [DeleteApi][google.cloud.apihub.v1.ApiHub.DeleteApi] + method's request. + name (str): + Required. The name of the API resource to delete. + Format: + ``projects/{project}/locations/{location}/apis/{api}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.DeleteApiRequest): + request = apihub_service.DeleteApiRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_api] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_version( + self, + request: Optional[Union[apihub_service.CreateVersionRequest, dict]] = None, + *, + parent: Optional[str] = None, + version: Optional[common_fields.Version] = None, + version_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Version: + r"""Create an API version for an API resource in the API + hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_create_version(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + version = apihub_v1.Version() + version.display_name = "display_name_value" + + request = apihub_v1.CreateVersionRequest( + parent="parent_value", + version=version, + ) + + # Make the request + response = client.create_version(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.CreateVersionRequest, dict]): + The request object. The + [CreateVersion][google.cloud.apihub.v1.ApiHub.CreateVersion] + method's request. + parent (str): + Required. The parent resource for API version. Format: + ``projects/{project}/locations/{location}/apis/{api}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + version (google.cloud.apihub_v1.types.Version): + Required. The version to create. 
+ This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + version_id (str): + Optional. The ID to use for the API version, which will + become the final component of the version's resource + name. This field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another version in the API resource. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid + characters are /[a-z][A-Z][0-9]-_/. + + This corresponds to the ``version_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Version: + Represents a version of the API + resource in API hub. This is also + referred to as the API version. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, version, version_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.CreateVersionRequest): + request = apihub_service.CreateVersionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if version is not None: + request.version = version + if version_id is not None: + request.version_id = version_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_version] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_version( + self, + request: Optional[Union[apihub_service.GetVersionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Version: + r"""Get details about the API version of an API resource. + This will include information about the specs and + operations present in the API version as well as the + deployments linked to it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_version(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetVersionRequest( + name="name_value", + ) + + # Make the request + response = client.get_version(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetVersionRequest, dict]): + The request object. The + [GetVersion][google.cloud.apihub.v1.ApiHub.GetVersion] + method's request. + name (str): + Required. The name of the API version to retrieve. + Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Version: + Represents a version of the API + resource in API hub. This is also + referred to as the API version. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, apihub_service.GetVersionRequest): + request = apihub_service.GetVersionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_version] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_versions( + self, + request: Optional[Union[apihub_service.ListVersionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVersionsPager: + r"""List API versions of an API resource in the API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_list_versions(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.ListVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.ListVersionsRequest, dict]): + The request object. The + [ListVersions][google.cloud.apihub.v1.ApiHub.ListVersions] + method's request. + parent (str): + Required. The parent which owns this collection of API + versions i.e., the API resource Format: + ``projects/{project}/locations/{location}/apis/{api}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListVersionsPager: + The [ListVersions][google.cloud.apihub.v1.ApiHub.ListVersions] method's + response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListVersionsRequest): + request = apihub_service.ListVersionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_versions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListVersionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_version( + self, + request: Optional[Union[apihub_service.UpdateVersionRequest, dict]] = None, + *, + version: Optional[common_fields.Version] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Version: + r"""Update API version. 
The following fields in the + [version][google.cloud.apihub.v1.Version] can be updated + currently: + + - [display_name][google.cloud.apihub.v1.Version.display_name] + - [description][google.cloud.apihub.v1.Version.description] + - [documentation][google.cloud.apihub.v1.Version.documentation] + - [deployments][google.cloud.apihub.v1.Version.deployments] + - [lifecycle][google.cloud.apihub.v1.Version.lifecycle] + - [compliance][google.cloud.apihub.v1.Version.compliance] + - [accreditation][google.cloud.apihub.v1.Version.accreditation] + - [attributes][google.cloud.apihub.v1.Version.attributes] + + The + [update_mask][google.cloud.apihub.v1.UpdateVersionRequest.update_mask] + should be used to specify the fields being updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_update_version(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + version = apihub_v1.Version() + version.display_name = "display_name_value" + + request = apihub_v1.UpdateVersionRequest( + version=version, + ) + + # Make the request + response = client.update_version(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.UpdateVersionRequest, dict]): + The request object. The + [UpdateVersion][google.cloud.apihub.v1.ApiHub.UpdateVersion] + method's request. + version (google.cloud.apihub_v1.types.Version): + Required. The API version to update. + + The version's ``name`` field is used to identify the API + version to update. 
Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + + This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Version: + Represents a version of the API + resource in API hub. This is also + referred to as the API version. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([version, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.UpdateVersionRequest): + request = apihub_service.UpdateVersionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if version is not None: + request.version = version + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_version] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("version.name", request.version.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_version( + self, + request: Optional[Union[apihub_service.DeleteVersionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete an API version. Version can only be deleted if + all underlying specs, operations, definitions and linked + deployments are deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_delete_version(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteVersionRequest( + name="name_value", + ) + + # Make the request + client.delete_version(request=request) + + Args: + request (Union[google.cloud.apihub_v1.types.DeleteVersionRequest, dict]): + The request object. The + [DeleteVersion][google.cloud.apihub.v1.ApiHub.DeleteVersion] + method's request. + name (str): + Required. The name of the version to delete. 
Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.DeleteVersionRequest): + request = apihub_service.DeleteVersionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_version] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_spec( + self, + request: Optional[Union[apihub_service.CreateSpecRequest, dict]] = None, + *, + parent: Optional[str] = None, + spec: Optional[common_fields.Spec] = None, + spec_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Spec: + r"""Add a spec to an API version in the API hub. Multiple specs can + be added to an API version. Note, while adding a spec, at least + one of ``contents`` or ``source_uri`` must be provided. If + ``contents`` is provided, then ``spec_type`` must also be + provided. + + On adding a spec with contents to the version, the operations + present in it will be added to the version.Note that the file + contents in the spec should be of the same type as defined in + the + ``projects/{project}/locations/{location}/attributes/system-spec-type`` + attribute associated with spec resource. Note that specs of + various types can be uploaded, however parsing of details is + supported for OpenAPI spec currently. + + In order to access the information parsed from the spec, use the + [GetSpec][google.cloud.apihub.v1.ApiHub.GetSpec] method. In + order to access the raw contents for a particular spec, use the + [GetSpecContents][google.cloud.apihub.v1.ApiHub.GetSpecContents] + method. In order to access the operations parsed from the spec, + use the + [ListAPIOperations][google.cloud.apihub.v1.ApiHub.ListApiOperations] + method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_create_spec(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + spec = apihub_v1.Spec() + spec.display_name = "display_name_value" + spec.spec_type.enum_values.values.id = "id_value" + spec.spec_type.enum_values.values.display_name = "display_name_value" + + request = apihub_v1.CreateSpecRequest( + parent="parent_value", + spec=spec, + ) + + # Make the request + response = client.create_spec(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.CreateSpecRequest, dict]): + The request object. The + [CreateSpec][google.cloud.apihub.v1.ApiHub.CreateSpec] + method's request. + parent (str): + Required. The parent resource for Spec. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + spec (google.cloud.apihub_v1.types.Spec): + Required. The spec to create. + This corresponds to the ``spec`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + spec_id (str): + Optional. The ID to use for the spec, which will become + the final component of the spec's resource name. This + field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another spec in the API resource. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid + characters are /[a-z][A-Z][0-9]-_/. + + This corresponds to the ``spec_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Spec: + Represents a spec associated with an + API version in the API Hub. Note that + specs of various types can be uploaded, + however parsing of details is supported + for OpenAPI spec currently. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, spec, spec_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.CreateSpecRequest): + request = apihub_service.CreateSpecRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if spec is not None: + request.spec = spec + if spec_id is not None: + request.spec_id = spec_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_spec] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_spec( + self, + request: Optional[Union[apihub_service.GetSpecRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Spec: + r"""Get details about the information parsed from a spec. Note that + this method does not return the raw spec contents. Use + [GetSpecContents][google.cloud.apihub.v1.ApiHub.GetSpecContents] + method to retrieve the same. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_spec(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetSpecRequest( + name="name_value", + ) + + # Make the request + response = client.get_spec(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetSpecRequest, dict]): + The request object. The [GetSpec][google.cloud.apihub.v1.ApiHub.GetSpec] + method's request. + name (str): + Required. The name of the spec to retrieve. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Spec: + Represents a spec associated with an + API version in the API Hub. Note that + specs of various types can be uploaded, + however parsing of details is supported + for OpenAPI spec currently. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetSpecRequest): + request = apihub_service.GetSpecRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_spec] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_spec_contents( + self, + request: Optional[Union[apihub_service.GetSpecContentsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.SpecContents: + r"""Get spec contents. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_spec_contents(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetSpecContentsRequest( + name="name_value", + ) + + # Make the request + response = client.get_spec_contents(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetSpecContentsRequest, dict]): + The request object. The + [GetSpecContents][google.cloud.apihub.v1.ApiHub.GetSpecContents] + method's request. + name (str): + Required. The name of the spec whose contents need to be + retrieved. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.apihub_v1.types.SpecContents: + The spec contents. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetSpecContentsRequest): + request = apihub_service.GetSpecContentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_spec_contents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_specs( + self, + request: Optional[Union[apihub_service.ListSpecsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSpecsPager: + r"""List specs corresponding to a particular API + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_list_specs(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.ListSpecsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_specs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.ListSpecsRequest, dict]): + The request object. The [ListSpecs][ListSpecs] method's request. + parent (str): + Required. The parent, which owns this collection of + specs. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListSpecsPager: + The [ListSpecs][google.cloud.apihub.v1.ApiHub.ListSpecs] + method's response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListSpecsRequest): + request = apihub_service.ListSpecsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_specs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSpecsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_spec( + self, + request: Optional[Union[apihub_service.UpdateSpecRequest, dict]] = None, + *, + spec: Optional[common_fields.Spec] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Spec: + r"""Update spec. 
The following fields in the + [spec][google.cloud.apihub.v1.Spec] can be updated: + + - [display_name][google.cloud.apihub.v1.Spec.display_name] + - [source_uri][google.cloud.apihub.v1.Spec.source_uri] + - [lint_response][google.cloud.apihub.v1.Spec.lint_response] + - [attributes][google.cloud.apihub.v1.Spec.attributes] + - [contents][google.cloud.apihub.v1.Spec.contents] + - [spec_type][google.cloud.apihub.v1.Spec.spec_type] + + In case of an OAS spec, updating spec contents can lead to: + + 1. Creation, deletion and update of operations. + 2. Creation, deletion and update of definitions. + 3. Update of other info parsed out from the new spec. + + In case of contents or source_uri being present in update mask, + spec_type must also be present. Also, spec_type can not be + present in update mask if contents or source_uri is not present. + + The + [update_mask][google.cloud.apihub.v1.UpdateSpecRequest.update_mask] + should be used to specify the fields being updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_update_spec(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + spec = apihub_v1.Spec() + spec.display_name = "display_name_value" + spec.spec_type.enum_values.values.id = "id_value" + spec.spec_type.enum_values.values.display_name = "display_name_value" + + request = apihub_v1.UpdateSpecRequest( + spec=spec, + ) + + # Make the request + response = client.update_spec(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.UpdateSpecRequest, dict]): + The request object. The + [UpdateSpec][google.cloud.apihub.v1.ApiHub.UpdateSpec] + method's request. + spec (google.cloud.apihub_v1.types.Spec): + Required. The spec to update. + + The spec's ``name`` field is used to identify the spec + to update. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + + This corresponds to the ``spec`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Spec: + Represents a spec associated with an + API version in the API Hub. Note that + specs of various types can be uploaded, + however parsing of details is supported + for OpenAPI spec currently. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([spec, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.UpdateSpecRequest): + request = apihub_service.UpdateSpecRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if spec is not None: + request.spec = spec + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_spec] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("spec.name", request.spec.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_spec( + self, + request: Optional[Union[apihub_service.DeleteSpecRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a spec. + Deleting a spec will also delete the associated + operations from the version. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_delete_spec(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteSpecRequest( + name="name_value", + ) + + # Make the request + client.delete_spec(request=request) + + Args: + request (Union[google.cloud.apihub_v1.types.DeleteSpecRequest, dict]): + The request object. The + [DeleteSpec][google.cloud.apihub.v1.ApiHub.DeleteSpec] + method's request. + name (str): + Required. The name of the spec to delete. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, apihub_service.DeleteSpecRequest): + request = apihub_service.DeleteSpecRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_spec] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_api_operation( + self, + request: Optional[Union[apihub_service.GetApiOperationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ApiOperation: + r"""Get details about a particular operation in API + version. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_api_operation(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetApiOperationRequest( + name="name_value", + ) + + # Make the request + response = client.get_api_operation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetApiOperationRequest, dict]): + The request object. The + [GetApiOperation][google.cloud.apihub.v1.ApiHub.GetApiOperation] + method's request. + name (str): + Required. The name of the operation to retrieve. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.ApiOperation: + Represents an operation contained in + an API version in the API Hub. An + operation is added/updated/deleted in an + API version when a new spec is added or + an existing spec is updated/deleted in a + version. Currently, an operation will be + created only corresponding to OpenAPI + spec as parsing is supported for OpenAPI + spec. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetApiOperationRequest): + request = apihub_service.GetApiOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_api_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_api_operations( + self, + request: Optional[Union[apihub_service.ListApiOperationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListApiOperationsPager: + r"""List operations in an API version. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_list_api_operations(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.ListApiOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_api_operations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.ListApiOperationsRequest, dict]): + The request object. The + [ListApiOperations][google.cloud.apihub.v1.ApiHub.ListApiOperations] + method's request. + parent (str): + Required. The parent which owns this collection of + operations i.e., the API version. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListApiOperationsPager: + The [ListApiOperations][google.cloud.apihub.v1.ApiHub.ListApiOperations] + method's response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListApiOperationsRequest): + request = apihub_service.ListApiOperationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_api_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListApiOperationsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_definition( + self, + request: Optional[Union[apihub_service.GetDefinitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Definition: + r"""Get details about a definition in an API version. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_definition(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetDefinitionRequest( + name="name_value", + ) + + # Make the request + response = client.get_definition(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetDefinitionRequest, dict]): + The request object. The + [GetDefinition][google.cloud.apihub.v1.ApiHub.GetDefinition] + method's request. + name (str): + Required. The name of the definition to retrieve. + Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/definitions/{definition}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Definition: + Represents a definition for example schema, request, response definitions + contained in an API version. A definition is + added/updated/deleted in an API version when a new + spec is added or an existing spec is updated/deleted + in a version. Currently, definition will be created + only corresponding to OpenAPI spec as parsing is + supported for OpenAPI spec. 
Also, within OpenAPI + spec, only schema object is supported. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetDefinitionRequest): + request = apihub_service.GetDefinitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_definition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_deployment( + self, + request: Optional[Union[apihub_service.CreateDeploymentRequest, dict]] = None, + *, + parent: Optional[str] = None, + deployment: Optional[common_fields.Deployment] = None, + deployment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Deployment: + r"""Create a deployment resource in the API hub. 
+ Once a deployment resource is created, it can be + associated with API versions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_create_deployment(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + deployment = apihub_v1.Deployment() + deployment.display_name = "display_name_value" + deployment.deployment_type.enum_values.values.id = "id_value" + deployment.deployment_type.enum_values.values.display_name = "display_name_value" + deployment.resource_uri = "resource_uri_value" + deployment.endpoints = ['endpoints_value1', 'endpoints_value2'] + + request = apihub_v1.CreateDeploymentRequest( + parent="parent_value", + deployment=deployment, + ) + + # Make the request + response = client.create_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.CreateDeploymentRequest, dict]): + The request object. The + [CreateDeployment][google.cloud.apihub.v1.ApiHub.CreateDeployment] + method's request. + parent (str): + Required. The parent resource for the deployment + resource. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment (google.cloud.apihub_v1.types.Deployment): + Required. The deployment resource to + create. + + This corresponds to the ``deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment_id (str): + Optional. 
The ID to use for the deployment resource, + which will become the final component of the + deployment's resource name. This field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another deployment resource in the API hub. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid + characters are /[a-z][A-Z][0-9]-_/. + + This corresponds to the ``deployment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Deployment: + Details of the deployment where APIs + are hosted. A deployment could represent + an Apigee proxy, API gateway, other + Google Cloud services or non-Google + Cloud services as well. A deployment + entity is a root level entity in the API + hub and exists independent of any API. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deployment, deployment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.CreateDeploymentRequest): + request = apihub_service.CreateDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if deployment is not None: + request.deployment = deployment + if deployment_id is not None: + request.deployment_id = deployment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_deployment( + self, + request: Optional[Union[apihub_service.GetDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Deployment: + r"""Get details about a deployment and the API versions + linked to it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_deployment(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetDeploymentRequest( + name="name_value", + ) + + # Make the request + response = client.get_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetDeploymentRequest, dict]): + The request object. The + [GetDeployment][google.cloud.apihub.v1.ApiHub.GetDeployment] + method's request. + name (str): + Required. The name of the deployment resource to + retrieve. Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Deployment: + Details of the deployment where APIs + are hosted. A deployment could represent + an Apigee proxy, API gateway, other + Google Cloud services or non-Google + Cloud services as well. A deployment + entity is a root level entity in the API + hub and exists independent of any API. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetDeploymentRequest): + request = apihub_service.GetDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_deployments( + self, + request: Optional[Union[apihub_service.ListDeploymentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeploymentsPager: + r"""List deployment resources in the API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_list_deployments(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.ListDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.ListDeploymentsRequest, dict]): + The request object. The + [ListDeployments][google.cloud.apihub.v1.ApiHub.ListDeployments] + method's request. + parent (str): + Required. The parent, which owns this collection of + deployment resources. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListDeploymentsPager: + The [ListDeployments][google.cloud.apihub.v1.ApiHub.ListDeployments] method's + response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListDeploymentsRequest): + request = apihub_service.ListDeploymentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_deployments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeploymentsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_deployment( + self, + request: Optional[Union[apihub_service.UpdateDeploymentRequest, dict]] = None, + *, + deployment: Optional[common_fields.Deployment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Deployment: + r"""Update a deployment resource in the API hub. 
The following + fields in the [deployment + resource][google.cloud.apihub.v1.Deployment] can be updated: + + - [display_name][google.cloud.apihub.v1.Deployment.display_name] + - [description][google.cloud.apihub.v1.Deployment.description] + - [documentation][google.cloud.apihub.v1.Deployment.documentation] + - [deployment_type][google.cloud.apihub.v1.Deployment.deployment_type] + - [resource_uri][google.cloud.apihub.v1.Deployment.resource_uri] + - [endpoints][google.cloud.apihub.v1.Deployment.endpoints] + - [slo][google.cloud.apihub.v1.Deployment.slo] + - [environment][google.cloud.apihub.v1.Deployment.environment] + - [attributes][google.cloud.apihub.v1.Deployment.attributes] + + The + [update_mask][google.cloud.apihub.v1.UpdateDeploymentRequest.update_mask] + should be used to specify the fields being updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_update_deployment(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + deployment = apihub_v1.Deployment() + deployment.display_name = "display_name_value" + deployment.deployment_type.enum_values.values.id = "id_value" + deployment.deployment_type.enum_values.values.display_name = "display_name_value" + deployment.resource_uri = "resource_uri_value" + deployment.endpoints = ['endpoints_value1', 'endpoints_value2'] + + request = apihub_v1.UpdateDeploymentRequest( + deployment=deployment, + ) + + # Make the request + response = client.update_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.UpdateDeploymentRequest, dict]): + The request object. The + [UpdateDeployment][google.cloud.apihub.v1.ApiHub.UpdateDeployment] + method's request. + deployment (google.cloud.apihub_v1.types.Deployment): + Required. The deployment resource to update. + + The deployment resource's ``name`` field is used to + identify the deployment resource to update. Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + + This corresponds to the ``deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Deployment: + Details of the deployment where APIs + are hosted. A deployment could represent + an Apigee proxy, API gateway, other + Google Cloud services or non-Google + Cloud services as well. A deployment + entity is a root level entity in the API + hub and exists independent of any API. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deployment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.UpdateDeploymentRequest): + request = apihub_service.UpdateDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deployment is not None: + request.deployment = deployment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deployment.name", request.deployment.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_deployment( + self, + request: Optional[Union[apihub_service.DeleteDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a deployment resource in the API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_delete_deployment(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteDeploymentRequest( + name="name_value", + ) + + # Make the request + client.delete_deployment(request=request) + + Args: + request (Union[google.cloud.apihub_v1.types.DeleteDeploymentRequest, dict]): + The request object. The + [DeleteDeployment][google.cloud.apihub.v1.ApiHub.DeleteDeployment] + method's request. + name (str): + Required. The name of the deployment resource to delete. + Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.DeleteDeploymentRequest): + request = apihub_service.DeleteDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_attribute( + self, + request: Optional[Union[apihub_service.CreateAttributeRequest, dict]] = None, + *, + parent: Optional[str] = None, + attribute: Optional[common_fields.Attribute] = None, + attribute_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Attribute: + r"""Create a user defined attribute. + + Certain pre defined attributes are already created by the API + hub. These attributes will have type as ``SYSTEM_DEFINED`` and + can be listed via + [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] + method. 
Allowed values for the same can be updated via + [UpdateAttribute][google.cloud.apihub.v1.ApiHub.UpdateAttribute] + method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_create_attribute(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + attribute = apihub_v1.Attribute() + attribute.display_name = "display_name_value" + attribute.scope = "PLUGIN" + attribute.data_type = "STRING" + + request = apihub_v1.CreateAttributeRequest( + parent="parent_value", + attribute=attribute, + ) + + # Make the request + response = client.create_attribute(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.CreateAttributeRequest, dict]): + The request object. The + [CreateAttribute][google.cloud.apihub.v1.ApiHub.CreateAttribute] + method's request. + parent (str): + Required. The parent resource for Attribute. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + attribute (google.cloud.apihub_v1.types.Attribute): + Required. The attribute to create. + This corresponds to the ``attribute`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + attribute_id (str): + Optional. The ID to use for the attribute, which will + become the final component of the attribute's resource + name. This field is optional. + + - If provided, the same will be used. 
The service will + throw an error if the specified id is already used by + another attribute resource in the API hub. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid + characters are /[a-z][A-Z][0-9]-_/. + + This corresponds to the ``attribute_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Attribute: + An attribute in the API Hub. + An attribute is a name value pair which + can be attached to different resources + in the API hub based on the scope of the + attribute. Attributes can either be + pre-defined by the API Hub or created by + users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, attribute, attribute_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.CreateAttributeRequest): + request = apihub_service.CreateAttributeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if attribute is not None: + request.attribute = attribute + if attribute_id is not None: + request.attribute_id = attribute_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_attribute] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_attribute( + self, + request: Optional[Union[apihub_service.GetAttributeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Attribute: + r"""Get details about the attribute. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_attribute(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetAttributeRequest( + name="name_value", + ) + + # Make the request + response = client.get_attribute(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetAttributeRequest, dict]): + The request object. The + [GetAttribute][google.cloud.apihub.v1.ApiHub.GetAttribute] + method's request. + name (str): + Required. The name of the attribute to retrieve. Format: + ``projects/{project}/locations/{location}/attributes/{attribute}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Attribute: + An attribute in the API Hub. + An attribute is a name value pair which + can be attached to different resources + in the API hub based on the scope of the + attribute. Attributes can either be + pre-defined by the API Hub or created by + users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetAttributeRequest): + request = apihub_service.GetAttributeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_attribute] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_attribute( + self, + request: Optional[Union[apihub_service.UpdateAttributeRequest, dict]] = None, + *, + attribute: Optional[common_fields.Attribute] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Attribute: + r"""Update the attribute. The following fields in the [Attribute + resource][google.cloud.apihub.v1.Attribute] can be updated: + + - [display_name][google.cloud.apihub.v1.Attribute.display_name] + The display name can be updated for user defined attributes + only. + - [description][google.cloud.apihub.v1.Attribute.description] + The description can be updated for user defined attributes + only. 
+ - [allowed_values][google.cloud.apihub.v1.Attribute.allowed_values] + To update the list of allowed values, clients need to use the + fetched list of allowed values and add or remove values to or + from the same list. The mutable allowed values can be updated + for both user defined and System defined attributes. The + immutable allowed values cannot be updated or deleted. The + updated list of allowed values cannot be empty. If an allowed + value that is already used by some resource's attribute is + deleted, then the association between the resource and the + attribute value will also be deleted. + - [cardinality][google.cloud.apihub.v1.Attribute.cardinality] + The cardinality can be updated for user defined attributes + only. Cardinality can only be increased during an update. + + The + [update_mask][google.cloud.apihub.v1.UpdateAttributeRequest.update_mask] + should be used to specify the fields being updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_update_attribute(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + attribute = apihub_v1.Attribute() + attribute.display_name = "display_name_value" + attribute.scope = "PLUGIN" + attribute.data_type = "STRING" + + request = apihub_v1.UpdateAttributeRequest( + attribute=attribute, + ) + + # Make the request + response = client.update_attribute(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.UpdateAttributeRequest, dict]): + The request object. 
The + [UpdateAttribute][google.cloud.apihub.v1.ApiHub.UpdateAttribute] + method's request. + attribute (google.cloud.apihub_v1.types.Attribute): + Required. The attribute to update. + + The attribute's ``name`` field is used to identify the + attribute to update. Format: + ``projects/{project}/locations/{location}/attributes/{attribute}`` + + This corresponds to the ``attribute`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Attribute: + An attribute in the API Hub. + An attribute is a name value pair which + can be attached to different resources + in the API hub based on the scope of the + attribute. Attributes can either be + pre-defined by the API Hub or created by + users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([attribute, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, apihub_service.UpdateAttributeRequest): + request = apihub_service.UpdateAttributeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if attribute is not None: + request.attribute = attribute + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_attribute] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("attribute.name", request.attribute.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_attribute( + self, + request: Optional[Union[apihub_service.DeleteAttributeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete an attribute. + + Note: System defined attributes cannot be deleted. All + associations of the attribute being deleted with any API + hub resource will also get deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_delete_attribute(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteAttributeRequest( + name="name_value", + ) + + # Make the request + client.delete_attribute(request=request) + + Args: + request (Union[google.cloud.apihub_v1.types.DeleteAttributeRequest, dict]): + The request object. The + [DeleteAttribute][google.cloud.apihub.v1.ApiHub.DeleteAttribute] + method's request. + name (str): + Required. The name of the attribute to delete. Format: + ``projects/{project}/locations/{location}/attributes/{attribute}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.DeleteAttributeRequest): + request = apihub_service.DeleteAttributeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_attribute] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_attributes( + self, + request: Optional[Union[apihub_service.ListAttributesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAttributesPager: + r"""List all attributes. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_list_attributes(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.ListAttributesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_attributes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.ListAttributesRequest, dict]): + The request object. The + [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] + method's request. + parent (str): + Required. 
The parent resource for Attribute. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListAttributesPager: + The [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] method's + response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListAttributesRequest): + request = apihub_service.ListAttributesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_attributes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAttributesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def search_resources( + self, + request: Optional[Union[apihub_service.SearchResourcesRequest, dict]] = None, + *, + location: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchResourcesPager: + r"""Search across API-Hub resources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_search_resources(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.SearchResourcesRequest( + location="location_value", + query="query_value", + ) + + # Make the request + page_result = client.search_resources(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.SearchResourcesRequest, dict]): + The request object. The + [SearchResources][google.cloud.apihub.v1.ApiHub.SearchResources] + method's request. + location (str): + Required. 
The resource name of the location which will + be of the type + ``projects/{project_id}/locations/{location_id}``. This + field is used to identify the instance of API-Hub in + which resources should be searched. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (str): + Required. The free text search query. + This query can contain keywords which + could be related to any detail of the + API-Hub resources such display names, + descriptions, attributes etc. + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.SearchResourcesPager: + Response for the + [SearchResources][google.cloud.apihub.v1.ApiHub.SearchResources] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, query]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.SearchResourcesRequest): + request = apihub_service.SearchResourcesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if location is not None: + request.location = location + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_resources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchResourcesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_external_api( + self, + request: Optional[Union[apihub_service.CreateExternalApiRequest, dict]] = None, + *, + parent: Optional[str] = None, + external_api: Optional[common_fields.ExternalApi] = None, + external_api_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ExternalApi: + r"""Create an External API resource in the API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_create_external_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + external_api = apihub_v1.ExternalApi() + external_api.display_name = "display_name_value" + + request = apihub_v1.CreateExternalApiRequest( + parent="parent_value", + external_api=external_api, + ) + + # Make the request + response = client.create_external_api(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.CreateExternalApiRequest, dict]): + The request object. The + [CreateExternalApi][google.cloud.apihub.v1.ApiHub.CreateExternalApi] + method's request. + parent (str): + Required. The parent resource for the External API + resource. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_api (google.cloud.apihub_v1.types.ExternalApi): + Required. The External API resource + to create. + + This corresponds to the ``external_api`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_api_id (str): + Optional. The ID to use for the External API resource, + which will become the final component of the External + API's resource name. This field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another External API resource in the API hub. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid + characters are /[a-z][A-Z][0-9]-_/. 
+ + This corresponds to the ``external_api_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.ExternalApi: + An external API represents an API + being provided by external sources. This + can be used to model third-party APIs + and can be used to define dependencies. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, external_api, external_api_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.CreateExternalApiRequest): + request = apihub_service.CreateExternalApiRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if external_api is not None: + request.external_api = external_api + if external_api_id is not None: + request.external_api_id = external_api_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_external_api] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_external_api( + self, + request: Optional[Union[apihub_service.GetExternalApiRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ExternalApi: + r"""Get details about an External API resource in the API + hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_external_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetExternalApiRequest( + name="name_value", + ) + + # Make the request + response = client.get_external_api(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetExternalApiRequest, dict]): + The request object. The + [GetExternalApi][google.cloud.apihub.v1.ApiHub.GetExternalApi] + method's request. + name (str): + Required. The name of the External API resource to + retrieve. 
Format: + ``projects/{project}/locations/{location}/externalApis/{externalApi}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.ExternalApi: + An external API represents an API + being provided by external sources. This + can be used to model third-party APIs + and can be used to define dependencies. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetExternalApiRequest): + request = apihub_service.GetExternalApiRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_external_api] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_external_api( + self, + request: Optional[Union[apihub_service.UpdateExternalApiRequest, dict]] = None, + *, + external_api: Optional[common_fields.ExternalApi] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ExternalApi: + r"""Update an External API resource in the API hub. The following + fields can be updated: + + - [display_name][google.cloud.apihub.v1.ExternalApi.display_name] + - [description][google.cloud.apihub.v1.ExternalApi.description] + - [documentation][google.cloud.apihub.v1.ExternalApi.documentation] + - [endpoints][google.cloud.apihub.v1.ExternalApi.endpoints] + - [paths][google.cloud.apihub.v1.ExternalApi.paths] + + The + [update_mask][google.cloud.apihub.v1.UpdateExternalApiRequest.update_mask] + should be used to specify the fields being updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_update_external_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + external_api = apihub_v1.ExternalApi() + external_api.display_name = "display_name_value" + + request = apihub_v1.UpdateExternalApiRequest( + external_api=external_api, + ) + + # Make the request + response = client.update_external_api(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.UpdateExternalApiRequest, dict]): + The request object. The + [UpdateExternalApi][google.cloud.apihub.v1.ApiHub.UpdateExternalApi] + method's request. + external_api (google.cloud.apihub_v1.types.ExternalApi): + Required. The External API resource to update. + + The External API resource's ``name`` field is used to + identify the External API resource to update. Format: + ``projects/{project}/locations/{location}/externalApis/{externalApi}`` + + This corresponds to the ``external_api`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.ExternalApi: + An external API represents an API + being provided by external sources. This + can be used to model third-party APIs + and can be used to define dependencies. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([external_api, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.UpdateExternalApiRequest): + request = apihub_service.UpdateExternalApiRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if external_api is not None: + request.external_api = external_api + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_external_api] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("external_api.name", request.external_api.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_external_api( + self, + request: Optional[Union[apihub_service.DeleteExternalApiRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete an External API resource in the API hub. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_delete_external_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteExternalApiRequest( + name="name_value", + ) + + # Make the request + client.delete_external_api(request=request) + + Args: + request (Union[google.cloud.apihub_v1.types.DeleteExternalApiRequest, dict]): + The request object. The + [DeleteExternalApi][google.cloud.apihub.v1.ApiHub.DeleteExternalApi] + method's request. + name (str): + Required. The name of the External API resource to + delete. Format: + ``projects/{project}/locations/{location}/externalApis/{externalApi}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.DeleteExternalApiRequest): + request = apihub_service.DeleteExternalApiRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_external_api] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_external_apis( + self, + request: Optional[Union[apihub_service.ListExternalApisRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExternalApisPager: + r"""List External API resources in the API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_list_external_apis(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.ListExternalApisRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_external_apis(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.ListExternalApisRequest, dict]): + The request object. The + [ListExternalApis][google.cloud.apihub.v1.ApiHub.ListExternalApis] + method's request. + parent (str): + Required. The parent, which owns this collection of + External API resources. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub.pagers.ListExternalApisPager: + The [ListExternalApis][google.cloud.apihub.v1.ApiHub.ListExternalApis] + method's response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListExternalApisRequest): + request = apihub_service.ListExternalApisRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_external_apis] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListExternalApisPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ApiHubClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ApiHubClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py new file mode 100644 index 000000000000..510192a2f321 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py @@ -0,0 +1,633 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.apihub_v1.types import apihub_service, common_fields + + +class ListApisPager: + """A pager for iterating through ``list_apis`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.apihub_v1.types.ListApisResponse` object, and + provides an ``__iter__`` method to iterate through its + ``apis`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListApis`` requests and continue to iterate + through the ``apis`` field on the + corresponding responses. + + All the usual :class:`google.cloud.apihub_v1.types.ListApisResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., apihub_service.ListApisResponse], + request: apihub_service.ListApisRequest, + response: apihub_service.ListApisResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.apihub_v1.types.ListApisRequest): + The initial request object. + response (google.cloud.apihub_v1.types.ListApisResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = apihub_service.ListApisRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[apihub_service.ListApisResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[common_fields.Api]: + for page in self.pages: + yield from page.apis + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListVersionsPager: + """A pager for iterating through ``list_versions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.apihub_v1.types.ListVersionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``versions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListVersions`` requests and continue to iterate + through the ``versions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.apihub_v1.types.ListVersionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., apihub_service.ListVersionsResponse], + request: apihub_service.ListVersionsRequest, + response: apihub_service.ListVersionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.apihub_v1.types.ListVersionsRequest): + The initial request object. + response (google.cloud.apihub_v1.types.ListVersionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = apihub_service.ListVersionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[apihub_service.ListVersionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[common_fields.Version]: + for page in self.pages: + yield from page.versions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSpecsPager: + """A pager for iterating through ``list_specs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.apihub_v1.types.ListSpecsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``specs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSpecs`` requests and continue to iterate + through the ``specs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.apihub_v1.types.ListSpecsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., apihub_service.ListSpecsResponse], + request: apihub_service.ListSpecsRequest, + response: apihub_service.ListSpecsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.apihub_v1.types.ListSpecsRequest): + The initial request object. + response (google.cloud.apihub_v1.types.ListSpecsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = apihub_service.ListSpecsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[apihub_service.ListSpecsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[common_fields.Spec]: + for page in self.pages: + yield from page.specs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListApiOperationsPager: + """A pager for iterating through ``list_api_operations`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.apihub_v1.types.ListApiOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``api_operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListApiOperations`` requests and continue to iterate + through the ``api_operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.apihub_v1.types.ListApiOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., apihub_service.ListApiOperationsResponse], + request: apihub_service.ListApiOperationsRequest, + response: apihub_service.ListApiOperationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.apihub_v1.types.ListApiOperationsRequest): + The initial request object. + response (google.cloud.apihub_v1.types.ListApiOperationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = apihub_service.ListApiOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[apihub_service.ListApiOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[common_fields.ApiOperation]: + for page in self.pages: + yield from page.api_operations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeploymentsPager: + """A pager for iterating through ``list_deployments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.apihub_v1.types.ListDeploymentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deployments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeployments`` requests and continue to iterate + through the ``deployments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.apihub_v1.types.ListDeploymentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., apihub_service.ListDeploymentsResponse], + request: apihub_service.ListDeploymentsRequest, + response: apihub_service.ListDeploymentsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.apihub_v1.types.ListDeploymentsRequest): + The initial request object. + response (google.cloud.apihub_v1.types.ListDeploymentsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = apihub_service.ListDeploymentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[apihub_service.ListDeploymentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[common_fields.Deployment]: + for page in self.pages: + yield from page.deployments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAttributesPager: + """A pager for iterating through ``list_attributes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.apihub_v1.types.ListAttributesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``attributes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAttributes`` requests and continue to iterate + through the ``attributes`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.apihub_v1.types.ListAttributesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., apihub_service.ListAttributesResponse], + request: apihub_service.ListAttributesRequest, + response: apihub_service.ListAttributesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.apihub_v1.types.ListAttributesRequest): + The initial request object. + response (google.cloud.apihub_v1.types.ListAttributesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = apihub_service.ListAttributesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[apihub_service.ListAttributesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[common_fields.Attribute]: + for page in self.pages: + yield from page.attributes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchResourcesPager: + """A pager for iterating through ``search_resources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.apihub_v1.types.SearchResourcesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``search_results`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchResources`` requests and continue to iterate + through the ``search_results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.apihub_v1.types.SearchResourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., apihub_service.SearchResourcesResponse], + request: apihub_service.SearchResourcesRequest, + response: apihub_service.SearchResourcesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.apihub_v1.types.SearchResourcesRequest): + The initial request object. + response (google.cloud.apihub_v1.types.SearchResourcesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = apihub_service.SearchResourcesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[apihub_service.SearchResourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[apihub_service.SearchResult]: + for page in self.pages: + yield from page.search_results + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListExternalApisPager: + """A pager for iterating through ``list_external_apis`` requests. + + This class thinly wraps an initial + :class:`google.cloud.apihub_v1.types.ListExternalApisResponse` object, and + provides an ``__iter__`` method to iterate through its + ``external_apis`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListExternalApis`` requests and continue to iterate + through the ``external_apis`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.apihub_v1.types.ListExternalApisResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., apihub_service.ListExternalApisResponse], + request: apihub_service.ListExternalApisRequest, + response: apihub_service.ListExternalApisResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.apihub_v1.types.ListExternalApisRequest): + The initial request object. + response (google.cloud.apihub_v1.types.ListExternalApisResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = apihub_service.ListExternalApisRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[apihub_service.ListExternalApisResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[common_fields.ExternalApi]: + for page in self.pages: + yield from page.external_apis + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py similarity index 67% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/__init__.py rename to packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py index 0cdd254a8628..904125024a7b 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py @@ -16,15 +16,15 @@ from collections import OrderedDict from typing import Dict, Type -from .base import TeamServiceTransport -from .rest import TeamServiceRestInterceptor, TeamServiceRestTransport +from .base import ApiHubTransport +from .rest import ApiHubRestInterceptor, ApiHubRestTransport # Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[TeamServiceTransport]] -_transport_registry["rest"] = TeamServiceRestTransport +_transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubTransport]] +_transport_registry["rest"] = ApiHubRestTransport __all__ = ( - "TeamServiceTransport", - "TeamServiceRestTransport", - "TeamServiceRestInterceptor", + "ApiHubTransport", + "ApiHubRestTransport", + "ApiHubRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/base.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/base.py new file mode 100644 index 000000000000..521136f5fd5f --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/base.py @@ -0,0 +1,870 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version +from google.cloud.apihub_v1.types import apihub_service, common_fields + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ApiHubTransport(abc.ABC): + """Abstract transport class for ApiHub.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "apihub.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_api: gapic_v1.method.wrap_method( + self.create_api, + default_timeout=60.0, + client_info=client_info, + ), + self.get_api: gapic_v1.method.wrap_method( + self.get_api, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_apis: gapic_v1.method.wrap_method( + self.list_apis, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_api: gapic_v1.method.wrap_method( + self.update_api, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_api: gapic_v1.method.wrap_method( + self.delete_api, + default_timeout=60.0, + client_info=client_info, + ), + self.create_version: gapic_v1.method.wrap_method( + self.create_version, + default_timeout=60.0, + client_info=client_info, + ), + self.get_version: gapic_v1.method.wrap_method( + self.get_version, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + 
client_info=client_info, + ), + self.list_versions: gapic_v1.method.wrap_method( + self.list_versions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_version: gapic_v1.method.wrap_method( + self.update_version, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_version: gapic_v1.method.wrap_method( + self.delete_version, + default_timeout=60.0, + client_info=client_info, + ), + self.create_spec: gapic_v1.method.wrap_method( + self.create_spec, + default_timeout=60.0, + client_info=client_info, + ), + self.get_spec: gapic_v1.method.wrap_method( + self.get_spec, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_spec_contents: gapic_v1.method.wrap_method( + self.get_spec_contents, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_specs: gapic_v1.method.wrap_method( + self.list_specs, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_spec: gapic_v1.method.wrap_method( + self.update_spec, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_spec: gapic_v1.method.wrap_method( + self.delete_spec, + default_timeout=60.0, + client_info=client_info, + ), + self.get_api_operation: gapic_v1.method.wrap_method( + self.get_api_operation, + 
default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_api_operations: gapic_v1.method.wrap_method( + self.list_api_operations, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_definition: gapic_v1.method.wrap_method( + self.get_definition, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_deployment: gapic_v1.method.wrap_method( + self.create_deployment, + default_timeout=60.0, + client_info=client_info, + ), + self.get_deployment: gapic_v1.method.wrap_method( + self.get_deployment, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_deployments: gapic_v1.method.wrap_method( + self.list_deployments, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_deployment: gapic_v1.method.wrap_method( + self.update_deployment, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_deployment: gapic_v1.method.wrap_method( + self.delete_deployment, + default_timeout=60.0, + client_info=client_info, + ), + self.create_attribute: gapic_v1.method.wrap_method( + 
self.create_attribute, + default_timeout=60.0, + client_info=client_info, + ), + self.get_attribute: gapic_v1.method.wrap_method( + self.get_attribute, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_attribute: gapic_v1.method.wrap_method( + self.update_attribute, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_attribute: gapic_v1.method.wrap_method( + self.delete_attribute, + default_timeout=60.0, + client_info=client_info, + ), + self.list_attributes: gapic_v1.method.wrap_method( + self.list_attributes, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.search_resources: gapic_v1.method.wrap_method( + self.search_resources, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_external_api: gapic_v1.method.wrap_method( + self.create_external_api, + default_timeout=60.0, + client_info=client_info, + ), + self.get_external_api: gapic_v1.method.wrap_method( + self.get_external_api, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_external_api: gapic_v1.method.wrap_method( + self.update_external_api, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_external_api: gapic_v1.method.wrap_method( + self.delete_external_api, + 
default_timeout=60.0, + client_info=client_info, + ), + self.list_external_apis: gapic_v1.method.wrap_method( + self.list_external_apis, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_api( + self, + ) -> Callable[ + [apihub_service.CreateApiRequest], + Union[common_fields.Api, Awaitable[common_fields.Api]], + ]: + raise NotImplementedError() + + @property + def get_api( + self, + ) -> Callable[ + [apihub_service.GetApiRequest], + Union[common_fields.Api, Awaitable[common_fields.Api]], + ]: + raise NotImplementedError() + + @property + def list_apis( + self, + ) -> Callable[ + [apihub_service.ListApisRequest], + Union[ + apihub_service.ListApisResponse, Awaitable[apihub_service.ListApisResponse] + ], + ]: + raise NotImplementedError() + + @property + def update_api( + self, + ) -> Callable[ + [apihub_service.UpdateApiRequest], + Union[common_fields.Api, Awaitable[common_fields.Api]], + ]: + raise NotImplementedError() + + @property + def delete_api( + self, + ) -> Callable[ + [apihub_service.DeleteApiRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def create_version( + self, + ) -> Callable[ + [apihub_service.CreateVersionRequest], + Union[common_fields.Version, Awaitable[common_fields.Version]], + ]: + raise NotImplementedError() + + @property + def get_version( + self, + ) -> Callable[ + [apihub_service.GetVersionRequest], + Union[common_fields.Version, Awaitable[common_fields.Version]], + ]: + raise NotImplementedError() + + @property + 
def list_versions( + self, + ) -> Callable[ + [apihub_service.ListVersionsRequest], + Union[ + apihub_service.ListVersionsResponse, + Awaitable[apihub_service.ListVersionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_version( + self, + ) -> Callable[ + [apihub_service.UpdateVersionRequest], + Union[common_fields.Version, Awaitable[common_fields.Version]], + ]: + raise NotImplementedError() + + @property + def delete_version( + self, + ) -> Callable[ + [apihub_service.DeleteVersionRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def create_spec( + self, + ) -> Callable[ + [apihub_service.CreateSpecRequest], + Union[common_fields.Spec, Awaitable[common_fields.Spec]], + ]: + raise NotImplementedError() + + @property + def get_spec( + self, + ) -> Callable[ + [apihub_service.GetSpecRequest], + Union[common_fields.Spec, Awaitable[common_fields.Spec]], + ]: + raise NotImplementedError() + + @property + def get_spec_contents( + self, + ) -> Callable[ + [apihub_service.GetSpecContentsRequest], + Union[common_fields.SpecContents, Awaitable[common_fields.SpecContents]], + ]: + raise NotImplementedError() + + @property + def list_specs( + self, + ) -> Callable[ + [apihub_service.ListSpecsRequest], + Union[ + apihub_service.ListSpecsResponse, + Awaitable[apihub_service.ListSpecsResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_spec( + self, + ) -> Callable[ + [apihub_service.UpdateSpecRequest], + Union[common_fields.Spec, Awaitable[common_fields.Spec]], + ]: + raise NotImplementedError() + + @property + def delete_spec( + self, + ) -> Callable[ + [apihub_service.DeleteSpecRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_api_operation( + self, + ) -> Callable[ + [apihub_service.GetApiOperationRequest], + Union[common_fields.ApiOperation, Awaitable[common_fields.ApiOperation]], + ]: 
+ raise NotImplementedError() + + @property + def list_api_operations( + self, + ) -> Callable[ + [apihub_service.ListApiOperationsRequest], + Union[ + apihub_service.ListApiOperationsResponse, + Awaitable[apihub_service.ListApiOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_definition( + self, + ) -> Callable[ + [apihub_service.GetDefinitionRequest], + Union[common_fields.Definition, Awaitable[common_fields.Definition]], + ]: + raise NotImplementedError() + + @property + def create_deployment( + self, + ) -> Callable[ + [apihub_service.CreateDeploymentRequest], + Union[common_fields.Deployment, Awaitable[common_fields.Deployment]], + ]: + raise NotImplementedError() + + @property + def get_deployment( + self, + ) -> Callable[ + [apihub_service.GetDeploymentRequest], + Union[common_fields.Deployment, Awaitable[common_fields.Deployment]], + ]: + raise NotImplementedError() + + @property + def list_deployments( + self, + ) -> Callable[ + [apihub_service.ListDeploymentsRequest], + Union[ + apihub_service.ListDeploymentsResponse, + Awaitable[apihub_service.ListDeploymentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_deployment( + self, + ) -> Callable[ + [apihub_service.UpdateDeploymentRequest], + Union[common_fields.Deployment, Awaitable[common_fields.Deployment]], + ]: + raise NotImplementedError() + + @property + def delete_deployment( + self, + ) -> Callable[ + [apihub_service.DeleteDeploymentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def create_attribute( + self, + ) -> Callable[ + [apihub_service.CreateAttributeRequest], + Union[common_fields.Attribute, Awaitable[common_fields.Attribute]], + ]: + raise NotImplementedError() + + @property + def get_attribute( + self, + ) -> Callable[ + [apihub_service.GetAttributeRequest], + Union[common_fields.Attribute, Awaitable[common_fields.Attribute]], + ]: + raise NotImplementedError() 
+ + @property + def update_attribute( + self, + ) -> Callable[ + [apihub_service.UpdateAttributeRequest], + Union[common_fields.Attribute, Awaitable[common_fields.Attribute]], + ]: + raise NotImplementedError() + + @property + def delete_attribute( + self, + ) -> Callable[ + [apihub_service.DeleteAttributeRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_attributes( + self, + ) -> Callable[ + [apihub_service.ListAttributesRequest], + Union[ + apihub_service.ListAttributesResponse, + Awaitable[apihub_service.ListAttributesResponse], + ], + ]: + raise NotImplementedError() + + @property + def search_resources( + self, + ) -> Callable[ + [apihub_service.SearchResourcesRequest], + Union[ + apihub_service.SearchResourcesResponse, + Awaitable[apihub_service.SearchResourcesResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_external_api( + self, + ) -> Callable[ + [apihub_service.CreateExternalApiRequest], + Union[common_fields.ExternalApi, Awaitable[common_fields.ExternalApi]], + ]: + raise NotImplementedError() + + @property + def get_external_api( + self, + ) -> Callable[ + [apihub_service.GetExternalApiRequest], + Union[common_fields.ExternalApi, Awaitable[common_fields.ExternalApi]], + ]: + raise NotImplementedError() + + @property + def update_external_api( + self, + ) -> Callable[ + [apihub_service.UpdateExternalApiRequest], + Union[common_fields.ExternalApi, Awaitable[common_fields.ExternalApi]], + ]: + raise NotImplementedError() + + @property + def delete_external_api( + self, + ) -> Callable[ + [apihub_service.DeleteExternalApiRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_external_apis( + self, + ) -> Callable[ + [apihub_service.ListExternalApisRequest], + Union[ + apihub_service.ListExternalApisResponse, + Awaitable[apihub_service.ListExternalApisResponse], + ], + ]: + raise 
NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ApiHubTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/grpc.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/grpc.py new file mode 100644 index 000000000000..81496577ce93 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/grpc.py @@ -0,0 +1,1436 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.apihub_v1.types import apihub_service, common_fields + +from .base import DEFAULT_CLIENT_INFO, ApiHubTransport + + +class ApiHubGrpcTransport(ApiHubTransport): + """gRPC backend transport for ApiHub. + + This service provides all methods related to the API hub. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_api( + self, + ) -> Callable[[apihub_service.CreateApiRequest], common_fields.Api]: + r"""Return a callable for the create api method over gRPC. + + Create an API resource in the API hub. + Once an API resource is created, versions can be added + to it. + + Returns: + Callable[[~.CreateApiRequest], + ~.Api]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_api" not in self._stubs: + self._stubs["create_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/CreateApi", + request_serializer=apihub_service.CreateApiRequest.serialize, + response_deserializer=common_fields.Api.deserialize, + ) + return self._stubs["create_api"] + + @property + def get_api(self) -> Callable[[apihub_service.GetApiRequest], common_fields.Api]: + r"""Return a callable for the get api method over gRPC. + + Get API resource details including the API versions + contained in it. + + Returns: + Callable[[~.GetApiRequest], + ~.Api]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_api" not in self._stubs: + self._stubs["get_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetApi", + request_serializer=apihub_service.GetApiRequest.serialize, + response_deserializer=common_fields.Api.deserialize, + ) + return self._stubs["get_api"] + + @property + def list_apis( + self, + ) -> Callable[[apihub_service.ListApisRequest], apihub_service.ListApisResponse]: + r"""Return a callable for the list apis method over gRPC. + + List API resources in the API hub. + + Returns: + Callable[[~.ListApisRequest], + ~.ListApisResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_apis" not in self._stubs: + self._stubs["list_apis"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListApis", + request_serializer=apihub_service.ListApisRequest.serialize, + response_deserializer=apihub_service.ListApisResponse.deserialize, + ) + return self._stubs["list_apis"] + + @property + def update_api( + self, + ) -> Callable[[apihub_service.UpdateApiRequest], common_fields.Api]: + r"""Return a callable for the update api method over gRPC. + + Update an API resource in the API hub. 
The following fields in + the [API][] can be updated: + + - [display_name][google.cloud.apihub.v1.Api.display_name] + - [description][google.cloud.apihub.v1.Api.description] + - [owner][google.cloud.apihub.v1.Api.owner] + - [documentation][google.cloud.apihub.v1.Api.documentation] + - [target_user][google.cloud.apihub.v1.Api.target_user] + - [team][google.cloud.apihub.v1.Api.team] + - [business_unit][google.cloud.apihub.v1.Api.business_unit] + - [maturity_level][google.cloud.apihub.v1.Api.maturity_level] + - [attributes][google.cloud.apihub.v1.Api.attributes] + + The + [update_mask][google.cloud.apihub.v1.UpdateApiRequest.update_mask] + should be used to specify the fields being updated. + + Updating the owner field requires complete owner message and + updates both owner and email fields. + + Returns: + Callable[[~.UpdateApiRequest], + ~.Api]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_api" not in self._stubs: + self._stubs["update_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/UpdateApi", + request_serializer=apihub_service.UpdateApiRequest.serialize, + response_deserializer=common_fields.Api.deserialize, + ) + return self._stubs["update_api"] + + @property + def delete_api( + self, + ) -> Callable[[apihub_service.DeleteApiRequest], empty_pb2.Empty]: + r"""Return a callable for the delete api method over gRPC. + + Delete an API resource in the API hub. API can only + be deleted if all underlying versions are deleted. + + Returns: + Callable[[~.DeleteApiRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_api" not in self._stubs: + self._stubs["delete_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/DeleteApi", + request_serializer=apihub_service.DeleteApiRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_api"] + + @property + def create_version( + self, + ) -> Callable[[apihub_service.CreateVersionRequest], common_fields.Version]: + r"""Return a callable for the create version method over gRPC. + + Create an API version for an API resource in the API + hub. + + Returns: + Callable[[~.CreateVersionRequest], + ~.Version]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_version" not in self._stubs: + self._stubs["create_version"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/CreateVersion", + request_serializer=apihub_service.CreateVersionRequest.serialize, + response_deserializer=common_fields.Version.deserialize, + ) + return self._stubs["create_version"] + + @property + def get_version( + self, + ) -> Callable[[apihub_service.GetVersionRequest], common_fields.Version]: + r"""Return a callable for the get version method over gRPC. + + Get details about the API version of an API resource. + This will include information about the specs and + operations present in the API version as well as the + deployments linked to it. + + Returns: + Callable[[~.GetVersionRequest], + ~.Version]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_version" not in self._stubs: + self._stubs["get_version"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetVersion", + request_serializer=apihub_service.GetVersionRequest.serialize, + response_deserializer=common_fields.Version.deserialize, + ) + return self._stubs["get_version"] + + @property + def list_versions( + self, + ) -> Callable[ + [apihub_service.ListVersionsRequest], apihub_service.ListVersionsResponse + ]: + r"""Return a callable for the list versions method over gRPC. + + List API versions of an API resource in the API hub. + + Returns: + Callable[[~.ListVersionsRequest], + ~.ListVersionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_versions" not in self._stubs: + self._stubs["list_versions"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListVersions", + request_serializer=apihub_service.ListVersionsRequest.serialize, + response_deserializer=apihub_service.ListVersionsResponse.deserialize, + ) + return self._stubs["list_versions"] + + @property + def update_version( + self, + ) -> Callable[[apihub_service.UpdateVersionRequest], common_fields.Version]: + r"""Return a callable for the update version method over gRPC. + + Update API version. 
The following fields in the + [version][google.cloud.apihub.v1.Version] can be updated + currently: + + - [display_name][google.cloud.apihub.v1.Version.display_name] + - [description][google.cloud.apihub.v1.Version.description] + - [documentation][google.cloud.apihub.v1.Version.documentation] + - [deployments][google.cloud.apihub.v1.Version.deployments] + - [lifecycle][google.cloud.apihub.v1.Version.lifecycle] + - [compliance][google.cloud.apihub.v1.Version.compliance] + - [accreditation][google.cloud.apihub.v1.Version.accreditation] + - [attributes][google.cloud.apihub.v1.Version.attributes] + + The + [update_mask][google.cloud.apihub.v1.UpdateVersionRequest.update_mask] + should be used to specify the fields being updated. + + Returns: + Callable[[~.UpdateVersionRequest], + ~.Version]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_version" not in self._stubs: + self._stubs["update_version"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/UpdateVersion", + request_serializer=apihub_service.UpdateVersionRequest.serialize, + response_deserializer=common_fields.Version.deserialize, + ) + return self._stubs["update_version"] + + @property + def delete_version( + self, + ) -> Callable[[apihub_service.DeleteVersionRequest], empty_pb2.Empty]: + r"""Return a callable for the delete version method over gRPC. + + Delete an API version. Version can only be deleted if + all underlying specs, operations, definitions and linked + deployments are deleted. + + Returns: + Callable[[~.DeleteVersionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_version" not in self._stubs: + self._stubs["delete_version"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/DeleteVersion", + request_serializer=apihub_service.DeleteVersionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_version"] + + @property + def create_spec( + self, + ) -> Callable[[apihub_service.CreateSpecRequest], common_fields.Spec]: + r"""Return a callable for the create spec method over gRPC. + + Add a spec to an API version in the API hub. Multiple specs can + be added to an API version. Note, while adding a spec, at least + one of ``contents`` or ``source_uri`` must be provided. If + ``contents`` is provided, then ``spec_type`` must also be + provided. + + On adding a spec with contents to the version, the operations + present in it will be added to the version.Note that the file + contents in the spec should be of the same type as defined in + the + ``projects/{project}/locations/{location}/attributes/system-spec-type`` + attribute associated with spec resource. Note that specs of + various types can be uploaded, however parsing of details is + supported for OpenAPI spec currently. + + In order to access the information parsed from the spec, use the + [GetSpec][google.cloud.apihub.v1.ApiHub.GetSpec] method. In + order to access the raw contents for a particular spec, use the + [GetSpecContents][google.cloud.apihub.v1.ApiHub.GetSpecContents] + method. In order to access the operations parsed from the spec, + use the + [ListAPIOperations][google.cloud.apihub.v1.ApiHub.ListApiOperations] + method. + + Returns: + Callable[[~.CreateSpecRequest], + ~.Spec]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_spec" not in self._stubs: + self._stubs["create_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/CreateSpec", + request_serializer=apihub_service.CreateSpecRequest.serialize, + response_deserializer=common_fields.Spec.deserialize, + ) + return self._stubs["create_spec"] + + @property + def get_spec(self) -> Callable[[apihub_service.GetSpecRequest], common_fields.Spec]: + r"""Return a callable for the get spec method over gRPC. + + Get details about the information parsed from a spec. Note that + this method does not return the raw spec contents. Use + [GetSpecContents][google.cloud.apihub.v1.ApiHub.GetSpecContents] + method to retrieve the same. + + Returns: + Callable[[~.GetSpecRequest], + ~.Spec]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_spec" not in self._stubs: + self._stubs["get_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetSpec", + request_serializer=apihub_service.GetSpecRequest.serialize, + response_deserializer=common_fields.Spec.deserialize, + ) + return self._stubs["get_spec"] + + @property + def get_spec_contents( + self, + ) -> Callable[[apihub_service.GetSpecContentsRequest], common_fields.SpecContents]: + r"""Return a callable for the get spec contents method over gRPC. + + Get spec contents. + + Returns: + Callable[[~.GetSpecContentsRequest], + ~.SpecContents]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_spec_contents" not in self._stubs: + self._stubs["get_spec_contents"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetSpecContents", + request_serializer=apihub_service.GetSpecContentsRequest.serialize, + response_deserializer=common_fields.SpecContents.deserialize, + ) + return self._stubs["get_spec_contents"] + + @property + def list_specs( + self, + ) -> Callable[[apihub_service.ListSpecsRequest], apihub_service.ListSpecsResponse]: + r"""Return a callable for the list specs method over gRPC. + + List specs corresponding to a particular API + resource. + + Returns: + Callable[[~.ListSpecsRequest], + ~.ListSpecsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_specs" not in self._stubs: + self._stubs["list_specs"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListSpecs", + request_serializer=apihub_service.ListSpecsRequest.serialize, + response_deserializer=apihub_service.ListSpecsResponse.deserialize, + ) + return self._stubs["list_specs"] + + @property + def update_spec( + self, + ) -> Callable[[apihub_service.UpdateSpecRequest], common_fields.Spec]: + r"""Return a callable for the update spec method over gRPC. + + Update spec. The following fields in the + [spec][google.cloud.apihub.v1.Spec] can be updated: + + - [display_name][google.cloud.apihub.v1.Spec.display_name] + - [source_uri][google.cloud.apihub.v1.Spec.source_uri] + - [lint_response][google.cloud.apihub.v1.Spec.lint_response] + - [attributes][google.cloud.apihub.v1.Spec.attributes] + - [contents][google.cloud.apihub.v1.Spec.contents] + - [spec_type][google.cloud.apihub.v1.Spec.spec_type] + + In case of an OAS spec, updating spec contents can lead to: + + 1. Creation, deletion and update of operations. + 2. 
Creation, deletion and update of definitions. + 3. Update of other info parsed out from the new spec. + + In case of contents or source_uri being present in update mask, + spec_type must also be present. Also, spec_type can not be + present in update mask if contents or source_uri is not present. + + The + [update_mask][google.cloud.apihub.v1.UpdateSpecRequest.update_mask] + should be used to specify the fields being updated. + + Returns: + Callable[[~.UpdateSpecRequest], + ~.Spec]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_spec" not in self._stubs: + self._stubs["update_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/UpdateSpec", + request_serializer=apihub_service.UpdateSpecRequest.serialize, + response_deserializer=common_fields.Spec.deserialize, + ) + return self._stubs["update_spec"] + + @property + def delete_spec( + self, + ) -> Callable[[apihub_service.DeleteSpecRequest], empty_pb2.Empty]: + r"""Return a callable for the delete spec method over gRPC. + + Delete a spec. + Deleting a spec will also delete the associated + operations from the version. + + Returns: + Callable[[~.DeleteSpecRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_spec" not in self._stubs: + self._stubs["delete_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/DeleteSpec", + request_serializer=apihub_service.DeleteSpecRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_spec"] + + @property + def get_api_operation( + self, + ) -> Callable[[apihub_service.GetApiOperationRequest], common_fields.ApiOperation]: + r"""Return a callable for the get api operation method over gRPC. + + Get details about a particular operation in API + version. + + Returns: + Callable[[~.GetApiOperationRequest], + ~.ApiOperation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_api_operation" not in self._stubs: + self._stubs["get_api_operation"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetApiOperation", + request_serializer=apihub_service.GetApiOperationRequest.serialize, + response_deserializer=common_fields.ApiOperation.deserialize, + ) + return self._stubs["get_api_operation"] + + @property + def list_api_operations( + self, + ) -> Callable[ + [apihub_service.ListApiOperationsRequest], + apihub_service.ListApiOperationsResponse, + ]: + r"""Return a callable for the list api operations method over gRPC. + + List operations in an API version. + + Returns: + Callable[[~.ListApiOperationsRequest], + ~.ListApiOperationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_api_operations" not in self._stubs: + self._stubs["list_api_operations"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListApiOperations", + request_serializer=apihub_service.ListApiOperationsRequest.serialize, + response_deserializer=apihub_service.ListApiOperationsResponse.deserialize, + ) + return self._stubs["list_api_operations"] + + @property + def get_definition( + self, + ) -> Callable[[apihub_service.GetDefinitionRequest], common_fields.Definition]: + r"""Return a callable for the get definition method over gRPC. + + Get details about a definition in an API version. + + Returns: + Callable[[~.GetDefinitionRequest], + ~.Definition]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_definition" not in self._stubs: + self._stubs["get_definition"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetDefinition", + request_serializer=apihub_service.GetDefinitionRequest.serialize, + response_deserializer=common_fields.Definition.deserialize, + ) + return self._stubs["get_definition"] + + @property + def create_deployment( + self, + ) -> Callable[[apihub_service.CreateDeploymentRequest], common_fields.Deployment]: + r"""Return a callable for the create deployment method over gRPC. + + Create a deployment resource in the API hub. + Once a deployment resource is created, it can be + associated with API versions. + + Returns: + Callable[[~.CreateDeploymentRequest], + ~.Deployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_deployment" not in self._stubs: + self._stubs["create_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/CreateDeployment", + request_serializer=apihub_service.CreateDeploymentRequest.serialize, + response_deserializer=common_fields.Deployment.deserialize, + ) + return self._stubs["create_deployment"] + + @property + def get_deployment( + self, + ) -> Callable[[apihub_service.GetDeploymentRequest], common_fields.Deployment]: + r"""Return a callable for the get deployment method over gRPC. + + Get details about a deployment and the API versions + linked to it. + + Returns: + Callable[[~.GetDeploymentRequest], + ~.Deployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deployment" not in self._stubs: + self._stubs["get_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetDeployment", + request_serializer=apihub_service.GetDeploymentRequest.serialize, + response_deserializer=common_fields.Deployment.deserialize, + ) + return self._stubs["get_deployment"] + + @property + def list_deployments( + self, + ) -> Callable[ + [apihub_service.ListDeploymentsRequest], apihub_service.ListDeploymentsResponse + ]: + r"""Return a callable for the list deployments method over gRPC. + + List deployment resources in the API hub. + + Returns: + Callable[[~.ListDeploymentsRequest], + ~.ListDeploymentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_deployments" not in self._stubs: + self._stubs["list_deployments"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListDeployments", + request_serializer=apihub_service.ListDeploymentsRequest.serialize, + response_deserializer=apihub_service.ListDeploymentsResponse.deserialize, + ) + return self._stubs["list_deployments"] + + @property + def update_deployment( + self, + ) -> Callable[[apihub_service.UpdateDeploymentRequest], common_fields.Deployment]: + r"""Return a callable for the update deployment method over gRPC. + + Update a deployment resource in the API hub. The following + fields in the [deployment + resource][google.cloud.apihub.v1.Deployment] can be updated: + + - [display_name][google.cloud.apihub.v1.Deployment.display_name] + - [description][google.cloud.apihub.v1.Deployment.description] + - [documentation][google.cloud.apihub.v1.Deployment.documentation] + - [deployment_type][google.cloud.apihub.v1.Deployment.deployment_type] + - [resource_uri][google.cloud.apihub.v1.Deployment.resource_uri] + - [endpoints][google.cloud.apihub.v1.Deployment.endpoints] + - [slo][google.cloud.apihub.v1.Deployment.slo] + - [environment][google.cloud.apihub.v1.Deployment.environment] + - [attributes][google.cloud.apihub.v1.Deployment.attributes] + + The + [update_mask][google.cloud.apihub.v1.UpdateDeploymentRequest.update_mask] + should be used to specify the fields being updated. + + Returns: + Callable[[~.UpdateDeploymentRequest], + ~.Deployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_deployment" not in self._stubs: + self._stubs["update_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/UpdateDeployment", + request_serializer=apihub_service.UpdateDeploymentRequest.serialize, + response_deserializer=common_fields.Deployment.deserialize, + ) + return self._stubs["update_deployment"] + + @property + def delete_deployment( + self, + ) -> Callable[[apihub_service.DeleteDeploymentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete deployment method over gRPC. + + Delete a deployment resource in the API hub. + + Returns: + Callable[[~.DeleteDeploymentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deployment" not in self._stubs: + self._stubs["delete_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/DeleteDeployment", + request_serializer=apihub_service.DeleteDeploymentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_deployment"] + + @property + def create_attribute( + self, + ) -> Callable[[apihub_service.CreateAttributeRequest], common_fields.Attribute]: + r"""Return a callable for the create attribute method over gRPC. + + Create a user defined attribute. + + Certain pre defined attributes are already created by the API + hub. These attributes will have type as ``SYSTEM_DEFINED`` and + can be listed via + [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] + method. Allowed values for the same can be updated via + [UpdateAttribute][google.cloud.apihub.v1.ApiHub.UpdateAttribute] + method. + + Returns: + Callable[[~.CreateAttributeRequest], + ~.Attribute]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_attribute" not in self._stubs: + self._stubs["create_attribute"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/CreateAttribute", + request_serializer=apihub_service.CreateAttributeRequest.serialize, + response_deserializer=common_fields.Attribute.deserialize, + ) + return self._stubs["create_attribute"] + + @property + def get_attribute( + self, + ) -> Callable[[apihub_service.GetAttributeRequest], common_fields.Attribute]: + r"""Return a callable for the get attribute method over gRPC. + + Get details about the attribute. + + Returns: + Callable[[~.GetAttributeRequest], + ~.Attribute]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_attribute" not in self._stubs: + self._stubs["get_attribute"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetAttribute", + request_serializer=apihub_service.GetAttributeRequest.serialize, + response_deserializer=common_fields.Attribute.deserialize, + ) + return self._stubs["get_attribute"] + + @property + def update_attribute( + self, + ) -> Callable[[apihub_service.UpdateAttributeRequest], common_fields.Attribute]: + r"""Return a callable for the update attribute method over gRPC. + + Update the attribute. The following fields in the [Attribute + resource][google.cloud.apihub.v1.Attribute] can be updated: + + - [display_name][google.cloud.apihub.v1.Attribute.display_name] + The display name can be updated for user defined attributes + only. 
+ - [description][google.cloud.apihub.v1.Attribute.description] + The description can be updated for user defined attributes + only. + - [allowed_values][google.cloud.apihub.v1.Attribute.allowed_values] + To update the list of allowed values, clients need to use the + fetched list of allowed values and add or remove values to or + from the same list. The mutable allowed values can be updated + for both user defined and System defined attributes. The + immutable allowed values cannot be updated or deleted. The + updated list of allowed values cannot be empty. If an allowed + value that is already used by some resource's attribute is + deleted, then the association between the resource and the + attribute value will also be deleted. + - [cardinality][google.cloud.apihub.v1.Attribute.cardinality] + The cardinality can be updated for user defined attributes + only. Cardinality can only be increased during an update. + + The + [update_mask][google.cloud.apihub.v1.UpdateAttributeRequest.update_mask] + should be used to specify the fields being updated. + + Returns: + Callable[[~.UpdateAttributeRequest], + ~.Attribute]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_attribute" not in self._stubs: + self._stubs["update_attribute"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/UpdateAttribute", + request_serializer=apihub_service.UpdateAttributeRequest.serialize, + response_deserializer=common_fields.Attribute.deserialize, + ) + return self._stubs["update_attribute"] + + @property + def delete_attribute( + self, + ) -> Callable[[apihub_service.DeleteAttributeRequest], empty_pb2.Empty]: + r"""Return a callable for the delete attribute method over gRPC. + + Delete an attribute. 
+ + Note: System defined attributes cannot be deleted. All + associations of the attribute being deleted with any API + hub resource will also get deleted. + + Returns: + Callable[[~.DeleteAttributeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_attribute" not in self._stubs: + self._stubs["delete_attribute"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/DeleteAttribute", + request_serializer=apihub_service.DeleteAttributeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_attribute"] + + @property + def list_attributes( + self, + ) -> Callable[ + [apihub_service.ListAttributesRequest], apihub_service.ListAttributesResponse + ]: + r"""Return a callable for the list attributes method over gRPC. + + List all attributes. + + Returns: + Callable[[~.ListAttributesRequest], + ~.ListAttributesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_attributes" not in self._stubs: + self._stubs["list_attributes"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListAttributes", + request_serializer=apihub_service.ListAttributesRequest.serialize, + response_deserializer=apihub_service.ListAttributesResponse.deserialize, + ) + return self._stubs["list_attributes"] + + @property + def search_resources( + self, + ) -> Callable[ + [apihub_service.SearchResourcesRequest], apihub_service.SearchResourcesResponse + ]: + r"""Return a callable for the search resources method over gRPC. 
+ + Search across API-Hub resources. + + Returns: + Callable[[~.SearchResourcesRequest], + ~.SearchResourcesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_resources" not in self._stubs: + self._stubs["search_resources"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/SearchResources", + request_serializer=apihub_service.SearchResourcesRequest.serialize, + response_deserializer=apihub_service.SearchResourcesResponse.deserialize, + ) + return self._stubs["search_resources"] + + @property + def create_external_api( + self, + ) -> Callable[[apihub_service.CreateExternalApiRequest], common_fields.ExternalApi]: + r"""Return a callable for the create external api method over gRPC. + + Create an External API resource in the API hub. + + Returns: + Callable[[~.CreateExternalApiRequest], + ~.ExternalApi]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_external_api" not in self._stubs: + self._stubs["create_external_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/CreateExternalApi", + request_serializer=apihub_service.CreateExternalApiRequest.serialize, + response_deserializer=common_fields.ExternalApi.deserialize, + ) + return self._stubs["create_external_api"] + + @property + def get_external_api( + self, + ) -> Callable[[apihub_service.GetExternalApiRequest], common_fields.ExternalApi]: + r"""Return a callable for the get external api method over gRPC. + + Get details about an External API resource in the API + hub. 
+ + Returns: + Callable[[~.GetExternalApiRequest], + ~.ExternalApi]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_external_api" not in self._stubs: + self._stubs["get_external_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetExternalApi", + request_serializer=apihub_service.GetExternalApiRequest.serialize, + response_deserializer=common_fields.ExternalApi.deserialize, + ) + return self._stubs["get_external_api"] + + @property + def update_external_api( + self, + ) -> Callable[[apihub_service.UpdateExternalApiRequest], common_fields.ExternalApi]: + r"""Return a callable for the update external api method over gRPC. + + Update an External API resource in the API hub. The following + fields can be updated: + + - [display_name][google.cloud.apihub.v1.ExternalApi.display_name] + - [description][google.cloud.apihub.v1.ExternalApi.description] + - [documentation][google.cloud.apihub.v1.ExternalApi.documentation] + - [endpoints][google.cloud.apihub.v1.ExternalApi.endpoints] + - [paths][google.cloud.apihub.v1.ExternalApi.paths] + + The + [update_mask][google.cloud.apihub.v1.UpdateExternalApiRequest.update_mask] + should be used to specify the fields being updated. + + Returns: + Callable[[~.UpdateExternalApiRequest], + ~.ExternalApi]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_external_api" not in self._stubs: + self._stubs["update_external_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/UpdateExternalApi", + request_serializer=apihub_service.UpdateExternalApiRequest.serialize, + response_deserializer=common_fields.ExternalApi.deserialize, + ) + return self._stubs["update_external_api"] + + @property + def delete_external_api( + self, + ) -> Callable[[apihub_service.DeleteExternalApiRequest], empty_pb2.Empty]: + r"""Return a callable for the delete external api method over gRPC. + + Delete an External API resource in the API hub. + + Returns: + Callable[[~.DeleteExternalApiRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_external_api" not in self._stubs: + self._stubs["delete_external_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/DeleteExternalApi", + request_serializer=apihub_service.DeleteExternalApiRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_external_api"] + + @property + def list_external_apis( + self, + ) -> Callable[ + [apihub_service.ListExternalApisRequest], + apihub_service.ListExternalApisResponse, + ]: + r"""Return a callable for the list external apis method over gRPC. + + List External API resources in the API hub. + + Returns: + Callable[[~.ListExternalApisRequest], + ~.ListExternalApisResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_external_apis" not in self._stubs: + self._stubs["list_external_apis"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListExternalApis", + request_serializer=apihub_service.ListExternalApisRequest.serialize, + response_deserializer=apihub_service.ListExternalApisResponse.deserialize, + ) + return self._stubs["list_external_apis"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ApiHubGrpcTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/grpc_asyncio.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/grpc_asyncio.py new file mode 100644 index 000000000000..30051bed6768 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/grpc_asyncio.py @@ -0,0 +1,1811 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.apihub_v1.types import apihub_service, common_fields + +from .base import DEFAULT_CLIENT_INFO, ApiHubTransport +from .grpc import ApiHubGrpcTransport + + +class ApiHubGrpcAsyncIOTransport(ApiHubTransport): + """gRPC AsyncIO backend transport for ApiHub. + + This service provides all methods related to the API hub. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_api( + self, + ) -> Callable[[apihub_service.CreateApiRequest], Awaitable[common_fields.Api]]: + r"""Return a callable for the create api method over gRPC. 
+ + Create an API resource in the API hub. + Once an API resource is created, versions can be added + to it. + + Returns: + Callable[[~.CreateApiRequest], + Awaitable[~.Api]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_api" not in self._stubs: + self._stubs["create_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/CreateApi", + request_serializer=apihub_service.CreateApiRequest.serialize, + response_deserializer=common_fields.Api.deserialize, + ) + return self._stubs["create_api"] + + @property + def get_api( + self, + ) -> Callable[[apihub_service.GetApiRequest], Awaitable[common_fields.Api]]: + r"""Return a callable for the get api method over gRPC. + + Get API resource details including the API versions + contained in it. + + Returns: + Callable[[~.GetApiRequest], + Awaitable[~.Api]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_api" not in self._stubs: + self._stubs["get_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetApi", + request_serializer=apihub_service.GetApiRequest.serialize, + response_deserializer=common_fields.Api.deserialize, + ) + return self._stubs["get_api"] + + @property + def list_apis( + self, + ) -> Callable[ + [apihub_service.ListApisRequest], Awaitable[apihub_service.ListApisResponse] + ]: + r"""Return a callable for the list apis method over gRPC. + + List API resources in the API hub. 
+ + Returns: + Callable[[~.ListApisRequest], + Awaitable[~.ListApisResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_apis" not in self._stubs: + self._stubs["list_apis"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListApis", + request_serializer=apihub_service.ListApisRequest.serialize, + response_deserializer=apihub_service.ListApisResponse.deserialize, + ) + return self._stubs["list_apis"] + + @property + def update_api( + self, + ) -> Callable[[apihub_service.UpdateApiRequest], Awaitable[common_fields.Api]]: + r"""Return a callable for the update api method over gRPC. + + Update an API resource in the API hub. The following fields in + the [API][] can be updated: + + - [display_name][google.cloud.apihub.v1.Api.display_name] + - [description][google.cloud.apihub.v1.Api.description] + - [owner][google.cloud.apihub.v1.Api.owner] + - [documentation][google.cloud.apihub.v1.Api.documentation] + - [target_user][google.cloud.apihub.v1.Api.target_user] + - [team][google.cloud.apihub.v1.Api.team] + - [business_unit][google.cloud.apihub.v1.Api.business_unit] + - [maturity_level][google.cloud.apihub.v1.Api.maturity_level] + - [attributes][google.cloud.apihub.v1.Api.attributes] + + The + [update_mask][google.cloud.apihub.v1.UpdateApiRequest.update_mask] + should be used to specify the fields being updated. + + Updating the owner field requires complete owner message and + updates both owner and email fields. + + Returns: + Callable[[~.UpdateApiRequest], + Awaitable[~.Api]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_api" not in self._stubs: + self._stubs["update_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/UpdateApi", + request_serializer=apihub_service.UpdateApiRequest.serialize, + response_deserializer=common_fields.Api.deserialize, + ) + return self._stubs["update_api"] + + @property + def delete_api( + self, + ) -> Callable[[apihub_service.DeleteApiRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete api method over gRPC. + + Delete an API resource in the API hub. API can only + be deleted if all underlying versions are deleted. + + Returns: + Callable[[~.DeleteApiRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_api" not in self._stubs: + self._stubs["delete_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/DeleteApi", + request_serializer=apihub_service.DeleteApiRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_api"] + + @property + def create_version( + self, + ) -> Callable[ + [apihub_service.CreateVersionRequest], Awaitable[common_fields.Version] + ]: + r"""Return a callable for the create version method over gRPC. + + Create an API version for an API resource in the API + hub. + + Returns: + Callable[[~.CreateVersionRequest], + Awaitable[~.Version]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_version" not in self._stubs: + self._stubs["create_version"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/CreateVersion", + request_serializer=apihub_service.CreateVersionRequest.serialize, + response_deserializer=common_fields.Version.deserialize, + ) + return self._stubs["create_version"] + + @property + def get_version( + self, + ) -> Callable[[apihub_service.GetVersionRequest], Awaitable[common_fields.Version]]: + r"""Return a callable for the get version method over gRPC. + + Get details about the API version of an API resource. + This will include information about the specs and + operations present in the API version as well as the + deployments linked to it. + + Returns: + Callable[[~.GetVersionRequest], + Awaitable[~.Version]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_version" not in self._stubs: + self._stubs["get_version"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetVersion", + request_serializer=apihub_service.GetVersionRequest.serialize, + response_deserializer=common_fields.Version.deserialize, + ) + return self._stubs["get_version"] + + @property + def list_versions( + self, + ) -> Callable[ + [apihub_service.ListVersionsRequest], + Awaitable[apihub_service.ListVersionsResponse], + ]: + r"""Return a callable for the list versions method over gRPC. + + List API versions of an API resource in the API hub. + + Returns: + Callable[[~.ListVersionsRequest], + Awaitable[~.ListVersionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_versions" not in self._stubs: + self._stubs["list_versions"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListVersions", + request_serializer=apihub_service.ListVersionsRequest.serialize, + response_deserializer=apihub_service.ListVersionsResponse.deserialize, + ) + return self._stubs["list_versions"] + + @property + def update_version( + self, + ) -> Callable[ + [apihub_service.UpdateVersionRequest], Awaitable[common_fields.Version] + ]: + r"""Return a callable for the update version method over gRPC. + + Update API version. The following fields in the + [version][google.cloud.apihub.v1.Version] can be updated + currently: + + - [display_name][google.cloud.apihub.v1.Version.display_name] + - [description][google.cloud.apihub.v1.Version.description] + - [documentation][google.cloud.apihub.v1.Version.documentation] + - [deployments][google.cloud.apihub.v1.Version.deployments] + - [lifecycle][google.cloud.apihub.v1.Version.lifecycle] + - [compliance][google.cloud.apihub.v1.Version.compliance] + - [accreditation][google.cloud.apihub.v1.Version.accreditation] + - [attributes][google.cloud.apihub.v1.Version.attributes] + + The + [update_mask][google.cloud.apihub.v1.UpdateVersionRequest.update_mask] + should be used to specify the fields being updated. + + Returns: + Callable[[~.UpdateVersionRequest], + Awaitable[~.Version]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_version" not in self._stubs: + self._stubs["update_version"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/UpdateVersion", + request_serializer=apihub_service.UpdateVersionRequest.serialize, + response_deserializer=common_fields.Version.deserialize, + ) + return self._stubs["update_version"] + + @property + def delete_version( + self, + ) -> Callable[[apihub_service.DeleteVersionRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete version method over gRPC. + + Delete an API version. Version can only be deleted if + all underlying specs, operations, definitions and linked + deployments are deleted. + + Returns: + Callable[[~.DeleteVersionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_version" not in self._stubs: + self._stubs["delete_version"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/DeleteVersion", + request_serializer=apihub_service.DeleteVersionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_version"] + + @property + def create_spec( + self, + ) -> Callable[[apihub_service.CreateSpecRequest], Awaitable[common_fields.Spec]]: + r"""Return a callable for the create spec method over gRPC. + + Add a spec to an API version in the API hub. Multiple specs can + be added to an API version. Note, while adding a spec, at least + one of ``contents`` or ``source_uri`` must be provided. If + ``contents`` is provided, then ``spec_type`` must also be + provided. 
+ + On adding a spec with contents to the version, the operations + present in it will be added to the version.Note that the file + contents in the spec should be of the same type as defined in + the + ``projects/{project}/locations/{location}/attributes/system-spec-type`` + attribute associated with spec resource. Note that specs of + various types can be uploaded, however parsing of details is + supported for OpenAPI spec currently. + + In order to access the information parsed from the spec, use the + [GetSpec][google.cloud.apihub.v1.ApiHub.GetSpec] method. In + order to access the raw contents for a particular spec, use the + [GetSpecContents][google.cloud.apihub.v1.ApiHub.GetSpecContents] + method. In order to access the operations parsed from the spec, + use the + [ListAPIOperations][google.cloud.apihub.v1.ApiHub.ListApiOperations] + method. + + Returns: + Callable[[~.CreateSpecRequest], + Awaitable[~.Spec]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_spec" not in self._stubs: + self._stubs["create_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/CreateSpec", + request_serializer=apihub_service.CreateSpecRequest.serialize, + response_deserializer=common_fields.Spec.deserialize, + ) + return self._stubs["create_spec"] + + @property + def get_spec( + self, + ) -> Callable[[apihub_service.GetSpecRequest], Awaitable[common_fields.Spec]]: + r"""Return a callable for the get spec method over gRPC. + + Get details about the information parsed from a spec. Note that + this method does not return the raw spec contents. Use + [GetSpecContents][google.cloud.apihub.v1.ApiHub.GetSpecContents] + method to retrieve the same. 
+ + Returns: + Callable[[~.GetSpecRequest], + Awaitable[~.Spec]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_spec" not in self._stubs: + self._stubs["get_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetSpec", + request_serializer=apihub_service.GetSpecRequest.serialize, + response_deserializer=common_fields.Spec.deserialize, + ) + return self._stubs["get_spec"] + + @property + def get_spec_contents( + self, + ) -> Callable[ + [apihub_service.GetSpecContentsRequest], Awaitable[common_fields.SpecContents] + ]: + r"""Return a callable for the get spec contents method over gRPC. + + Get spec contents. + + Returns: + Callable[[~.GetSpecContentsRequest], + Awaitable[~.SpecContents]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_spec_contents" not in self._stubs: + self._stubs["get_spec_contents"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetSpecContents", + request_serializer=apihub_service.GetSpecContentsRequest.serialize, + response_deserializer=common_fields.SpecContents.deserialize, + ) + return self._stubs["get_spec_contents"] + + @property + def list_specs( + self, + ) -> Callable[ + [apihub_service.ListSpecsRequest], Awaitable[apihub_service.ListSpecsResponse] + ]: + r"""Return a callable for the list specs method over gRPC. + + List specs corresponding to a particular API + resource. 
+ + Returns: + Callable[[~.ListSpecsRequest], + Awaitable[~.ListSpecsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_specs" not in self._stubs: + self._stubs["list_specs"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListSpecs", + request_serializer=apihub_service.ListSpecsRequest.serialize, + response_deserializer=apihub_service.ListSpecsResponse.deserialize, + ) + return self._stubs["list_specs"] + + @property + def update_spec( + self, + ) -> Callable[[apihub_service.UpdateSpecRequest], Awaitable[common_fields.Spec]]: + r"""Return a callable for the update spec method over gRPC. + + Update spec. The following fields in the + [spec][google.cloud.apihub.v1.Spec] can be updated: + + - [display_name][google.cloud.apihub.v1.Spec.display_name] + - [source_uri][google.cloud.apihub.v1.Spec.source_uri] + - [lint_response][google.cloud.apihub.v1.Spec.lint_response] + - [attributes][google.cloud.apihub.v1.Spec.attributes] + - [contents][google.cloud.apihub.v1.Spec.contents] + - [spec_type][google.cloud.apihub.v1.Spec.spec_type] + + In case of an OAS spec, updating spec contents can lead to: + + 1. Creation, deletion and update of operations. + 2. Creation, deletion and update of definitions. + 3. Update of other info parsed out from the new spec. + + In case of contents or source_uri being present in update mask, + spec_type must also be present. Also, spec_type can not be + present in update mask if contents or source_uri is not present. + + The + [update_mask][google.cloud.apihub.v1.UpdateSpecRequest.update_mask] + should be used to specify the fields being updated. 
+ + Returns: + Callable[[~.UpdateSpecRequest], + Awaitable[~.Spec]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_spec" not in self._stubs: + self._stubs["update_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/UpdateSpec", + request_serializer=apihub_service.UpdateSpecRequest.serialize, + response_deserializer=common_fields.Spec.deserialize, + ) + return self._stubs["update_spec"] + + @property + def delete_spec( + self, + ) -> Callable[[apihub_service.DeleteSpecRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete spec method over gRPC. + + Delete a spec. + Deleting a spec will also delete the associated + operations from the version. + + Returns: + Callable[[~.DeleteSpecRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_spec" not in self._stubs: + self._stubs["delete_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/DeleteSpec", + request_serializer=apihub_service.DeleteSpecRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_spec"] + + @property + def get_api_operation( + self, + ) -> Callable[ + [apihub_service.GetApiOperationRequest], Awaitable[common_fields.ApiOperation] + ]: + r"""Return a callable for the get api operation method over gRPC. + + Get details about a particular operation in API + version. 
+ + Returns: + Callable[[~.GetApiOperationRequest], + Awaitable[~.ApiOperation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_api_operation" not in self._stubs: + self._stubs["get_api_operation"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetApiOperation", + request_serializer=apihub_service.GetApiOperationRequest.serialize, + response_deserializer=common_fields.ApiOperation.deserialize, + ) + return self._stubs["get_api_operation"] + + @property + def list_api_operations( + self, + ) -> Callable[ + [apihub_service.ListApiOperationsRequest], + Awaitable[apihub_service.ListApiOperationsResponse], + ]: + r"""Return a callable for the list api operations method over gRPC. + + List operations in an API version. + + Returns: + Callable[[~.ListApiOperationsRequest], + Awaitable[~.ListApiOperationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_api_operations" not in self._stubs: + self._stubs["list_api_operations"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListApiOperations", + request_serializer=apihub_service.ListApiOperationsRequest.serialize, + response_deserializer=apihub_service.ListApiOperationsResponse.deserialize, + ) + return self._stubs["list_api_operations"] + + @property + def get_definition( + self, + ) -> Callable[ + [apihub_service.GetDefinitionRequest], Awaitable[common_fields.Definition] + ]: + r"""Return a callable for the get definition method over gRPC. + + Get details about a definition in an API version. 
+ + Returns: + Callable[[~.GetDefinitionRequest], + Awaitable[~.Definition]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_definition" not in self._stubs: + self._stubs["get_definition"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetDefinition", + request_serializer=apihub_service.GetDefinitionRequest.serialize, + response_deserializer=common_fields.Definition.deserialize, + ) + return self._stubs["get_definition"] + + @property + def create_deployment( + self, + ) -> Callable[ + [apihub_service.CreateDeploymentRequest], Awaitable[common_fields.Deployment] + ]: + r"""Return a callable for the create deployment method over gRPC. + + Create a deployment resource in the API hub. + Once a deployment resource is created, it can be + associated with API versions. + + Returns: + Callable[[~.CreateDeploymentRequest], + Awaitable[~.Deployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deployment" not in self._stubs: + self._stubs["create_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/CreateDeployment", + request_serializer=apihub_service.CreateDeploymentRequest.serialize, + response_deserializer=common_fields.Deployment.deserialize, + ) + return self._stubs["create_deployment"] + + @property + def get_deployment( + self, + ) -> Callable[ + [apihub_service.GetDeploymentRequest], Awaitable[common_fields.Deployment] + ]: + r"""Return a callable for the get deployment method over gRPC. 
+ + Get details about a deployment and the API versions + linked to it. + + Returns: + Callable[[~.GetDeploymentRequest], + Awaitable[~.Deployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deployment" not in self._stubs: + self._stubs["get_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetDeployment", + request_serializer=apihub_service.GetDeploymentRequest.serialize, + response_deserializer=common_fields.Deployment.deserialize, + ) + return self._stubs["get_deployment"] + + @property + def list_deployments( + self, + ) -> Callable[ + [apihub_service.ListDeploymentsRequest], + Awaitable[apihub_service.ListDeploymentsResponse], + ]: + r"""Return a callable for the list deployments method over gRPC. + + List deployment resources in the API hub. + + Returns: + Callable[[~.ListDeploymentsRequest], + Awaitable[~.ListDeploymentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deployments" not in self._stubs: + self._stubs["list_deployments"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListDeployments", + request_serializer=apihub_service.ListDeploymentsRequest.serialize, + response_deserializer=apihub_service.ListDeploymentsResponse.deserialize, + ) + return self._stubs["list_deployments"] + + @property + def update_deployment( + self, + ) -> Callable[ + [apihub_service.UpdateDeploymentRequest], Awaitable[common_fields.Deployment] + ]: + r"""Return a callable for the update deployment method over gRPC. 
+ + Update a deployment resource in the API hub. The following + fields in the [deployment + resource][google.cloud.apihub.v1.Deployment] can be updated: + + - [display_name][google.cloud.apihub.v1.Deployment.display_name] + - [description][google.cloud.apihub.v1.Deployment.description] + - [documentation][google.cloud.apihub.v1.Deployment.documentation] + - [deployment_type][google.cloud.apihub.v1.Deployment.deployment_type] + - [resource_uri][google.cloud.apihub.v1.Deployment.resource_uri] + - [endpoints][google.cloud.apihub.v1.Deployment.endpoints] + - [slo][google.cloud.apihub.v1.Deployment.slo] + - [environment][google.cloud.apihub.v1.Deployment.environment] + - [attributes][google.cloud.apihub.v1.Deployment.attributes] + + The + [update_mask][google.cloud.apihub.v1.UpdateDeploymentRequest.update_mask] + should be used to specify the fields being updated. + + Returns: + Callable[[~.UpdateDeploymentRequest], + Awaitable[~.Deployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_deployment" not in self._stubs: + self._stubs["update_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/UpdateDeployment", + request_serializer=apihub_service.UpdateDeploymentRequest.serialize, + response_deserializer=common_fields.Deployment.deserialize, + ) + return self._stubs["update_deployment"] + + @property + def delete_deployment( + self, + ) -> Callable[[apihub_service.DeleteDeploymentRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete deployment method over gRPC. + + Delete a deployment resource in the API hub. + + Returns: + Callable[[~.DeleteDeploymentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deployment" not in self._stubs: + self._stubs["delete_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/DeleteDeployment", + request_serializer=apihub_service.DeleteDeploymentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_deployment"] + + @property + def create_attribute( + self, + ) -> Callable[ + [apihub_service.CreateAttributeRequest], Awaitable[common_fields.Attribute] + ]: + r"""Return a callable for the create attribute method over gRPC. + + Create a user defined attribute. + + Certain pre defined attributes are already created by the API + hub. These attributes will have type as ``SYSTEM_DEFINED`` and + can be listed via + [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] + method. Allowed values for the same can be updated via + [UpdateAttribute][google.cloud.apihub.v1.ApiHub.UpdateAttribute] + method. + + Returns: + Callable[[~.CreateAttributeRequest], + Awaitable[~.Attribute]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_attribute" not in self._stubs: + self._stubs["create_attribute"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/CreateAttribute", + request_serializer=apihub_service.CreateAttributeRequest.serialize, + response_deserializer=common_fields.Attribute.deserialize, + ) + return self._stubs["create_attribute"] + + @property + def get_attribute( + self, + ) -> Callable[ + [apihub_service.GetAttributeRequest], Awaitable[common_fields.Attribute] + ]: + r"""Return a callable for the get attribute method over gRPC. + + Get details about the attribute. + + Returns: + Callable[[~.GetAttributeRequest], + Awaitable[~.Attribute]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_attribute" not in self._stubs: + self._stubs["get_attribute"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetAttribute", + request_serializer=apihub_service.GetAttributeRequest.serialize, + response_deserializer=common_fields.Attribute.deserialize, + ) + return self._stubs["get_attribute"] + + @property + def update_attribute( + self, + ) -> Callable[ + [apihub_service.UpdateAttributeRequest], Awaitable[common_fields.Attribute] + ]: + r"""Return a callable for the update attribute method over gRPC. + + Update the attribute. The following fields in the [Attribute + resource][google.cloud.apihub.v1.Attribute] can be updated: + + - [display_name][google.cloud.apihub.v1.Attribute.display_name] + The display name can be updated for user defined attributes + only. + - [description][google.cloud.apihub.v1.Attribute.description] + The description can be updated for user defined attributes + only. 
+ - [allowed_values][google.cloud.apihub.v1.Attribute.allowed_values] + To update the list of allowed values, clients need to use the + fetched list of allowed values and add or remove values to or + from the same list. The mutable allowed values can be updated + for both user defined and System defined attributes. The + immutable allowed values cannot be updated or deleted. The + updated list of allowed values cannot be empty. If an allowed + value that is already used by some resource's attribute is + deleted, then the association between the resource and the + attribute value will also be deleted. + - [cardinality][google.cloud.apihub.v1.Attribute.cardinality] + The cardinality can be updated for user defined attributes + only. Cardinality can only be increased during an update. + + The + [update_mask][google.cloud.apihub.v1.UpdateAttributeRequest.update_mask] + should be used to specify the fields being updated. + + Returns: + Callable[[~.UpdateAttributeRequest], + Awaitable[~.Attribute]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_attribute" not in self._stubs: + self._stubs["update_attribute"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/UpdateAttribute", + request_serializer=apihub_service.UpdateAttributeRequest.serialize, + response_deserializer=common_fields.Attribute.deserialize, + ) + return self._stubs["update_attribute"] + + @property + def delete_attribute( + self, + ) -> Callable[[apihub_service.DeleteAttributeRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete attribute method over gRPC. + + Delete an attribute. + + Note: System defined attributes cannot be deleted. 
All + associations of the attribute being deleted with any API + hub resource will also get deleted. + + Returns: + Callable[[~.DeleteAttributeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_attribute" not in self._stubs: + self._stubs["delete_attribute"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/DeleteAttribute", + request_serializer=apihub_service.DeleteAttributeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_attribute"] + + @property + def list_attributes( + self, + ) -> Callable[ + [apihub_service.ListAttributesRequest], + Awaitable[apihub_service.ListAttributesResponse], + ]: + r"""Return a callable for the list attributes method over gRPC. + + List all attributes. + + Returns: + Callable[[~.ListAttributesRequest], + Awaitable[~.ListAttributesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_attributes" not in self._stubs: + self._stubs["list_attributes"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListAttributes", + request_serializer=apihub_service.ListAttributesRequest.serialize, + response_deserializer=apihub_service.ListAttributesResponse.deserialize, + ) + return self._stubs["list_attributes"] + + @property + def search_resources( + self, + ) -> Callable[ + [apihub_service.SearchResourcesRequest], + Awaitable[apihub_service.SearchResourcesResponse], + ]: + r"""Return a callable for the search resources method over gRPC. 
+ + Search across API-Hub resources. + + Returns: + Callable[[~.SearchResourcesRequest], + Awaitable[~.SearchResourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_resources" not in self._stubs: + self._stubs["search_resources"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/SearchResources", + request_serializer=apihub_service.SearchResourcesRequest.serialize, + response_deserializer=apihub_service.SearchResourcesResponse.deserialize, + ) + return self._stubs["search_resources"] + + @property + def create_external_api( + self, + ) -> Callable[ + [apihub_service.CreateExternalApiRequest], Awaitable[common_fields.ExternalApi] + ]: + r"""Return a callable for the create external api method over gRPC. + + Create an External API resource in the API hub. + + Returns: + Callable[[~.CreateExternalApiRequest], + Awaitable[~.ExternalApi]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_external_api" not in self._stubs: + self._stubs["create_external_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/CreateExternalApi", + request_serializer=apihub_service.CreateExternalApiRequest.serialize, + response_deserializer=common_fields.ExternalApi.deserialize, + ) + return self._stubs["create_external_api"] + + @property + def get_external_api( + self, + ) -> Callable[ + [apihub_service.GetExternalApiRequest], Awaitable[common_fields.ExternalApi] + ]: + r"""Return a callable for the get external api method over gRPC. 
+ + Get details about an External API resource in the API + hub. + + Returns: + Callable[[~.GetExternalApiRequest], + Awaitable[~.ExternalApi]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_external_api" not in self._stubs: + self._stubs["get_external_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/GetExternalApi", + request_serializer=apihub_service.GetExternalApiRequest.serialize, + response_deserializer=common_fields.ExternalApi.deserialize, + ) + return self._stubs["get_external_api"] + + @property + def update_external_api( + self, + ) -> Callable[ + [apihub_service.UpdateExternalApiRequest], Awaitable[common_fields.ExternalApi] + ]: + r"""Return a callable for the update external api method over gRPC. + + Update an External API resource in the API hub. The following + fields can be updated: + + - [display_name][google.cloud.apihub.v1.ExternalApi.display_name] + - [description][google.cloud.apihub.v1.ExternalApi.description] + - [documentation][google.cloud.apihub.v1.ExternalApi.documentation] + - [endpoints][google.cloud.apihub.v1.ExternalApi.endpoints] + - [paths][google.cloud.apihub.v1.ExternalApi.paths] + + The + [update_mask][google.cloud.apihub.v1.UpdateExternalApiRequest.update_mask] + should be used to specify the fields being updated. + + Returns: + Callable[[~.UpdateExternalApiRequest], + Awaitable[~.ExternalApi]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_external_api" not in self._stubs: + self._stubs["update_external_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/UpdateExternalApi", + request_serializer=apihub_service.UpdateExternalApiRequest.serialize, + response_deserializer=common_fields.ExternalApi.deserialize, + ) + return self._stubs["update_external_api"] + + @property + def delete_external_api( + self, + ) -> Callable[ + [apihub_service.DeleteExternalApiRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete external api method over gRPC. + + Delete an External API resource in the API hub. + + Returns: + Callable[[~.DeleteExternalApiRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_external_api" not in self._stubs: + self._stubs["delete_external_api"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/DeleteExternalApi", + request_serializer=apihub_service.DeleteExternalApiRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_external_api"] + + @property + def list_external_apis( + self, + ) -> Callable[ + [apihub_service.ListExternalApisRequest], + Awaitable[apihub_service.ListExternalApisResponse], + ]: + r"""Return a callable for the list external apis method over gRPC. + + List External API resources in the API hub. + + Returns: + Callable[[~.ListExternalApisRequest], + Awaitable[~.ListExternalApisResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_external_apis" not in self._stubs: + self._stubs["list_external_apis"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHub/ListExternalApis", + request_serializer=apihub_service.ListExternalApisRequest.serialize, + response_deserializer=apihub_service.ListExternalApisResponse.deserialize, + ) + return self._stubs["list_external_apis"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_api: gapic_v1.method_async.wrap_method( + self.create_api, + default_timeout=60.0, + client_info=client_info, + ), + self.get_api: gapic_v1.method_async.wrap_method( + self.get_api, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_apis: gapic_v1.method_async.wrap_method( + self.list_apis, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_api: gapic_v1.method_async.wrap_method( + self.update_api, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_api: gapic_v1.method_async.wrap_method( + self.delete_api, + default_timeout=60.0, + client_info=client_info, + ), + self.create_version: gapic_v1.method_async.wrap_method( + self.create_version, + default_timeout=60.0, + client_info=client_info, + ), + self.get_version: gapic_v1.method_async.wrap_method( + self.get_version, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + 
client_info=client_info, + ), + self.list_versions: gapic_v1.method_async.wrap_method( + self.list_versions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_version: gapic_v1.method_async.wrap_method( + self.update_version, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_version: gapic_v1.method_async.wrap_method( + self.delete_version, + default_timeout=60.0, + client_info=client_info, + ), + self.create_spec: gapic_v1.method_async.wrap_method( + self.create_spec, + default_timeout=60.0, + client_info=client_info, + ), + self.get_spec: gapic_v1.method_async.wrap_method( + self.get_spec, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_spec_contents: gapic_v1.method_async.wrap_method( + self.get_spec_contents, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_specs: gapic_v1.method_async.wrap_method( + self.list_specs, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_spec: gapic_v1.method_async.wrap_method( + self.update_spec, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_spec: gapic_v1.method_async.wrap_method( + self.delete_spec, + default_timeout=60.0, + client_info=client_info, + ), + 
self.get_api_operation: gapic_v1.method_async.wrap_method( + self.get_api_operation, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_api_operations: gapic_v1.method_async.wrap_method( + self.list_api_operations, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_definition: gapic_v1.method_async.wrap_method( + self.get_definition, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_deployment: gapic_v1.method_async.wrap_method( + self.create_deployment, + default_timeout=60.0, + client_info=client_info, + ), + self.get_deployment: gapic_v1.method_async.wrap_method( + self.get_deployment, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_deployments: gapic_v1.method_async.wrap_method( + self.list_deployments, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_deployment: gapic_v1.method_async.wrap_method( + self.update_deployment, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_deployment: gapic_v1.method_async.wrap_method( + 
self.delete_deployment, + default_timeout=60.0, + client_info=client_info, + ), + self.create_attribute: gapic_v1.method_async.wrap_method( + self.create_attribute, + default_timeout=60.0, + client_info=client_info, + ), + self.get_attribute: gapic_v1.method_async.wrap_method( + self.get_attribute, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_attribute: gapic_v1.method_async.wrap_method( + self.update_attribute, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_attribute: gapic_v1.method_async.wrap_method( + self.delete_attribute, + default_timeout=60.0, + client_info=client_info, + ), + self.list_attributes: gapic_v1.method_async.wrap_method( + self.list_attributes, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.search_resources: gapic_v1.method_async.wrap_method( + self.search_resources, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_external_api: gapic_v1.method_async.wrap_method( + self.create_external_api, + default_timeout=60.0, + client_info=client_info, + ), + self.get_external_api: gapic_v1.method_async.wrap_method( + self.get_external_api, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_external_api: 
gapic_v1.method_async.wrap_method( + self.update_external_api, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_external_api: gapic_v1.method_async.wrap_method( + self.delete_external_api, + default_timeout=60.0, + client_info=client_info, + ), + self.list_external_apis: gapic_v1.method_async.wrap_method( + self.list_external_apis, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the get location method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("ApiHubGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/rest.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/rest.py new file mode 100644 index 000000000000..2a842e41f556 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/rest.py @@ -0,0 +1,5194 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.apihub_v1.types import apihub_service, common_fields + +from .base import ApiHubTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ApiHubRestInterceptor: + """Interceptor for ApiHub. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ApiHubRestTransport. + + .. 
code-block:: python + class MyCustomApiHubInterceptor(ApiHubRestInterceptor): + def pre_create_api(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_api(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_attribute(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_attribute(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_external_api(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_external_api(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_spec(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_spec(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_api(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_attribute(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_external_api(self, request, metadata): + logging.log(f"Received request: {request}") + return 
request, metadata + + def pre_delete_spec(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_api(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_api(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_api_operation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_api_operation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_attribute(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_attribute(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_definition(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_definition(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_external_api(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_external_api(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_spec(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_spec(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_spec_contents(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_get_spec_contents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_api_operations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_api_operations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_apis(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_apis(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_attributes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_attributes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_deployments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deployments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_external_apis(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_external_apis(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_specs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_specs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_versions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_versions(self, response): + 
logging.log(f"Received response: {response}") + return response + + def pre_search_resources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_resources(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_api(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_api(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_attribute(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_attribute(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_external_api(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_external_api(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_spec(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_spec(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_version(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ApiHubRestTransport(interceptor=MyCustomApiHubInterceptor()) + client = ApiHubClient(transport=transport) + + + """ + + def pre_create_api( + self, + request: apihub_service.CreateApiRequest, + metadata: Sequence[Tuple[str, str]], + ) -> 
Tuple[apihub_service.CreateApiRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_api + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_create_api(self, response: common_fields.Api) -> common_fields.Api: + """Post-rpc interceptor for create_api + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_create_attribute( + self, + request: apihub_service.CreateAttributeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.CreateAttributeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_attribute + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_create_attribute( + self, response: common_fields.Attribute + ) -> common_fields.Attribute: + """Post-rpc interceptor for create_attribute + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_create_deployment( + self, + request: apihub_service.CreateDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.CreateDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_create_deployment( + self, response: common_fields.Deployment + ) -> common_fields.Deployment: + """Post-rpc interceptor for create_deployment + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. 
+ """ + return response + + def pre_create_external_api( + self, + request: apihub_service.CreateExternalApiRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.CreateExternalApiRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_external_api + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_create_external_api( + self, response: common_fields.ExternalApi + ) -> common_fields.ExternalApi: + """Post-rpc interceptor for create_external_api + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_create_spec( + self, + request: apihub_service.CreateSpecRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.CreateSpecRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_spec + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_create_spec(self, response: common_fields.Spec) -> common_fields.Spec: + """Post-rpc interceptor for create_spec + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_create_version( + self, + request: apihub_service.CreateVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.CreateVersionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. 
+ """ + return request, metadata + + def post_create_version( + self, response: common_fields.Version + ) -> common_fields.Version: + """Post-rpc interceptor for create_version + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_delete_api( + self, + request: apihub_service.DeleteApiRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.DeleteApiRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_api + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def pre_delete_attribute( + self, + request: apihub_service.DeleteAttributeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.DeleteAttributeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_attribute + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def pre_delete_deployment( + self, + request: apihub_service.DeleteDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.DeleteDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def pre_delete_external_api( + self, + request: apihub_service.DeleteExternalApiRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.DeleteExternalApiRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_external_api + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. 
+ """ + return request, metadata + + def pre_delete_spec( + self, + request: apihub_service.DeleteSpecRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.DeleteSpecRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_spec + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def pre_delete_version( + self, + request: apihub_service.DeleteVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.DeleteVersionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def pre_get_api( + self, request: apihub_service.GetApiRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[apihub_service.GetApiRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_api + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_get_api(self, response: common_fields.Api) -> common_fields.Api: + """Post-rpc interceptor for get_api + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_get_api_operation( + self, + request: apihub_service.GetApiOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.GetApiOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_api_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. 
+ """ + return request, metadata + + def post_get_api_operation( + self, response: common_fields.ApiOperation + ) -> common_fields.ApiOperation: + """Post-rpc interceptor for get_api_operation + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_get_attribute( + self, + request: apihub_service.GetAttributeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.GetAttributeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_attribute + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_get_attribute( + self, response: common_fields.Attribute + ) -> common_fields.Attribute: + """Post-rpc interceptor for get_attribute + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_get_definition( + self, + request: apihub_service.GetDefinitionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.GetDefinitionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_definition + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_get_definition( + self, response: common_fields.Definition + ) -> common_fields.Definition: + """Post-rpc interceptor for get_definition + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. 
+ """ + return response + + def pre_get_deployment( + self, + request: apihub_service.GetDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.GetDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_get_deployment( + self, response: common_fields.Deployment + ) -> common_fields.Deployment: + """Post-rpc interceptor for get_deployment + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_get_external_api( + self, + request: apihub_service.GetExternalApiRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.GetExternalApiRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_external_api + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_get_external_api( + self, response: common_fields.ExternalApi + ) -> common_fields.ExternalApi: + """Post-rpc interceptor for get_external_api + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_get_spec( + self, + request: apihub_service.GetSpecRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.GetSpecRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_spec + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. 
+ """ + return request, metadata + + def post_get_spec(self, response: common_fields.Spec) -> common_fields.Spec: + """Post-rpc interceptor for get_spec + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_get_spec_contents( + self, + request: apihub_service.GetSpecContentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.GetSpecContentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_spec_contents + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_get_spec_contents( + self, response: common_fields.SpecContents + ) -> common_fields.SpecContents: + """Post-rpc interceptor for get_spec_contents + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_get_version( + self, + request: apihub_service.GetVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.GetVersionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_get_version( + self, response: common_fields.Version + ) -> common_fields.Version: + """Post-rpc interceptor for get_version + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. 
+ """ + return response + + def pre_list_api_operations( + self, + request: apihub_service.ListApiOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.ListApiOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_api_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_list_api_operations( + self, response: apihub_service.ListApiOperationsResponse + ) -> apihub_service.ListApiOperationsResponse: + """Post-rpc interceptor for list_api_operations + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_list_apis( + self, + request: apihub_service.ListApisRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.ListApisRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_apis + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_list_apis( + self, response: apihub_service.ListApisResponse + ) -> apihub_service.ListApisResponse: + """Post-rpc interceptor for list_apis + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_list_attributes( + self, + request: apihub_service.ListAttributesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.ListAttributesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_attributes + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. 
+ """ + return request, metadata + + def post_list_attributes( + self, response: apihub_service.ListAttributesResponse + ) -> apihub_service.ListAttributesResponse: + """Post-rpc interceptor for list_attributes + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_list_deployments( + self, + request: apihub_service.ListDeploymentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.ListDeploymentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_deployments + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_list_deployments( + self, response: apihub_service.ListDeploymentsResponse + ) -> apihub_service.ListDeploymentsResponse: + """Post-rpc interceptor for list_deployments + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_list_external_apis( + self, + request: apihub_service.ListExternalApisRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.ListExternalApisRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_external_apis + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_list_external_apis( + self, response: apihub_service.ListExternalApisResponse + ) -> apihub_service.ListExternalApisResponse: + """Post-rpc interceptor for list_external_apis + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. 
+ """ + return response + + def pre_list_specs( + self, + request: apihub_service.ListSpecsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.ListSpecsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_specs + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_list_specs( + self, response: apihub_service.ListSpecsResponse + ) -> apihub_service.ListSpecsResponse: + """Post-rpc interceptor for list_specs + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_list_versions( + self, + request: apihub_service.ListVersionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.ListVersionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_versions + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_list_versions( + self, response: apihub_service.ListVersionsResponse + ) -> apihub_service.ListVersionsResponse: + """Post-rpc interceptor for list_versions + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_search_resources( + self, + request: apihub_service.SearchResourcesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.SearchResourcesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search_resources + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. 
+ """ + return request, metadata + + def post_search_resources( + self, response: apihub_service.SearchResourcesResponse + ) -> apihub_service.SearchResourcesResponse: + """Post-rpc interceptor for search_resources + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_update_api( + self, + request: apihub_service.UpdateApiRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.UpdateApiRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_api + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_update_api(self, response: common_fields.Api) -> common_fields.Api: + """Post-rpc interceptor for update_api + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_update_attribute( + self, + request: apihub_service.UpdateAttributeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.UpdateAttributeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_attribute + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_update_attribute( + self, response: common_fields.Attribute + ) -> common_fields.Attribute: + """Post-rpc interceptor for update_attribute + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. 
+ """ + return response + + def pre_update_deployment( + self, + request: apihub_service.UpdateDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.UpdateDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_update_deployment( + self, response: common_fields.Deployment + ) -> common_fields.Deployment: + """Post-rpc interceptor for update_deployment + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_update_external_api( + self, + request: apihub_service.UpdateExternalApiRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.UpdateExternalApiRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_external_api + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_update_external_api( + self, response: common_fields.ExternalApi + ) -> common_fields.ExternalApi: + """Post-rpc interceptor for update_external_api + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_update_spec( + self, + request: apihub_service.UpdateSpecRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.UpdateSpecRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_spec + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. 
+ """ + return request, metadata + + def post_update_spec(self, response: common_fields.Spec) -> common_fields.Spec: + """Post-rpc interceptor for update_spec + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_update_version( + self, + request: apihub_service.UpdateVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.UpdateVersionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_update_version( + self, response: common_fields.Version + ) -> common_fields.Version: + """Post-rpc interceptor for update_version + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. 
+ """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHub server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ApiHub server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ApiHubRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ApiHubRestInterceptor + + +class ApiHubRestTransport(ApiHubTransport): + """REST backend transport for ApiHub. 
+ + This service provides all methods related to the API hub. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ApiHubRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ApiHubRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateApi(ApiHubRestStub): + def __hash__(self): + return hash("CreateApi") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.CreateApiRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Api: + r"""Call the create api method over HTTP. 
+ + Args: + request (~.apihub_service.CreateApiRequest): + The request object. The [CreateApi][google.cloud.apihub.v1.ApiHub.CreateApi] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Api: + An API resource in the API Hub. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/apis", + "body": "api", + }, + ] + request, metadata = self._interceptor.pre_create_api(request, metadata) + pb_request = apihub_service.CreateApiRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Api() + pb_resp = common_fields.Api.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_api(resp) + return resp + + class _CreateAttribute(ApiHubRestStub): + def __hash__(self): + return hash("CreateAttribute") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.CreateAttributeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Attribute: + r"""Call the create attribute method over HTTP. + + Args: + request (~.apihub_service.CreateAttributeRequest): + The request object. The + [CreateAttribute][google.cloud.apihub.v1.ApiHub.CreateAttribute] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Attribute: + An attribute in the API Hub. + An attribute is a name value pair which + can be attached to different resources + in the API hub based on the scope of the + attribute. Attributes can either be + pre-defined by the API Hub or created by + users. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/attributes", + "body": "attribute", + }, + ] + request, metadata = self._interceptor.pre_create_attribute( + request, metadata + ) + pb_request = apihub_service.CreateAttributeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Attribute() + pb_resp = common_fields.Attribute.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_attribute(resp) + return resp + + class _CreateDeployment(ApiHubRestStub): + def __hash__(self): + return hash("CreateDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.CreateDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Deployment: + r"""Call the create deployment method over HTTP. + + Args: + request (~.apihub_service.CreateDeploymentRequest): + The request object. The + [CreateDeployment][google.cloud.apihub.v1.ApiHub.CreateDeployment] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Deployment: + Details of the deployment where APIs + are hosted. A deployment could represent + an Apigee proxy, API gateway, other + Google Cloud services or non-Google + Cloud services as well. A deployment + entity is a root level entity in the API + hub and exists independent of any API. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/deployments", + "body": "deployment", + }, + ] + request, metadata = self._interceptor.pre_create_deployment( + request, metadata + ) + pb_request = apihub_service.CreateDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Deployment() + pb_resp = common_fields.Deployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_deployment(resp) + return resp + + class _CreateExternalApi(ApiHubRestStub): + def __hash__(self): + return hash("CreateExternalApi") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.CreateExternalApiRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ExternalApi: + r"""Call the create external api method over HTTP. + + Args: + request (~.apihub_service.CreateExternalApiRequest): + The request object. The + [CreateExternalApi][google.cloud.apihub.v1.ApiHub.CreateExternalApi] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.ExternalApi: + An external API represents an API + being provided by external sources. This + can be used to model third-party APIs + and can be used to define dependencies. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/externalApis", + "body": "external_api", + }, + ] + request, metadata = self._interceptor.pre_create_external_api( + request, metadata + ) + pb_request = apihub_service.CreateExternalApiRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.ExternalApi() + pb_resp = common_fields.ExternalApi.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_external_api(resp) + return resp + + class _CreateSpec(ApiHubRestStub): + def __hash__(self): + return hash("CreateSpec") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.CreateSpecRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Spec: + r"""Call the create spec method over HTTP. + + Args: + request (~.apihub_service.CreateSpecRequest): + The request object. The + [CreateSpec][google.cloud.apihub.v1.ApiHub.CreateSpec] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Spec: + Represents a spec associated with an + API version in the API Hub. Note that + specs of various types can be uploaded, + however parsing of details is supported + for OpenAPI spec currently. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs", + "body": "spec", + }, + ] + request, metadata = self._interceptor.pre_create_spec(request, metadata) + pb_request = apihub_service.CreateSpecRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Spec() + pb_resp = common_fields.Spec.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_spec(resp) + return resp + + class _CreateVersion(ApiHubRestStub): + def __hash__(self): + return hash("CreateVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.CreateVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Version: + r"""Call the create version method over HTTP. + + Args: + request (~.apihub_service.CreateVersionRequest): + The request object. The + [CreateVersion][google.cloud.apihub.v1.ApiHub.CreateVersion] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Version: + Represents a version of the API + resource in API hub. This is also + referred to as the API version. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/apis/*}/versions", + "body": "version", + }, + ] + request, metadata = self._interceptor.pre_create_version(request, metadata) + pb_request = apihub_service.CreateVersionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Version() + pb_resp = common_fields.Version.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_version(resp) + return resp + + class _DeleteApi(ApiHubRestStub): + def __hash__(self): + return hash("DeleteApi") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.DeleteApiRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete api method over HTTP. + + Args: + request (~.apihub_service.DeleteApiRequest): + The request object. The [DeleteApi][google.cloud.apihub.v1.ApiHub.DeleteApi] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/apis/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_api(request, metadata) + pb_request = apihub_service.DeleteApiRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteAttribute(ApiHubRestStub): + def __hash__(self): + return hash("DeleteAttribute") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.DeleteAttributeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete attribute method over HTTP. + + Args: + request (~.apihub_service.DeleteAttributeRequest): + The request object. The + [DeleteAttribute][google.cloud.apihub.v1.ApiHub.DeleteAttribute] + method's request. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/attributes/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_attribute( + request, metadata + ) + pb_request = apihub_service.DeleteAttributeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteDeployment(ApiHubRestStub): + def __hash__(self): + return hash("DeleteDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.DeleteDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete deployment method over HTTP. + + Args: + request (~.apihub_service.DeleteDeploymentRequest): + The request object. The + [DeleteDeployment][google.cloud.apihub.v1.ApiHub.DeleteDeployment] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/deployments/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_deployment( + request, metadata + ) + pb_request = apihub_service.DeleteDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteExternalApi(ApiHubRestStub): + def __hash__(self): + return hash("DeleteExternalApi") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.DeleteExternalApiRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete external api method over HTTP. + + Args: + request (~.apihub_service.DeleteExternalApiRequest): + The request object. 
The + [DeleteExternalApi][google.cloud.apihub.v1.ApiHub.DeleteExternalApi] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/externalApis/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_external_api( + request, metadata + ) + pb_request = apihub_service.DeleteExternalApiRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteSpec(ApiHubRestStub): + def __hash__(self): + return hash("DeleteSpec") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.DeleteSpecRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete spec method over HTTP. + + Args: + request (~.apihub_service.DeleteSpecRequest): + The request object. The + [DeleteSpec][google.cloud.apihub.v1.ApiHub.DeleteSpec] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_spec(request, metadata) + pb_request = apihub_service.DeleteSpecRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteVersion(ApiHubRestStub): + def __hash__(self): + return hash("DeleteVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.DeleteVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete version method over HTTP. + + Args: + request (~.apihub_service.DeleteVersionRequest): + The request object. The + [DeleteVersion][google.cloud.apihub.v1.ApiHub.DeleteVersion] + method's request. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/apis/*/versions/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_version(request, metadata) + pb_request = apihub_service.DeleteVersionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetApi(ApiHubRestStub): + def __hash__(self): + return hash("GetApi") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.GetApiRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Api: + r"""Call the get api method over HTTP. + + Args: + request (~.apihub_service.GetApiRequest): + The request object. The [GetApi][google.cloud.apihub.v1.ApiHub.GetApi] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Api: + An API resource in the API Hub. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/apis/*}", + }, + ] + request, metadata = self._interceptor.pre_get_api(request, metadata) + pb_request = apihub_service.GetApiRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Api() + pb_resp = common_fields.Api.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_api(resp) + return resp + + class _GetApiOperation(ApiHubRestStub): + def __hash__(self): + return hash("GetApiOperation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.GetApiOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ApiOperation: + r"""Call the get api operation method over HTTP. + + Args: + request (~.apihub_service.GetApiOperationRequest): + The request object. The + [GetApiOperation][google.cloud.apihub.v1.ApiHub.GetApiOperation] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.ApiOperation: + Represents an operation contained in + an API version in the API Hub. An + operation is added/updated/deleted in an + API version when a new spec is added or + an existing spec is updated/deleted in a + version. Currently, an operation will be + created only corresponding to OpenAPI + spec as parsing is supported for OpenAPI + spec. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/apis/*/versions/*/operations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_api_operation( + request, metadata + ) + pb_request = apihub_service.GetApiOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.ApiOperation() + pb_resp = common_fields.ApiOperation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_api_operation(resp) + return resp + + class _GetAttribute(ApiHubRestStub): + def __hash__(self): + return hash("GetAttribute") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.GetAttributeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Attribute: + r"""Call the get attribute method over HTTP. + + Args: + request (~.apihub_service.GetAttributeRequest): + The request object. The + [GetAttribute][google.cloud.apihub.v1.ApiHub.GetAttribute] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Attribute: + An attribute in the API Hub. + An attribute is a name value pair which + can be attached to different resources + in the API hub based on the scope of the + attribute. Attributes can either be + pre-defined by the API Hub or created by + users. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/attributes/*}", + }, + ] + request, metadata = self._interceptor.pre_get_attribute(request, metadata) + pb_request = apihub_service.GetAttributeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Attribute() + pb_resp = common_fields.Attribute.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_attribute(resp) + return resp + + class _GetDefinition(ApiHubRestStub): + def __hash__(self): + return hash("GetDefinition") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.GetDefinitionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Definition: + r"""Call the get definition method over HTTP. + + Args: + request (~.apihub_service.GetDefinitionRequest): + The request object. The + [GetDefinition][google.cloud.apihub.v1.ApiHub.GetDefinition] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Definition: + Represents a definition for example schema, request, + response definitions contained in an API version. A + definition is added/updated/deleted in an API version + when a new spec is added or an existing spec is + updated/deleted in a version. Currently, definition will + be created only corresponding to OpenAPI spec as parsing + is supported for OpenAPI spec. Also, within OpenAPI + spec, only ``schema`` object is supported. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/apis/*/versions/*/definitions/*}", + }, + ] + request, metadata = self._interceptor.pre_get_definition(request, metadata) + pb_request = apihub_service.GetDefinitionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Definition() + pb_resp = common_fields.Definition.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_definition(resp) + return resp + + class _GetDeployment(ApiHubRestStub): + def __hash__(self): + return hash("GetDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.GetDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Deployment: + r"""Call the get deployment method over HTTP. + + Args: + request (~.apihub_service.GetDeploymentRequest): + The request object. The + [GetDeployment][google.cloud.apihub.v1.ApiHub.GetDeployment] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Deployment: + Details of the deployment where APIs + are hosted. A deployment could represent + an Apigee proxy, API gateway, other + Google Cloud services or non-Google + Cloud services as well. A deployment + entity is a root level entity in the API + hub and exists independent of any API. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deployments/*}", + }, + ] + request, metadata = self._interceptor.pre_get_deployment(request, metadata) + pb_request = apihub_service.GetDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Deployment() + pb_resp = common_fields.Deployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_deployment(resp) + return resp + + class _GetExternalApi(ApiHubRestStub): + def __hash__(self): + return hash("GetExternalApi") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.GetExternalApiRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ExternalApi: + r"""Call the get external api method over HTTP. + + Args: + request (~.apihub_service.GetExternalApiRequest): + The request object. The + [GetExternalApi][google.cloud.apihub.v1.ApiHub.GetExternalApi] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.ExternalApi: + An external API represents an API + being provided by external sources. This + can be used to model third-party APIs + and can be used to define dependencies. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/externalApis/*}", + }, + ] + request, metadata = self._interceptor.pre_get_external_api( + request, metadata + ) + pb_request = apihub_service.GetExternalApiRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.ExternalApi() + pb_resp = common_fields.ExternalApi.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_external_api(resp) + return resp + + class _GetSpec(ApiHubRestStub): + def __hash__(self): + return hash("GetSpec") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.GetSpecRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Spec: + r"""Call the get spec method over HTTP. + + Args: + request (~.apihub_service.GetSpecRequest): + The request object. The [GetSpec][google.cloud.apihub.v1.ApiHub.GetSpec] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Spec: + Represents a spec associated with an + API version in the API Hub. Note that + specs of various types can be uploaded, + however parsing of details is supported + for OpenAPI spec currently. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_spec(request, metadata) + pb_request = apihub_service.GetSpecRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Spec() + pb_resp = common_fields.Spec.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_spec(resp) + return resp + + class _GetSpecContents(ApiHubRestStub): + def __hash__(self): + return hash("GetSpecContents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.GetSpecContentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.SpecContents: + r"""Call the get spec contents method over HTTP. + + Args: + request (~.apihub_service.GetSpecContentsRequest): + The request object. The + [GetSpecContents][google.cloud.apihub.v1.ApiHub.GetSpecContents] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.SpecContents: + The spec contents. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}:contents", + }, + ] + request, metadata = self._interceptor.pre_get_spec_contents( + request, metadata + ) + pb_request = apihub_service.GetSpecContentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.SpecContents() + pb_resp = common_fields.SpecContents.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_spec_contents(resp) + return resp + + class _GetVersion(ApiHubRestStub): + def __hash__(self): + return hash("GetVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.GetVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Version: + r"""Call the get version method over HTTP. + + Args: + request (~.apihub_service.GetVersionRequest): + The request object. The + [GetVersion][google.cloud.apihub.v1.ApiHub.GetVersion] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Version: + Represents a version of the API + resource in API hub. This is also + referred to as the API version. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/apis/*/versions/*}", + }, + ] + request, metadata = self._interceptor.pre_get_version(request, metadata) + pb_request = apihub_service.GetVersionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Version() + pb_resp = common_fields.Version.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_version(resp) + return resp + + class _ListApiOperations(ApiHubRestStub): + def __hash__(self): + return hash("ListApiOperations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.ListApiOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> apihub_service.ListApiOperationsResponse: + r"""Call the list api operations method over HTTP. + + Args: + request (~.apihub_service.ListApiOperationsRequest): + The request object. The + [ListApiOperations][google.cloud.apihub.v1.ApiHub.ListApiOperations] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.apihub_service.ListApiOperationsResponse: + The + [ListApiOperations][google.cloud.apihub.v1.ApiHub.ListApiOperations] + method's response. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/apis/*/versions/*}/operations", + }, + ] + request, metadata = self._interceptor.pre_list_api_operations( + request, metadata + ) + pb_request = apihub_service.ListApiOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = apihub_service.ListApiOperationsResponse() + pb_resp = apihub_service.ListApiOperationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_api_operations(resp) + return resp + + class _ListApis(ApiHubRestStub): + def __hash__(self): + return hash("ListApis") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.ListApisRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> apihub_service.ListApisResponse: + r"""Call the list apis method over HTTP. + + Args: + request (~.apihub_service.ListApisRequest): + The request object. The [ListApis][google.cloud.apihub.v1.ApiHub.ListApis] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.apihub_service.ListApisResponse: + The [ListApis][google.cloud.apihub.v1.ApiHub.ListApis] + method's response. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/apis", + }, + ] + request, metadata = self._interceptor.pre_list_apis(request, metadata) + pb_request = apihub_service.ListApisRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = apihub_service.ListApisResponse() + pb_resp = apihub_service.ListApisResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_apis(resp) + return resp + + class _ListAttributes(ApiHubRestStub): + def __hash__(self): + return hash("ListAttributes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.ListAttributesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> apihub_service.ListAttributesResponse: + r"""Call the list attributes method over HTTP. + + Args: + request (~.apihub_service.ListAttributesRequest): + The request object. The + [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.apihub_service.ListAttributesResponse: + The + [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] + method's response. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/attributes", + }, + ] + request, metadata = self._interceptor.pre_list_attributes(request, metadata) + pb_request = apihub_service.ListAttributesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = apihub_service.ListAttributesResponse() + pb_resp = apihub_service.ListAttributesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_attributes(resp) + return resp + + class _ListDeployments(ApiHubRestStub): + def __hash__(self): + return hash("ListDeployments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.ListDeploymentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> apihub_service.ListDeploymentsResponse: + r"""Call the list deployments method over HTTP. + + Args: + request (~.apihub_service.ListDeploymentsRequest): + The request object. The + [ListDeployments][google.cloud.apihub.v1.ApiHub.ListDeployments] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.apihub_service.ListDeploymentsResponse: + The + [ListDeployments][google.cloud.apihub.v1.ApiHub.ListDeployments] + method's response. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/deployments", + }, + ] + request, metadata = self._interceptor.pre_list_deployments( + request, metadata + ) + pb_request = apihub_service.ListDeploymentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = apihub_service.ListDeploymentsResponse() + pb_resp = apihub_service.ListDeploymentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_deployments(resp) + return resp + + class _ListExternalApis(ApiHubRestStub): + def __hash__(self): + return hash("ListExternalApis") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.ListExternalApisRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> apihub_service.ListExternalApisResponse: + r"""Call the list external apis method over HTTP. + + Args: + request (~.apihub_service.ListExternalApisRequest): + The request object. The + [ListExternalApis][google.cloud.apihub.v1.ApiHub.ListExternalApis] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.apihub_service.ListExternalApisResponse: + The + [ListExternalApis][google.cloud.apihub.v1.ApiHub.ListExternalApis] + method's response. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/externalApis", + }, + ] + request, metadata = self._interceptor.pre_list_external_apis( + request, metadata + ) + pb_request = apihub_service.ListExternalApisRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = apihub_service.ListExternalApisResponse()
+            pb_resp = apihub_service.ListExternalApisResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_list_external_apis(resp)
+            return resp
+
+    class _ListSpecs(ApiHubRestStub):
+        def __hash__(self):
+            return hash("ListSpecs")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: apihub_service.ListSpecsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> apihub_service.ListSpecsResponse:
+            r"""Call the list specs method over HTTP.
+
+            Args:
+                request (~.apihub_service.ListSpecsRequest):
+                    The request object. The [ListSpecs][google.cloud.apihub.v1.ApiHub.ListSpecs] method's request.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.apihub_service.ListSpecsResponse:
+                    The [ListSpecs][google.cloud.apihub.v1.ApiHub.ListSpecs]
+                    method's response.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs", + }, + ] + request, metadata = self._interceptor.pre_list_specs(request, metadata) + pb_request = apihub_service.ListSpecsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = apihub_service.ListSpecsResponse() + pb_resp = apihub_service.ListSpecsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_specs(resp) + return resp + + class _ListVersions(ApiHubRestStub): + def __hash__(self): + return hash("ListVersions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.ListVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> apihub_service.ListVersionsResponse: + r"""Call the list versions method over HTTP. + + Args: + request (~.apihub_service.ListVersionsRequest): + The request object. The + [ListVersions][google.cloud.apihub.v1.ApiHub.ListVersions] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.apihub_service.ListVersionsResponse: + The + [ListVersions][google.cloud.apihub.v1.ApiHub.ListVersions] + method's response. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/apis/*}/versions", + }, + ] + request, metadata = self._interceptor.pre_list_versions(request, metadata) + pb_request = apihub_service.ListVersionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = apihub_service.ListVersionsResponse() + pb_resp = apihub_service.ListVersionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_versions(resp) + return resp + + class _SearchResources(ApiHubRestStub): + def __hash__(self): + return hash("SearchResources") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.SearchResourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> apihub_service.SearchResourcesResponse: + r"""Call the search resources method over HTTP. + + Args: + request (~.apihub_service.SearchResourcesRequest): + The request object. The + [SearchResources][google.cloud.apihub.v1.ApiHub.SearchResources] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.apihub_service.SearchResourcesResponse: + Response for the + [SearchResources][google.cloud.apihub.v1.ApiHub.SearchResources] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{location=projects/*/locations/*}:searchResources", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_search_resources( + request, metadata + ) + pb_request = apihub_service.SearchResourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = apihub_service.SearchResourcesResponse() + pb_resp = apihub_service.SearchResourcesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_resources(resp) + return resp + + class _UpdateApi(ApiHubRestStub): + def __hash__(self): + return hash("UpdateApi") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.UpdateApiRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Api: + r"""Call the update api method over HTTP. + + Args: + request (~.apihub_service.UpdateApiRequest): + The request object. The [UpdateApi][google.cloud.apihub.v1.ApiHub.UpdateApi] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Api: + An API resource in the API Hub. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{api.name=projects/*/locations/*/apis/*}", + "body": "api", + }, + ] + request, metadata = self._interceptor.pre_update_api(request, metadata) + pb_request = apihub_service.UpdateApiRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Api() + pb_resp = common_fields.Api.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_api(resp) + return resp + + class _UpdateAttribute(ApiHubRestStub): + def __hash__(self): + return hash("UpdateAttribute") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.UpdateAttributeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Attribute: + r"""Call the update attribute method over HTTP. + + Args: + request (~.apihub_service.UpdateAttributeRequest): + The request object. The + [UpdateAttribute][google.cloud.apihub.v1.ApiHub.UpdateAttribute] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Attribute: + An attribute in the API Hub. + An attribute is a name value pair which + can be attached to different resources + in the API hub based on the scope of the + attribute. Attributes can either be + pre-defined by the API Hub or created by + users. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{attribute.name=projects/*/locations/*/attributes/*}", + "body": "attribute", + }, + ] + request, metadata = self._interceptor.pre_update_attribute( + request, metadata + ) + pb_request = apihub_service.UpdateAttributeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Attribute() + pb_resp = common_fields.Attribute.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_attribute(resp) + return resp + + class _UpdateDeployment(ApiHubRestStub): + def __hash__(self): + return hash("UpdateDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.UpdateDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Deployment: + r"""Call the update deployment method over HTTP. + + Args: + request (~.apihub_service.UpdateDeploymentRequest): + The request object. The + [UpdateDeployment][google.cloud.apihub.v1.ApiHub.UpdateDeployment] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Deployment: + Details of the deployment where APIs + are hosted. A deployment could represent + an Apigee proxy, API gateway, other + Google Cloud services or non-Google + Cloud services as well. A deployment + entity is a root level entity in the API + hub and exists independent of any API. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{deployment.name=projects/*/locations/*/deployments/*}", + "body": "deployment", + }, + ] + request, metadata = self._interceptor.pre_update_deployment( + request, metadata + ) + pb_request = apihub_service.UpdateDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Deployment() + pb_resp = common_fields.Deployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_deployment(resp) + return resp + + class _UpdateExternalApi(ApiHubRestStub): + def __hash__(self): + return hash("UpdateExternalApi") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.UpdateExternalApiRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ExternalApi: + r"""Call the update external api method over HTTP. + + Args: + request (~.apihub_service.UpdateExternalApiRequest): + The request object. The + [UpdateExternalApi][google.cloud.apihub.v1.ApiHub.UpdateExternalApi] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.ExternalApi: + An external API represents an API + being provided by external sources. This + can be used to model third-party APIs + and can be used to define dependencies. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{external_api.name=projects/*/locations/*/externalApis/*}", + "body": "external_api", + }, + ] + request, metadata = self._interceptor.pre_update_external_api( + request, metadata + ) + pb_request = apihub_service.UpdateExternalApiRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.ExternalApi() + pb_resp = common_fields.ExternalApi.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_external_api(resp) + return resp + + class _UpdateSpec(ApiHubRestStub): + def __hash__(self): + return hash("UpdateSpec") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.UpdateSpecRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Spec: + r"""Call the update spec method over HTTP. + + Args: + request (~.apihub_service.UpdateSpecRequest): + The request object. The + [UpdateSpec][google.cloud.apihub.v1.ApiHub.UpdateSpec] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Spec: + Represents a spec associated with an + API version in the API Hub. Note that + specs of various types can be uploaded, + however parsing of details is supported + for OpenAPI spec currently. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{spec.name=projects/*/locations/*/apis/*/versions/*/specs/*}", + "body": "spec", + }, + ] + request, metadata = self._interceptor.pre_update_spec(request, metadata) + pb_request = apihub_service.UpdateSpecRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Spec() + pb_resp = common_fields.Spec.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_spec(resp) + return resp + + class _UpdateVersion(ApiHubRestStub): + def __hash__(self): + return hash("UpdateVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.UpdateVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Version: + r"""Call the update version method over HTTP. + + Args: + request (~.apihub_service.UpdateVersionRequest): + The request object. The + [UpdateVersion][google.cloud.apihub.v1.ApiHub.UpdateVersion] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Version: + Represents a version of the API + resource in API hub. This is also + referred to as the API version. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{version.name=projects/*/locations/*/apis/*/versions/*}", + "body": "version", + }, + ] + request, metadata = self._interceptor.pre_update_version(request, metadata) + pb_request = apihub_service.UpdateVersionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Version() + pb_resp = common_fields.Version.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_version(resp) + return resp + + @property + def create_api( + self, + ) -> Callable[[apihub_service.CreateApiRequest], common_fields.Api]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateApi(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_attribute( + self, + ) -> Callable[[apihub_service.CreateAttributeRequest], common_fields.Attribute]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAttribute(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_deployment( + self, + ) -> Callable[[apihub_service.CreateDeploymentRequest], common_fields.Deployment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_external_api( + self, + ) -> Callable[[apihub_service.CreateExternalApiRequest], common_fields.ExternalApi]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateExternalApi(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_spec( + self, + ) -> Callable[[apihub_service.CreateSpecRequest], common_fields.Spec]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSpec(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_version( + self, + ) -> Callable[[apihub_service.CreateVersionRequest], common_fields.Version]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_api( + self, + ) -> Callable[[apihub_service.DeleteApiRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteApi(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_attribute( + self, + ) -> Callable[[apihub_service.DeleteAttributeRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAttribute(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_deployment( + self, + ) -> Callable[[apihub_service.DeleteDeploymentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_external_api( + self, + ) -> Callable[[apihub_service.DeleteExternalApiRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteExternalApi(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_spec( + self, + ) -> Callable[[apihub_service.DeleteSpecRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteSpec(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_version( + self, + ) -> Callable[[apihub_service.DeleteVersionRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_api(self) -> Callable[[apihub_service.GetApiRequest], common_fields.Api]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetApi(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_api_operation( + self, + ) -> Callable[[apihub_service.GetApiOperationRequest], common_fields.ApiOperation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetApiOperation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_attribute( + self, + ) -> Callable[[apihub_service.GetAttributeRequest], common_fields.Attribute]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAttribute(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_definition( + self, + ) -> Callable[[apihub_service.GetDefinitionRequest], common_fields.Definition]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDefinition(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_deployment( + self, + ) -> Callable[[apihub_service.GetDeploymentRequest], common_fields.Deployment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_external_api( + self, + ) -> Callable[[apihub_service.GetExternalApiRequest], common_fields.ExternalApi]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetExternalApi(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_spec(self) -> Callable[[apihub_service.GetSpecRequest], common_fields.Spec]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSpec(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_spec_contents( + self, + ) -> Callable[[apihub_service.GetSpecContentsRequest], common_fields.SpecContents]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSpecContents(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_version( + self, + ) -> Callable[[apihub_service.GetVersionRequest], common_fields.Version]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_api_operations( + self, + ) -> Callable[ + [apihub_service.ListApiOperationsRequest], + apihub_service.ListApiOperationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListApiOperations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_apis( + self, + ) -> Callable[[apihub_service.ListApisRequest], apihub_service.ListApisResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListApis(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_attributes( + self, + ) -> Callable[ + [apihub_service.ListAttributesRequest], apihub_service.ListAttributesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAttributes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_deployments( + self, + ) -> Callable[ + [apihub_service.ListDeploymentsRequest], apihub_service.ListDeploymentsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeployments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_external_apis( + self, + ) -> Callable[ + [apihub_service.ListExternalApisRequest], + apihub_service.ListExternalApisResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListExternalApis(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_specs( + self, + ) -> Callable[[apihub_service.ListSpecsRequest], apihub_service.ListSpecsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSpecs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_versions( + self, + ) -> Callable[ + [apihub_service.ListVersionsRequest], apihub_service.ListVersionsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListVersions(self._session, self._host, self._interceptor) # type: ignore + + @property + def search_resources( + self, + ) -> Callable[ + [apihub_service.SearchResourcesRequest], apihub_service.SearchResourcesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchResources(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_api( + self, + ) -> Callable[[apihub_service.UpdateApiRequest], common_fields.Api]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateApi(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_attribute( + self, + ) -> Callable[[apihub_service.UpdateAttributeRequest], common_fields.Attribute]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateAttribute(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_deployment( + self, + ) -> Callable[[apihub_service.UpdateDeploymentRequest], common_fields.Deployment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_external_api( + self, + ) -> Callable[[apihub_service.UpdateExternalApiRequest], common_fields.ExternalApi]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateExternalApi(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_spec( + self, + ) -> Callable[[apihub_service.UpdateSpecRequest], common_fields.Spec]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSpec(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_version( + self, + ) -> Callable[[apihub_service.UpdateVersionRequest], common_fields.Version]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(ApiHubRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(ApiHubRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ApiHubRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(ApiHubRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ApiHubRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ApiHubRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ApiHubRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py similarity index 87% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/__init__.py rename to packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py index e22dd3b66e24..146b28fe4729 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py @@ -13,6 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import AdPartnerServiceClient +from .client import ApiHubDependenciesClient -__all__ = ("AdPartnerServiceClient",) +__all__ = ("ApiHubDependenciesClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/async_client.py new file mode 100644 index 000000000000..42f5c884a8ec --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/async_client.py @@ -0,0 +1,1245 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.apihub_v1.services.api_hub_dependencies import pagers +from google.cloud.apihub_v1.types import apihub_service, common_fields + +from .client import ApiHubDependenciesClient +from .transports.base import DEFAULT_CLIENT_INFO, ApiHubDependenciesTransport +from .transports.grpc_asyncio import ApiHubDependenciesGrpcAsyncIOTransport + + +class ApiHubDependenciesAsyncClient: + """This service provides methods for various operations related to a + [Dependency][google.cloud.apihub.v1.Dependency] in the API hub. 
+ """ + + _client: ApiHubDependenciesClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = ApiHubDependenciesClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ApiHubDependenciesClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ApiHubDependenciesClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ApiHubDependenciesClient._DEFAULT_UNIVERSE + + attribute_path = staticmethod(ApiHubDependenciesClient.attribute_path) + parse_attribute_path = staticmethod(ApiHubDependenciesClient.parse_attribute_path) + dependency_path = staticmethod(ApiHubDependenciesClient.dependency_path) + parse_dependency_path = staticmethod(ApiHubDependenciesClient.parse_dependency_path) + common_billing_account_path = staticmethod( + ApiHubDependenciesClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ApiHubDependenciesClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ApiHubDependenciesClient.common_folder_path) + parse_common_folder_path = staticmethod( + ApiHubDependenciesClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ApiHubDependenciesClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ApiHubDependenciesClient.parse_common_organization_path + ) + common_project_path = staticmethod(ApiHubDependenciesClient.common_project_path) + parse_common_project_path = staticmethod( + ApiHubDependenciesClient.parse_common_project_path + ) + common_location_path = staticmethod(ApiHubDependenciesClient.common_location_path) + parse_common_location_path = staticmethod( + ApiHubDependenciesClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ApiHubDependenciesAsyncClient: The constructed client. + """ + return ApiHubDependenciesClient.from_service_account_info.__func__(ApiHubDependenciesAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ApiHubDependenciesAsyncClient: The constructed client. + """ + return ApiHubDependenciesClient.from_service_account_file.__func__(ApiHubDependenciesAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ApiHubDependenciesClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ApiHubDependenciesTransport: + """Returns the transport used by the client instance. + + Returns: + ApiHubDependenciesTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = ApiHubDependenciesClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + ApiHubDependenciesTransport, + Callable[..., ApiHubDependenciesTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the api hub dependencies async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ApiHubDependenciesTransport,Callable[..., ApiHubDependenciesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ApiHubDependenciesTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ApiHubDependenciesClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_dependency( + self, + request: Optional[Union[apihub_service.CreateDependencyRequest, dict]] = None, + *, + parent: Optional[str] = None, + dependency: Optional[common_fields.Dependency] = None, + dependency_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Dependency: + r"""Create a dependency between two entities in the API + hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_create_dependency(): + # Create a client + client = apihub_v1.ApiHubDependenciesAsyncClient() + + # Initialize request argument(s) + dependency = apihub_v1.Dependency() + dependency.consumer.operation_resource_name = "operation_resource_name_value" + dependency.supplier.operation_resource_name = "operation_resource_name_value" + + request = apihub_v1.CreateDependencyRequest( + parent="parent_value", + dependency=dependency, + ) + + # Make the request + response = await client.create_dependency(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.CreateDependencyRequest, dict]]): + The request object. The + [CreateDependency][google.cloud.apihub.v1.ApiHubDependencies.CreateDependency] + method's request. + parent (:class:`str`): + Required. The parent resource for the dependency + resource. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dependency (:class:`google.cloud.apihub_v1.types.Dependency`): + Required. The dependency resource to + create. + + This corresponds to the ``dependency`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dependency_id (:class:`str`): + Optional. The ID to use for the dependency resource, + which will become the final component of the + dependency's resource name. This field is optional. + + - If provided, the same will be used. The service will + throw an error if duplicate id is provided by the + client. + - If not provided, a system generated id will be used. 
+ + This value should be 4-500 characters, and valid + characters are ``[a-z][A-Z][0-9]-_``. + + This corresponds to the ``dependency_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Dependency: + A dependency resource defined in the API hub describes a dependency directed + from a consumer to a supplier entity. A dependency + can be defined between two + [Operations][google.cloud.apihub.v1.Operation] or + between an + [Operation][google.cloud.apihub.v1.Operation] and + [External API][google.cloud.apihub.v1.ExternalApi]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, dependency, dependency_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.CreateDependencyRequest): + request = apihub_service.CreateDependencyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if dependency is not None: + request.dependency = dependency + if dependency_id is not None: + request.dependency_id = dependency_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_dependency + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_dependency( + self, + request: Optional[Union[apihub_service.GetDependencyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Dependency: + r"""Get details about a dependency resource in the API + hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_dependency(): + # Create a client + client = apihub_v1.ApiHubDependenciesAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetDependencyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dependency(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetDependencyRequest, dict]]): + The request object. The [GetDependency][.ApiHubDependencies.GetDependency] + method's request. + name (:class:`str`): + Required. 
The name of the dependency resource to + retrieve. Format: + ``projects/{project}/locations/{location}/dependencies/{dependency}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Dependency: + A dependency resource defined in the API hub describes a dependency directed + from a consumer to a supplier entity. A dependency + can be defined between two + [Operations][google.cloud.apihub.v1.Operation] or + between an + [Operation][google.cloud.apihub.v1.Operation] and + [External API][google.cloud.apihub.v1.ExternalApi]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetDependencyRequest): + request = apihub_service.GetDependencyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_dependency + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_dependency( + self, + request: Optional[Union[apihub_service.UpdateDependencyRequest, dict]] = None, + *, + dependency: Optional[common_fields.Dependency] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Dependency: + r"""Update a dependency based on the + [update_mask][google.cloud.apihub.v1.UpdateDependencyRequest.update_mask] + provided in the request. + + The following fields in the + [dependency][google.cloud.apihub.v1.Dependency] can be updated: + + - [description][google.cloud.apihub.v1.Dependency.description] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_update_dependency(): + # Create a client + client = apihub_v1.ApiHubDependenciesAsyncClient() + + # Initialize request argument(s) + dependency = apihub_v1.Dependency() + dependency.consumer.operation_resource_name = "operation_resource_name_value" + dependency.supplier.operation_resource_name = "operation_resource_name_value" + + request = apihub_v1.UpdateDependencyRequest( + dependency=dependency, + ) + + # Make the request + response = await client.update_dependency(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.UpdateDependencyRequest, dict]]): + The request object. The + [UpdateDependency][google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency] + method's request. + dependency (:class:`google.cloud.apihub_v1.types.Dependency`): + Required. The dependency resource to update. + + The dependency's ``name`` field is used to identify the + dependency to update. Format: + ``projects/{project}/locations/{location}/dependencies/{dependency}`` + + This corresponds to the ``dependency`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.apihub_v1.types.Dependency: + A dependency resource defined in the API hub describes a dependency directed + from a consumer to a supplier entity. A dependency + can be defined between two + [Operations][google.cloud.apihub.v1.Operation] or + between an + [Operation][google.cloud.apihub.v1.Operation] and + [External API][google.cloud.apihub.v1.ExternalApi]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([dependency, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.UpdateDependencyRequest): + request = apihub_service.UpdateDependencyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dependency is not None: + request.dependency = dependency + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_dependency + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dependency.name", request.dependency.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_dependency( + self, + request: Optional[Union[apihub_service.DeleteDependencyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete the dependency resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_delete_dependency(): + # Create a client + client = apihub_v1.ApiHubDependenciesAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteDependencyRequest( + name="name_value", + ) + + # Make the request + await client.delete_dependency(request=request) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.DeleteDependencyRequest, dict]]): + The request object. The + [DeleteDependency][google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency] + method's request. + name (:class:`str`): + Required. The name of the dependency resource to delete. + Format: + ``projects/{project}/locations/{location}/dependencies/{dependency}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.DeleteDependencyRequest): + request = apihub_service.DeleteDependencyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_dependency + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_dependencies( + self, + request: Optional[Union[apihub_service.ListDependenciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDependenciesAsyncPager: + r"""List dependencies based on the provided filter and + pagination parameters. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_list_dependencies(): + # Create a client + client = apihub_v1.ApiHubDependenciesAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.ListDependenciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dependencies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.ListDependenciesRequest, dict]]): + The request object. The + [ListDependencies][google.cloud.apihub.v1.ApiHubDependencies.ListDependencies] + method's request. + parent (:class:`str`): + Required. The parent which owns this collection of + dependency resources. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub_dependencies.pagers.ListDependenciesAsyncPager: + The + [ListDependencies][google.cloud.apihub.v1.ApiHubDependencies.ListDependencies] + method's response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListDependenciesRequest): + request = apihub_service.ListDependenciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_dependencies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDependenciesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ApiHubDependenciesAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ApiHubDependenciesAsyncClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py new file mode 100644 index 000000000000..70a952fe282c --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py @@ -0,0 +1,1679 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.apihub_v1.services.api_hub_dependencies import pagers +from google.cloud.apihub_v1.types import apihub_service, common_fields + +from .transports.base import DEFAULT_CLIENT_INFO, ApiHubDependenciesTransport +from .transports.rest import ApiHubDependenciesRestTransport + + +class ApiHubDependenciesClientMeta(type): + """Metaclass for the ApiHubDependencies client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ApiHubDependenciesTransport]] + _transport_registry["rest"] = ApiHubDependenciesRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ApiHubDependenciesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ApiHubDependenciesClient(metaclass=ApiHubDependenciesClientMeta): + """This service provides methods for various operations related to a + [Dependency][google.cloud.apihub.v1.Dependency] in the API hub. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "apihub.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "apihub.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ApiHubDependenciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ApiHubDependenciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ApiHubDependenciesTransport: + """Returns the transport used by the client instance. + + Returns: + ApiHubDependenciesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def attribute_path( + project: str, + location: str, + attribute: str, + ) -> str: + """Returns a fully-qualified attribute string.""" + return "projects/{project}/locations/{location}/attributes/{attribute}".format( + project=project, + location=location, + attribute=attribute, + ) + + @staticmethod + def parse_attribute_path(path: str) -> Dict[str, str]: + """Parses a attribute path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/attributes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def dependency_path( + project: str, + location: str, + dependency: str, + ) -> str: + """Returns a fully-qualified dependency string.""" + return ( + "projects/{project}/locations/{location}/dependencies/{dependency}".format( + project=project, + location=location, + dependency=dependency, + ) + ) + + @staticmethod + def parse_dependency_path(path: str) -> Dict[str, str]: + """Parses a dependency path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dependencies/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = 
re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ApiHubDependenciesClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ApiHubDependenciesClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ApiHubDependenciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. 
+ + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ApiHubDependenciesClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ApiHubDependenciesClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ApiHubDependenciesClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + ApiHubDependenciesTransport, + Callable[..., ApiHubDependenciesTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the api hub dependencies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ApiHubDependenciesTransport,Callable[..., ApiHubDependenciesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ApiHubDependenciesTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ApiHubDependenciesClient._read_environment_variables() + self._client_cert_source = ApiHubDependenciesClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ApiHubDependenciesClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ApiHubDependenciesTransport) + if transport_provided: + # transport is a ApiHubDependenciesTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ApiHubDependenciesTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ApiHubDependenciesClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ApiHubDependenciesTransport], + Callable[..., ApiHubDependenciesTransport], + ] = ( + ApiHubDependenciesClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ApiHubDependenciesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_dependency( + self, + request: Optional[Union[apihub_service.CreateDependencyRequest, dict]] = None, + *, + parent: Optional[str] = None, + dependency: Optional[common_fields.Dependency] = None, + dependency_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Dependency: + r"""Create a dependency between two entities in the API + hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_create_dependency(): + # Create a client + client = apihub_v1.ApiHubDependenciesClient() + + # Initialize request argument(s) + dependency = apihub_v1.Dependency() + dependency.consumer.operation_resource_name = "operation_resource_name_value" + dependency.supplier.operation_resource_name = "operation_resource_name_value" + + request = apihub_v1.CreateDependencyRequest( + parent="parent_value", + dependency=dependency, + ) + + # Make the request + response = client.create_dependency(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.CreateDependencyRequest, dict]): + The request object. The + [CreateDependency][google.cloud.apihub.v1.ApiHubDependencies.CreateDependency] + method's request. + parent (str): + Required. The parent resource for the dependency + resource. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dependency (google.cloud.apihub_v1.types.Dependency): + Required. The dependency resource to + create. + + This corresponds to the ``dependency`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dependency_id (str): + Optional. The ID to use for the dependency resource, + which will become the final component of the + dependency's resource name. This field is optional. + + - If provided, the same will be used. The service will + throw an error if duplicate id is provided by the + client. + - If not provided, a system generated id will be used. 
+ + This value should be 4-500 characters, and valid + characters are ``[a-z][A-Z][0-9]-_``. + + This corresponds to the ``dependency_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Dependency: + A dependency resource defined in the API hub describes a dependency directed + from a consumer to a supplier entity. A dependency + can be defined between two + [Operations][google.cloud.apihub.v1.Operation] or + between an + [Operation][google.cloud.apihub.v1.Operation] and + [External API][google.cloud.apihub.v1.ExternalApi]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, dependency, dependency_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.CreateDependencyRequest): + request = apihub_service.CreateDependencyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if dependency is not None: + request.dependency = dependency + if dependency_id is not None: + request.dependency_id = dependency_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_dependency] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_dependency( + self, + request: Optional[Union[apihub_service.GetDependencyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Dependency: + r"""Get details about a dependency resource in the API + hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_dependency(): + # Create a client + client = apihub_v1.ApiHubDependenciesClient() + + # Initialize request argument(s) + request = apihub_v1.GetDependencyRequest( + name="name_value", + ) + + # Make the request + response = client.get_dependency(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetDependencyRequest, dict]): + The request object. The [GetDependency][.ApiHubDependencies.GetDependency] + method's request. + name (str): + Required. The name of the dependency resource to + retrieve. 
Format: + ``projects/{project}/locations/{location}/dependencies/{dependency}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Dependency: + A dependency resource defined in the API hub describes a dependency directed + from a consumer to a supplier entity. A dependency + can be defined between two + [Operations][google.cloud.apihub.v1.Operation] or + between an + [Operation][google.cloud.apihub.v1.Operation] and + [External API][google.cloud.apihub.v1.ExternalApi]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.GetDependencyRequest): + request = apihub_service.GetDependencyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dependency] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_dependency( + self, + request: Optional[Union[apihub_service.UpdateDependencyRequest, dict]] = None, + *, + dependency: Optional[common_fields.Dependency] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Dependency: + r"""Update a dependency based on the + [update_mask][google.cloud.apihub.v1.UpdateDependencyRequest.update_mask] + provided in the request. + + The following fields in the + [dependency][google.cloud.apihub.v1.Dependency] can be updated: + + - [description][google.cloud.apihub.v1.Dependency.description] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_update_dependency(): + # Create a client + client = apihub_v1.ApiHubDependenciesClient() + + # Initialize request argument(s) + dependency = apihub_v1.Dependency() + dependency.consumer.operation_resource_name = "operation_resource_name_value" + dependency.supplier.operation_resource_name = "operation_resource_name_value" + + request = apihub_v1.UpdateDependencyRequest( + dependency=dependency, + ) + + # Make the request + response = client.update_dependency(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.UpdateDependencyRequest, dict]): + The request object. The + [UpdateDependency][google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency] + method's request. + dependency (google.cloud.apihub_v1.types.Dependency): + Required. The dependency resource to update. + + The dependency's ``name`` field is used to identify the + dependency to update. Format: + ``projects/{project}/locations/{location}/dependencies/{dependency}`` + + This corresponds to the ``dependency`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.apihub_v1.types.Dependency: + A dependency resource defined in the API hub describes a dependency directed + from a consumer to a supplier entity. A dependency + can be defined between two + [Operations][google.cloud.apihub.v1.Operation] or + between an + [Operation][google.cloud.apihub.v1.Operation] and + [External API][google.cloud.apihub.v1.ExternalApi]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([dependency, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.UpdateDependencyRequest): + request = apihub_service.UpdateDependencyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dependency is not None: + request.dependency = dependency + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_dependency] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dependency.name", request.dependency.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_dependency( + self, + request: Optional[Union[apihub_service.DeleteDependencyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete the dependency resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_delete_dependency(): + # Create a client + client = apihub_v1.ApiHubDependenciesClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteDependencyRequest( + name="name_value", + ) + + # Make the request + client.delete_dependency(request=request) + + Args: + request (Union[google.cloud.apihub_v1.types.DeleteDependencyRequest, dict]): + The request object. The + [DeleteDependency][google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency] + method's request. + name (str): + Required. The name of the dependency resource to delete. + Format: + ``projects/{project}/locations/{location}/dependencies/{dependency}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.DeleteDependencyRequest): + request = apihub_service.DeleteDependencyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_dependency] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_dependencies( + self, + request: Optional[Union[apihub_service.ListDependenciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDependenciesPager: + r"""List dependencies based on the provided filter and + pagination parameters. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_list_dependencies(): + # Create a client + client = apihub_v1.ApiHubDependenciesClient() + + # Initialize request argument(s) + request = apihub_v1.ListDependenciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dependencies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.ListDependenciesRequest, dict]): + The request object. The + [ListDependencies][google.cloud.apihub.v1.ApiHubDependencies.ListDependencies] + method's request. + parent (str): + Required. The parent which owns this collection of + dependency resources. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.api_hub_dependencies.pagers.ListDependenciesPager: + The + [ListDependencies][google.cloud.apihub.v1.ApiHubDependencies.ListDependencies] + method's response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, apihub_service.ListDependenciesRequest): + request = apihub_service.ListDependenciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_dependencies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDependenciesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ApiHubDependenciesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ApiHubDependenciesClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py similarity index 76% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/pagers.py rename to packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py index 7d60d7683956..6f0fa634f84d 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/pagers.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py @@ -38,32 +38,32 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import line_item_service +from google.cloud.apihub_v1.types import apihub_service, common_fields -class ListLineItemsPager: - """A pager for iterating through ``list_line_items`` requests. +class ListDependenciesPager: + """A pager for iterating through ``list_dependencies`` requests. This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListLineItemsResponse` object, and + :class:`google.cloud.apihub_v1.types.ListDependenciesResponse` object, and provides an ``__iter__`` method to iterate through its - ``line_items`` field. + ``dependencies`` field. If there are more pages, the ``__iter__`` method will make additional - ``ListLineItems`` requests and continue to iterate - through the ``line_items`` field on the + ``ListDependencies`` requests and continue to iterate + through the ``dependencies`` field on the corresponding responses. 
- All the usual :class:`google.ads.admanager_v1.types.ListLineItemsResponse` + All the usual :class:`google.cloud.apihub_v1.types.ListDependenciesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., line_item_service.ListLineItemsResponse], - request: line_item_service.ListLineItemsRequest, - response: line_item_service.ListLineItemsResponse, + method: Callable[..., apihub_service.ListDependenciesResponse], + request: apihub_service.ListDependenciesRequest, + response: apihub_service.ListDependenciesResponse, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -74,9 +74,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.ads.admanager_v1.types.ListLineItemsRequest): + request (google.cloud.apihub_v1.types.ListDependenciesRequest): The initial request object. - response (google.ads.admanager_v1.types.ListLineItemsResponse): + response (google.cloud.apihub_v1.types.ListDependenciesResponse): The initial response object. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -85,7 +85,7 @@ def __init__( sent along with the request as metadata. 
""" self._method = method - self._request = line_item_service.ListLineItemsRequest(request) + self._request = apihub_service.ListDependenciesRequest(request) self._response = response self._retry = retry self._timeout = timeout @@ -95,7 +95,7 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[line_item_service.ListLineItemsResponse]: + def pages(self) -> Iterator[apihub_service.ListDependenciesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -107,9 +107,9 @@ def pages(self) -> Iterator[line_item_service.ListLineItemsResponse]: ) yield self._response - def __iter__(self) -> Iterator[line_item_service.LineItem]: + def __iter__(self) -> Iterator[common_fields.Dependency]: for page in self.pages: - yield from page.line_items + yield from page.dependencies def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py similarity index 63% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/__init__.py rename to packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py index f787889ae7b3..5de2b44a3808 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py @@ -16,15 +16,17 @@ from collections import OrderedDict from typing import Dict, Type -from .base import CreativeServiceTransport -from .rest import CreativeServiceRestInterceptor, CreativeServiceRestTransport +from .base import ApiHubDependenciesTransport 
+from .rest import ApiHubDependenciesRestInterceptor, ApiHubDependenciesRestTransport # Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[CreativeServiceTransport]] -_transport_registry["rest"] = CreativeServiceRestTransport +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[ApiHubDependenciesTransport]] +_transport_registry["rest"] = ApiHubDependenciesRestTransport __all__ = ( - "CreativeServiceTransport", - "CreativeServiceRestTransport", - "CreativeServiceRestInterceptor", + "ApiHubDependenciesTransport", + "ApiHubDependenciesRestTransport", + "ApiHubDependenciesRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/base.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/base.py new file mode 100644 index 000000000000..9a47928384f4 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/base.py @@ -0,0 +1,295 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version +from google.cloud.apihub_v1.types import apihub_service, common_fields + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ApiHubDependenciesTransport(abc.ABC): + """Abstract transport class for ApiHubDependencies.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "apihub.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_dependency: gapic_v1.method.wrap_method( + self.create_dependency, + default_timeout=60.0, + client_info=client_info, + ), + self.get_dependency: gapic_v1.method.wrap_method( + self.get_dependency, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_dependency: gapic_v1.method.wrap_method( + self.update_dependency, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_dependency: gapic_v1.method.wrap_method( + self.delete_dependency, + default_timeout=60.0, + client_info=client_info, + ), + self.list_dependencies: gapic_v1.method.wrap_method( + self.list_dependencies, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_dependency( + self, + ) -> Callable[ + [apihub_service.CreateDependencyRequest], + Union[common_fields.Dependency, Awaitable[common_fields.Dependency]], + ]: + raise NotImplementedError() + + @property + def get_dependency( + self, + ) -> Callable[ + [apihub_service.GetDependencyRequest], + Union[common_fields.Dependency, Awaitable[common_fields.Dependency]], + ]: + raise NotImplementedError() + + @property + def update_dependency( + self, + ) -> Callable[ + [apihub_service.UpdateDependencyRequest], + Union[common_fields.Dependency, Awaitable[common_fields.Dependency]], + ]: + raise NotImplementedError() + + @property + def delete_dependency( + self, + ) -> Callable[ + [apihub_service.DeleteDependencyRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_dependencies( + self, + ) -> Callable[ + [apihub_service.ListDependenciesRequest], + Union[ + apihub_service.ListDependenciesResponse, + Awaitable[apihub_service.ListDependenciesResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, 
Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ApiHubDependenciesTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/grpc.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/grpc.py new file mode 100644 index 000000000000..38c7bda2bef2 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/grpc.py @@ -0,0 +1,498 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.apihub_v1.types import apihub_service, common_fields + +from .base import DEFAULT_CLIENT_INFO, ApiHubDependenciesTransport + + +class ApiHubDependenciesGrpcTransport(ApiHubDependenciesTransport): + """gRPC backend transport for ApiHubDependencies. + + This service provides methods for various operations related to a + [Dependency][google.cloud.apihub.v1.Dependency] in the API hub. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @classmethod
    def create_channel(
        cls,
        host: str = "apihub.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs,
    ) -> grpc.Channel:
        """Create and return a gRPC channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """

        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs,
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service."""
        return self._grpc_channel

    @property
    def create_dependency(
        self,
    ) -> Callable[[apihub_service.CreateDependencyRequest], common_fields.Dependency]:
        r"""Return a callable for the create dependency method over gRPC.

        Create a dependency between two entities in the API
        hub.

        Returns:
            Callable[[~.CreateDependencyRequest],
                    ~.Dependency]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "create_dependency" not in self._stubs:
            self._stubs["create_dependency"] = self.grpc_channel.unary_unary(
                "/google.cloud.apihub.v1.ApiHubDependencies/CreateDependency",
                request_serializer=apihub_service.CreateDependencyRequest.serialize,
                response_deserializer=common_fields.Dependency.deserialize,
            )
        return self._stubs["create_dependency"]

    @property
    def get_dependency(
        self,
    ) -> Callable[[apihub_service.GetDependencyRequest], common_fields.Dependency]:
        r"""Return a callable for the get dependency method over gRPC.

        Get details about a dependency resource in the API
        hub.

        Returns:
            Callable[[~.GetDependencyRequest],
                    ~.Dependency]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_dependency" not in self._stubs:
            self._stubs["get_dependency"] = self.grpc_channel.unary_unary(
                "/google.cloud.apihub.v1.ApiHubDependencies/GetDependency",
                request_serializer=apihub_service.GetDependencyRequest.serialize,
                response_deserializer=common_fields.Dependency.deserialize,
            )
        return self._stubs["get_dependency"]

    @property
    def update_dependency(
        self,
    ) -> Callable[[apihub_service.UpdateDependencyRequest], common_fields.Dependency]:
        r"""Return a callable for the update dependency method over gRPC.

        Update a dependency based on the
        [update_mask][google.cloud.apihub.v1.UpdateDependencyRequest.update_mask]
        provided in the request.

        The following fields in the
        [dependency][google.cloud.apihub.v1.Dependency] can be updated:

        - [description][google.cloud.apihub.v1.Dependency.description]

        Returns:
            Callable[[~.UpdateDependencyRequest],
                    ~.Dependency]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "update_dependency" not in self._stubs:
            self._stubs["update_dependency"] = self.grpc_channel.unary_unary(
                "/google.cloud.apihub.v1.ApiHubDependencies/UpdateDependency",
                request_serializer=apihub_service.UpdateDependencyRequest.serialize,
                response_deserializer=common_fields.Dependency.deserialize,
            )
        return self._stubs["update_dependency"]

    @property
    def delete_dependency(
        self,
    ) -> Callable[[apihub_service.DeleteDependencyRequest], empty_pb2.Empty]:
        r"""Return a callable for the delete dependency method over gRPC.

        Delete the dependency resource.

        Returns:
            Callable[[~.DeleteDependencyRequest],
                    ~.Empty]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "delete_dependency" not in self._stubs:
            self._stubs["delete_dependency"] = self.grpc_channel.unary_unary(
                "/google.cloud.apihub.v1.ApiHubDependencies/DeleteDependency",
                request_serializer=apihub_service.DeleteDependencyRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs["delete_dependency"]

    @property
    def list_dependencies(
        self,
    ) -> Callable[
        [apihub_service.ListDependenciesRequest],
        apihub_service.ListDependenciesResponse,
    ]:
        r"""Return a callable for the list dependencies method over gRPC.

        List dependencies based on the provided filter and
        pagination parameters.

        Returns:
            Callable[[~.ListDependenciesRequest],
                    ~.ListDependenciesResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_dependencies" not in self._stubs:
            self._stubs["list_dependencies"] = self.grpc_channel.unary_unary(
                "/google.cloud.apihub.v1.ApiHubDependencies/ListDependencies",
                request_serializer=apihub_service.ListDependenciesRequest.serialize,
                response_deserializer=apihub_service.ListDependenciesResponse.deserialize,
            )
        return self._stubs["list_dependencies"]

    def close(self):
        """Close the underlying gRPC channel."""
        self.grpc_channel.close()

    @property
    def delete_operation(
        self,
    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
        r"""Return a callable for the delete_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "delete_operation" not in self._stubs:
            self._stubs["delete_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/DeleteOperation",
                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
                response_deserializer=None,
            )
        return self._stubs["delete_operation"]

    @property
    def cancel_operation(
        self,
    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
        r"""Return a callable for the cancel_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "cancel_operation" not in self._stubs:
            self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/CancelOperation",
                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
                response_deserializer=None,
            )
        return self._stubs["cancel_operation"]

    @property
    def get_operation(
        self,
    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
        r"""Return a callable for the get_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_operation" not in self._stubs:
            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/GetOperation",
                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["get_operation"]

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
    ]:
        r"""Return a callable for the list_operations method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_operations" not in self._stubs:
            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/ListOperations",
                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
            )
        return self._stubs["list_operations"]

    @property
    def list_locations(
        self,
    ) -> Callable[
        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
    ]:
        r"""Return a callable for the list locations method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_locations" not in self._stubs:
            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
                "/google.cloud.location.Locations/ListLocations",
                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
            )
        return self._stubs["list_locations"]

    @property
    def get_location(
        self,
    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
        # NOTE(review): docstring previously said "list locations" — copy-paste
        # from the sibling property; this stub wraps GetLocation.
        r"""Return a callable for the get location method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_location" not in self._stubs:
            self._stubs["get_location"] = self.grpc_channel.unary_unary(
                "/google.cloud.location.Locations/GetLocation",
                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
                response_deserializer=locations_pb2.Location.FromString,
            )
        return self._stubs["get_location"]

    @property
    def kind(self) -> str:
        # Identifies this transport flavor to the client factory.
        return "grpc"


__all__ = ("ApiHubDependenciesGrpcTransport",)

# ---------------------------------------------------------------------------
# new file: packages/google-cloud-apihub/google/cloud/apihub_v1/services/
#           api_hub_dependencies/transports/grpc_asyncio.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
import warnings

from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1, grpc_helpers_async
from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore
import grpc  # type: ignore
from grpc.experimental import aio  # type: ignore

from google.cloud.apihub_v1.types import apihub_service, common_fields

from .base import DEFAULT_CLIENT_INFO, ApiHubDependenciesTransport
from .grpc import ApiHubDependenciesGrpcTransport


class ApiHubDependenciesGrpcAsyncIOTransport(ApiHubDependenciesTransport):
    """gRPC AsyncIO backend transport for ApiHubDependencies.

    This service provides methods for various operations related to a
    [Dependency][google.cloud.apihub.v1.Dependency] in the API hub.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    # Per-instance channel; the class-level annotation is just a type hint.
    _grpc_channel: aio.Channel
    _stubs: Dict[str, Callable] = {}

    @classmethod
    def create_channel(
        cls,
        host: str = "apihub.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs,
    ) -> aio.Channel:
        """Create and return a gRPC AsyncIO channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            aio.Channel: A gRPC AsyncIO channel object.
        """

        return grpc_helpers_async.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs,
        )

    def __init__(
        self,
        *,
        host: str = "apihub.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
        api_mtls_endpoint: Optional[str] = None,
        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'apihub.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if a ``channel`` instance is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if a ``channel`` instance is provided.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
                A ``Channel`` instance through which to make calls, or a Callable
                that constructs and returns one. If set to None, ``self.create_channel``
                is used to create the channel. If a Callable is given, it will be called
                with the same arguments as used in ``self.create_channel``.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if a ``channel`` instance is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if isinstance(channel, aio.Channel):
            # Ignore credentials if a channel was passed.
            credentials = None
            self._ignore_credentials = True
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            # initialize with the provided callable or the default channel
            channel_init = channel or type(self).create_channel
            self._grpc_channel = channel_init(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @property
    def grpc_channel(self) -> aio.Channel:
        """Create the channel designed to connect to this service.

        This property caches on the instance; repeated calls return
        the same channel.
        """
        # Return the channel from cache.
        return self._grpc_channel

    @property
    def create_dependency(
        self,
    ) -> Callable[
        [apihub_service.CreateDependencyRequest], Awaitable[common_fields.Dependency]
    ]:
        r"""Return a callable for the create dependency method over gRPC.

        Create a dependency between two entities in the API
        hub.

        Returns:
            Callable[[~.CreateDependencyRequest],
                    Awaitable[~.Dependency]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "create_dependency" not in self._stubs:
            self._stubs["create_dependency"] = self.grpc_channel.unary_unary(
                "/google.cloud.apihub.v1.ApiHubDependencies/CreateDependency",
                request_serializer=apihub_service.CreateDependencyRequest.serialize,
                response_deserializer=common_fields.Dependency.deserialize,
            )
        return self._stubs["create_dependency"]

    @property
    def get_dependency(
        self,
    ) -> Callable[
        [apihub_service.GetDependencyRequest], Awaitable[common_fields.Dependency]
    ]:
        r"""Return a callable for the get dependency method over gRPC.

        Get details about a dependency resource in the API
        hub.

        Returns:
            Callable[[~.GetDependencyRequest],
                    Awaitable[~.Dependency]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_dependency" not in self._stubs:
            self._stubs["get_dependency"] = self.grpc_channel.unary_unary(
                "/google.cloud.apihub.v1.ApiHubDependencies/GetDependency",
                request_serializer=apihub_service.GetDependencyRequest.serialize,
                response_deserializer=common_fields.Dependency.deserialize,
            )
        return self._stubs["get_dependency"]

    @property
    def update_dependency(
        self,
    ) -> Callable[
        [apihub_service.UpdateDependencyRequest], Awaitable[common_fields.Dependency]
    ]:
        r"""Return a callable for the update dependency method over gRPC.

        Update a dependency based on the
        [update_mask][google.cloud.apihub.v1.UpdateDependencyRequest.update_mask]
        provided in the request.

        The following fields in the
        [dependency][google.cloud.apihub.v1.Dependency] can be updated:

        - [description][google.cloud.apihub.v1.Dependency.description]

        Returns:
            Callable[[~.UpdateDependencyRequest],
                    Awaitable[~.Dependency]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "update_dependency" not in self._stubs:
            self._stubs["update_dependency"] = self.grpc_channel.unary_unary(
                "/google.cloud.apihub.v1.ApiHubDependencies/UpdateDependency",
                request_serializer=apihub_service.UpdateDependencyRequest.serialize,
                response_deserializer=common_fields.Dependency.deserialize,
            )
        return self._stubs["update_dependency"]

    @property
    def delete_dependency(
        self,
    ) -> Callable[[apihub_service.DeleteDependencyRequest], Awaitable[empty_pb2.Empty]]:
        r"""Return a callable for the delete dependency method over gRPC.

        Delete the dependency resource.

        Returns:
            Callable[[~.DeleteDependencyRequest],
                    Awaitable[~.Empty]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "delete_dependency" not in self._stubs:
            self._stubs["delete_dependency"] = self.grpc_channel.unary_unary(
                "/google.cloud.apihub.v1.ApiHubDependencies/DeleteDependency",
                request_serializer=apihub_service.DeleteDependencyRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs["delete_dependency"]

    @property
    def list_dependencies(
        self,
    ) -> Callable[
        [apihub_service.ListDependenciesRequest],
        Awaitable[apihub_service.ListDependenciesResponse],
    ]:
        r"""Return a callable for the list dependencies method over gRPC.

        List dependencies based on the provided filter and
        pagination parameters.

        Returns:
            Callable[[~.ListDependenciesRequest],
                    Awaitable[~.ListDependenciesResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_dependencies" not in self._stubs:
            self._stubs["list_dependencies"] = self.grpc_channel.unary_unary(
                "/google.cloud.apihub.v1.ApiHubDependencies/ListDependencies",
                request_serializer=apihub_service.ListDependenciesRequest.serialize,
                response_deserializer=apihub_service.ListDependenciesResponse.deserialize,
            )
        return self._stubs["list_dependencies"]

    def _prep_wrapped_messages(self, client_info):
        """Precompute the wrapped methods, overriding the base class method to use async wrappers."""
        self._wrapped_methods = {
            self.create_dependency: gapic_v1.method_async.wrap_method(
                self.create_dependency,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.get_dependency: gapic_v1.method_async.wrap_method(
                self.get_dependency,
                default_retry=retries.AsyncRetry(
                    initial=1.0,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.update_dependency: gapic_v1.method_async.wrap_method(
                self.update_dependency,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.delete_dependency: gapic_v1.method_async.wrap_method(
                self.delete_dependency,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.list_dependencies: gapic_v1.method_async.wrap_method(
                self.list_dependencies,
                default_retry=retries.AsyncRetry(
                    initial=1.0,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
        }

    def close(self):
        # aio.Channel.close() is a coroutine; return it so callers can await it.
        return self.grpc_channel.close()

    @property
    def delete_operation(
        self,
    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
        r"""Return a callable for the delete_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "delete_operation" not in self._stubs:
            self._stubs["delete_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/DeleteOperation",
                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
                response_deserializer=None,
            )
        return self._stubs["delete_operation"]

    @property
    def cancel_operation(
        self,
    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
        r"""Return a callable for the cancel_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "cancel_operation" not in self._stubs:
            self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/CancelOperation",
                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
                response_deserializer=None,
            )
        return self._stubs["cancel_operation"]

    @property
    def get_operation(
        self,
    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
        r"""Return a callable for the get_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_operation" not in self._stubs:
            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/GetOperation",
                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["get_operation"]

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
    ]:
        r"""Return a callable for the list_operations method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_operations" not in self._stubs:
            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/ListOperations",
                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
            )
        return self._stubs["list_operations"]

    @property
    def list_locations(
        self,
    ) -> Callable[
        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
    ]:
        r"""Return a callable for the list locations method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_locations" not in self._stubs:
            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
                "/google.cloud.location.Locations/ListLocations",
                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
            )
        return self._stubs["list_locations"]

    @property
    def get_location(
        self,
    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
        # NOTE(review): docstring previously said "list locations" — copy-paste
        # from the sibling property; this stub wraps GetLocation.
        r"""Return a callable for the get location method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_location" not in self._stubs:
            self._stubs["get_location"] = self.grpc_channel.unary_unary(
                "/google.cloud.location.Locations/GetLocation",
                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
                response_deserializer=locations_pb2.Location.FromString,
            )
        return self._stubs["get_location"]


__all__ = ("ApiHubDependenciesGrpcAsyncIOTransport",)

# ---------------------------------------------------------------------------
# new file: packages/google-cloud-apihub/google/cloud/apihub_v1/services/
#           api_hub_dependencies/transports/rest.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.apihub_v1.types import apihub_service, common_fields + +from .base import ApiHubDependenciesTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ApiHubDependenciesRestInterceptor: + """Interceptor for ApiHubDependencies. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ApiHubDependenciesRestTransport. + + .. code-block:: python + class MyCustomApiHubDependenciesInterceptor(ApiHubDependenciesRestInterceptor): + def pre_create_dependency(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_dependency(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_dependency(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_dependency(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_dependency(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_dependencies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_dependencies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_dependency(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_dependency(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ApiHubDependenciesRestTransport(interceptor=MyCustomApiHubDependenciesInterceptor()) + client = ApiHubDependenciesClient(transport=transport) + + + """ + + def pre_create_dependency( + self, + request: apihub_service.CreateDependencyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.CreateDependencyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc 
interceptor for create_dependency + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubDependencies server. + """ + return request, metadata + + def post_create_dependency( + self, response: common_fields.Dependency + ) -> common_fields.Dependency: + """Post-rpc interceptor for create_dependency + + Override in a subclass to manipulate the response + after it is returned by the ApiHubDependencies server but before + it is returned to user code. + """ + return response + + def pre_delete_dependency( + self, + request: apihub_service.DeleteDependencyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.DeleteDependencyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_dependency + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubDependencies server. + """ + return request, metadata + + def pre_get_dependency( + self, + request: apihub_service.GetDependencyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.GetDependencyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_dependency + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubDependencies server. + """ + return request, metadata + + def post_get_dependency( + self, response: common_fields.Dependency + ) -> common_fields.Dependency: + """Post-rpc interceptor for get_dependency + + Override in a subclass to manipulate the response + after it is returned by the ApiHubDependencies server but before + it is returned to user code. 
+ """ + return response + + def pre_list_dependencies( + self, + request: apihub_service.ListDependenciesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.ListDependenciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_dependencies + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubDependencies server. + """ + return request, metadata + + def post_list_dependencies( + self, response: apihub_service.ListDependenciesResponse + ) -> apihub_service.ListDependenciesResponse: + """Post-rpc interceptor for list_dependencies + + Override in a subclass to manipulate the response + after it is returned by the ApiHubDependencies server but before + it is returned to user code. + """ + return response + + def pre_update_dependency( + self, + request: apihub_service.UpdateDependencyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[apihub_service.UpdateDependencyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_dependency + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubDependencies server. + """ + return request, metadata + + def post_update_dependency( + self, response: common_fields.Dependency + ) -> common_fields.Dependency: + """Post-rpc interceptor for update_dependency + + Override in a subclass to manipulate the response + after it is returned by the ApiHubDependencies server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubDependencies server. 
+ """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the ApiHubDependencies server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubDependencies server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the ApiHubDependencies server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubDependencies server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ApiHubDependencies server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubDependencies server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the ApiHubDependencies server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubDependencies server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ApiHubDependencies server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubDependencies server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ApiHubDependencies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ApiHubDependenciesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ApiHubDependenciesRestInterceptor + + +class ApiHubDependenciesRestTransport(ApiHubDependenciesTransport): + """REST backend transport for ApiHubDependencies. + + This service provides methods for various operations related to a + [Dependency][google.cloud.apihub.v1.Dependency] in the API hub. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ApiHubDependenciesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ApiHubDependenciesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateDependency(ApiHubDependenciesRestStub): + def __hash__(self): + return hash("CreateDependency") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.CreateDependencyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Dependency: + r"""Call the create dependency method over HTTP. + + Args: + request (~.apihub_service.CreateDependencyRequest): + The request object. The + [CreateDependency][google.cloud.apihub.v1.ApiHubDependencies.CreateDependency] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.common_fields.Dependency: + A dependency resource defined in the API hub describes a + dependency directed from a consumer to a supplier + entity. A dependency can be defined between two + [Operations][google.cloud.apihub.v1.Operation] or + between an [Operation][google.cloud.apihub.v1.Operation] + and [External API][google.cloud.apihub.v1.ExternalApi]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/dependencies", + "body": "dependency", + }, + ] + request, metadata = self._interceptor.pre_create_dependency( + request, metadata + ) + pb_request = apihub_service.CreateDependencyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Dependency() + pb_resp = common_fields.Dependency.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_dependency(resp) + return resp + + class _DeleteDependency(ApiHubDependenciesRestStub): + def __hash__(self): + return hash("DeleteDependency") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.DeleteDependencyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete dependency method over HTTP. + + Args: + request (~.apihub_service.DeleteDependencyRequest): + The request object. The + [DeleteDependency][google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/dependencies/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_dependency( + request, metadata + ) + pb_request = apihub_service.DeleteDependencyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDependency(ApiHubDependenciesRestStub): + def __hash__(self): + return hash("GetDependency") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.GetDependencyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Dependency: + r"""Call the get dependency method over HTTP. + + Args: + request (~.apihub_service.GetDependencyRequest): + The request object. 
The [GetDependency][.ApiHubDependencies.GetDependency] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Dependency: + A dependency resource defined in the API hub describes a + dependency directed from a consumer to a supplier + entity. A dependency can be defined between two + [Operations][google.cloud.apihub.v1.Operation] or + between an [Operation][google.cloud.apihub.v1.Operation] + and [External API][google.cloud.apihub.v1.ExternalApi]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dependencies/*}", + }, + ] + request, metadata = self._interceptor.pre_get_dependency(request, metadata) + pb_request = apihub_service.GetDependencyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Dependency() + pb_resp = common_fields.Dependency.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_dependency(resp) + return resp + + class _ListDependencies(ApiHubDependenciesRestStub): + def __hash__(self): + return hash("ListDependencies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.ListDependenciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> apihub_service.ListDependenciesResponse: + r"""Call the list dependencies method over HTTP. + + Args: + request (~.apihub_service.ListDependenciesRequest): + The request object. The + [ListDependencies][google.cloud.apihub.v1.ApiHubDependencies.ListDependencies] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.apihub_service.ListDependenciesResponse: + The + [ListDependencies][google.cloud.apihub.v1.ApiHubDependencies.ListDependencies] + method's response. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/dependencies", + }, + ] + request, metadata = self._interceptor.pre_list_dependencies( + request, metadata + ) + pb_request = apihub_service.ListDependenciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = apihub_service.ListDependenciesResponse() + pb_resp = apihub_service.ListDependenciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_dependencies(resp) + return resp + + class _UpdateDependency(ApiHubDependenciesRestStub): + def __hash__(self): + return hash("UpdateDependency") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: apihub_service.UpdateDependencyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.Dependency: + r"""Call the update dependency method over HTTP. + + Args: + request (~.apihub_service.UpdateDependencyRequest): + The request object. The + [UpdateDependency][google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.Dependency: + A dependency resource defined in the API hub describes a + dependency directed from a consumer to a supplier + entity. A dependency can be defined between two + [Operations][google.cloud.apihub.v1.Operation] or + between an [Operation][google.cloud.apihub.v1.Operation] + and [External API][google.cloud.apihub.v1.ExternalApi]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{dependency.name=projects/*/locations/*/dependencies/*}", + "body": "dependency", + }, + ] + request, metadata = self._interceptor.pre_update_dependency( + request, metadata + ) + pb_request = apihub_service.UpdateDependencyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.Dependency() + pb_resp = common_fields.Dependency.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_dependency(resp) + return resp + + @property + def create_dependency( + self, + ) -> Callable[[apihub_service.CreateDependencyRequest], common_fields.Dependency]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateDependency(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_dependency( + self, + ) -> Callable[[apihub_service.DeleteDependencyRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDependency(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_dependency( + self, + ) -> Callable[[apihub_service.GetDependencyRequest], common_fields.Dependency]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDependency(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_dependencies( + self, + ) -> Callable[ + [apihub_service.ListDependenciesRequest], + apihub_service.ListDependenciesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDependencies(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_dependency( + self, + ) -> Callable[[apihub_service.UpdateDependencyRequest], common_fields.Dependency]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDependency(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(ApiHubDependenciesRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(ApiHubDependenciesRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ApiHubDependenciesRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(ApiHubDependenciesRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ApiHubDependenciesRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ApiHubDependenciesRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ApiHubDependenciesRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py similarity index 89% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/__init__.py rename to packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py index 2944d1a2145f..5cbea89992b0 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py @@ -13,6 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import LabelServiceClient +from .client import ApiHubPluginClient -__all__ = ("LabelServiceClient",) +__all__ = ("ApiHubPluginClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/async_client.py new file mode 100644 index 000000000000..6f83920a990a --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/async_client.py @@ -0,0 +1,929 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.apihub_v1.types import common_fields, plugin_service + +from .client import ApiHubPluginClient +from .transports.base import DEFAULT_CLIENT_INFO, ApiHubPluginTransport +from .transports.grpc_asyncio import ApiHubPluginGrpcAsyncIOTransport + + +class ApiHubPluginAsyncClient: + """This service is used for managing plugins inside the API Hub.""" + + _client: ApiHubPluginClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ApiHubPluginClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ApiHubPluginClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ApiHubPluginClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ApiHubPluginClient._DEFAULT_UNIVERSE + + attribute_path = staticmethod(ApiHubPluginClient.attribute_path) + parse_attribute_path = staticmethod(ApiHubPluginClient.parse_attribute_path) + plugin_path = staticmethod(ApiHubPluginClient.plugin_path) + parse_plugin_path = staticmethod(ApiHubPluginClient.parse_plugin_path) + common_billing_account_path = staticmethod( + ApiHubPluginClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ApiHubPluginClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ApiHubPluginClient.common_folder_path) + parse_common_folder_path = staticmethod(ApiHubPluginClient.parse_common_folder_path) + common_organization_path = staticmethod(ApiHubPluginClient.common_organization_path) + parse_common_organization_path = staticmethod( + ApiHubPluginClient.parse_common_organization_path + ) + common_project_path = staticmethod(ApiHubPluginClient.common_project_path) + parse_common_project_path = staticmethod( + ApiHubPluginClient.parse_common_project_path + ) + common_location_path = staticmethod(ApiHubPluginClient.common_location_path) + parse_common_location_path = staticmethod( + ApiHubPluginClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ApiHubPluginAsyncClient: The constructed client. 
+ """ + return ApiHubPluginClient.from_service_account_info.__func__(ApiHubPluginAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ApiHubPluginAsyncClient: The constructed client. + """ + return ApiHubPluginClient.from_service_account_file.__func__(ApiHubPluginAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ApiHubPluginClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ApiHubPluginTransport: + """Returns the transport used by the client instance. + + Returns: + ApiHubPluginTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = ApiHubPluginClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ApiHubPluginTransport, Callable[..., ApiHubPluginTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the api hub plugin async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ApiHubPluginTransport,Callable[..., ApiHubPluginTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the ApiHubPluginTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = ApiHubPluginClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_plugin( + self, + request: Optional[Union[plugin_service.GetPluginRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> plugin_service.Plugin: + r"""Get details about an API Hub plugin. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_plugin(): + # Create a client + client = apihub_v1.ApiHubPluginAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetPluginRequest( + name="name_value", + ) + + # Make the request + response = await client.get_plugin(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetPluginRequest, dict]]): + The request object. The + [GetPlugin][google.cloud.apihub.v1.ApiHubPlugin.GetPlugin] + method's request. + name (:class:`str`): + Required. The name of the plugin to retrieve. Format: + ``projects/{project}/locations/{location}/plugins/{plugin}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Plugin: + A plugin resource in the API Hub. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, plugin_service.GetPluginRequest): + request = plugin_service.GetPluginRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_plugin + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def enable_plugin( + self, + request: Optional[Union[plugin_service.EnablePluginRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> plugin_service.Plugin: + r"""Enables a plugin. 
The ``state`` of the plugin after enabling is + ``ENABLED`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_enable_plugin(): + # Create a client + client = apihub_v1.ApiHubPluginAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.EnablePluginRequest( + name="name_value", + ) + + # Make the request + response = await client.enable_plugin(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.EnablePluginRequest, dict]]): + The request object. The + [EnablePlugin][google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin] + method's request. + name (:class:`str`): + Required. The name of the plugin to enable. Format: + ``projects/{project}/locations/{location}/plugins/{plugin}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Plugin: + A plugin resource in the API Hub. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, plugin_service.EnablePluginRequest): + request = plugin_service.EnablePluginRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.enable_plugin + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def disable_plugin( + self, + request: Optional[Union[plugin_service.DisablePluginRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> plugin_service.Plugin: + r"""Disables a plugin. The ``state`` of the plugin after disabling + is ``DISABLED`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_disable_plugin(): + # Create a client + client = apihub_v1.ApiHubPluginAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.DisablePluginRequest( + name="name_value", + ) + + # Make the request + response = await client.disable_plugin(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.DisablePluginRequest, dict]]): + The request object. The + [DisablePlugin][google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin] + method's request. + name (:class:`str`): + Required. The name of the plugin to disable. Format: + ``projects/{project}/locations/{location}/plugins/{plugin}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Plugin: + A plugin resource in the API Hub. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, plugin_service.DisablePluginRequest): + request = plugin_service.DisablePluginRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.disable_plugin + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ApiHubPluginAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ApiHubPluginAsyncClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py new file mode 100644 index 000000000000..dbfedb9a41d0 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py @@ -0,0 +1,1365 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.apihub_v1.types import common_fields, plugin_service + +from .transports.base import DEFAULT_CLIENT_INFO, ApiHubPluginTransport +from .transports.rest import ApiHubPluginRestTransport + + +class ApiHubPluginClientMeta(type): + """Metaclass for the ApiHubPlugin client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubPluginTransport]] + _transport_registry["rest"] = ApiHubPluginRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ApiHubPluginTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class ApiHubPluginClient(metaclass=ApiHubPluginClientMeta):
+    """This service is used for managing plugins inside the API Hub."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "apihub.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "apihub.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + ApiHubPluginClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ApiHubPluginClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ApiHubPluginTransport: + """Returns the transport used by the client instance. + + Returns: + ApiHubPluginTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def attribute_path(
+        project: str,
+        location: str,
+        attribute: str,
+    ) -> str:
+        """Returns a fully-qualified attribute string."""
+        return "projects/{project}/locations/{location}/attributes/{attribute}".format(
+            project=project,
+            location=location,
+            attribute=attribute,
+        )
+
+    @staticmethod
+    def parse_attribute_path(path: str) -> Dict[str, str]:
+        """Parses a attribute path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/attributes/(?P<attribute>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def plugin_path(
+        project: str,
+        location: str,
+        plugin: str,
+    ) -> str:
+        """Returns a fully-qualified plugin string."""
+        return "projects/{project}/locations/{location}/plugins/{plugin}".format(
+            project=project,
+            location=location,
+            plugin=plugin,
+        )
+
+    @staticmethod
+    def parse_plugin_path(path: str) -> Dict[str, str]:
+        """Parses a plugin path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/plugins/(?P<plugin>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m
else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ApiHubPluginClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ApiHubPluginClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ApiHubPluginClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. 
+ + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ApiHubPluginClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ApiHubPluginClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ApiHubPluginClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ApiHubPluginTransport, Callable[..., ApiHubPluginTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the api hub plugin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ApiHubPluginTransport,Callable[..., ApiHubPluginTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ApiHubPluginTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ApiHubPluginClient._read_environment_variables() + self._client_cert_source = ApiHubPluginClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ApiHubPluginClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ApiHubPluginTransport) + if transport_provided: + # transport is a ApiHubPluginTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ApiHubPluginTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or ApiHubPluginClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ApiHubPluginTransport], Callable[..., ApiHubPluginTransport] + ] = ( + ApiHubPluginClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ApiHubPluginTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_plugin( + self, + request: Optional[Union[plugin_service.GetPluginRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> plugin_service.Plugin: + r"""Get details about an API Hub plugin. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_plugin(): + # Create a client + client = apihub_v1.ApiHubPluginClient() + + # Initialize request argument(s) + request = apihub_v1.GetPluginRequest( + name="name_value", + ) + + # Make the request + response = client.get_plugin(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetPluginRequest, dict]): + The request object. The + [GetPlugin][google.cloud.apihub.v1.ApiHubPlugin.GetPlugin] + method's request. + name (str): + Required. The name of the plugin to retrieve. Format: + ``projects/{project}/locations/{location}/plugins/{plugin}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Plugin: + A plugin resource in the API Hub. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, plugin_service.GetPluginRequest): + request = plugin_service.GetPluginRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_plugin] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def enable_plugin( + self, + request: Optional[Union[plugin_service.EnablePluginRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> plugin_service.Plugin: + r"""Enables a plugin. The ``state`` of the plugin after enabling is + ``ENABLED`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_enable_plugin(): + # Create a client + client = apihub_v1.ApiHubPluginClient() + + # Initialize request argument(s) + request = apihub_v1.EnablePluginRequest( + name="name_value", + ) + + # Make the request + response = client.enable_plugin(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.EnablePluginRequest, dict]): + The request object. The + [EnablePlugin][google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin] + method's request. + name (str): + Required. The name of the plugin to enable. Format: + ``projects/{project}/locations/{location}/plugins/{plugin}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Plugin: + A plugin resource in the API Hub. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, plugin_service.EnablePluginRequest): + request = plugin_service.EnablePluginRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.enable_plugin] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def disable_plugin( + self, + request: Optional[Union[plugin_service.DisablePluginRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> plugin_service.Plugin: + r"""Disables a plugin. The ``state`` of the plugin after disabling + is ``DISABLED`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_disable_plugin(): + # Create a client + client = apihub_v1.ApiHubPluginClient() + + # Initialize request argument(s) + request = apihub_v1.DisablePluginRequest( + name="name_value", + ) + + # Make the request + response = client.disable_plugin(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.DisablePluginRequest, dict]): + The request object. The + [DisablePlugin][google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin] + method's request. + name (str): + Required. The name of the plugin to disable. Format: + ``projects/{project}/locations/{location}/plugins/{plugin}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.Plugin: + A plugin resource in the API Hub. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, plugin_service.DisablePluginRequest): + request = plugin_service.DisablePluginRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.disable_plugin] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ApiHubPluginClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ApiHubPluginClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py similarity index 68% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/__init__.py rename to packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py index ed3d20633539..9ecb3eaee613 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py @@ -16,15 +16,15 @@ from collections import OrderedDict from typing import Dict, Type -from .base import LabelServiceTransport -from .rest import LabelServiceRestInterceptor, LabelServiceRestTransport +from .base import ApiHubPluginTransport +from .rest import ApiHubPluginRestInterceptor, ApiHubPluginRestTransport # Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[LabelServiceTransport]] -_transport_registry["rest"] = LabelServiceRestTransport +_transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubPluginTransport]] +_transport_registry["rest"] = ApiHubPluginRestTransport __all__ = ( - "LabelServiceTransport", - "LabelServiceRestTransport", - "LabelServiceRestInterceptor", + "ApiHubPluginTransport", + "ApiHubPluginRestTransport", + "ApiHubPluginRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/base.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/base.py new file mode 100644 index 000000000000..05521c0cb97f --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/base.py @@ -0,0 +1,254 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version +from google.cloud.apihub_v1.types import plugin_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ApiHubPluginTransport(abc.ABC): + """Abstract transport class for ApiHubPlugin.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "apihub.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. 
Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_plugin: gapic_v1.method.wrap_method( + self.get_plugin, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.enable_plugin: gapic_v1.method.wrap_method( + self.enable_plugin, + default_timeout=60.0, + client_info=client_info, + ), + self.disable_plugin: gapic_v1.method.wrap_method( + self.disable_plugin, + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_plugin( + self, + ) -> Callable[ + [plugin_service.GetPluginRequest], + Union[plugin_service.Plugin, Awaitable[plugin_service.Plugin]], + ]: + raise NotImplementedError() + + @property + def enable_plugin( + self, + ) -> Callable[ + [plugin_service.EnablePluginRequest], + Union[plugin_service.Plugin, Awaitable[plugin_service.Plugin]], + ]: + raise NotImplementedError() + + @property + def disable_plugin( + self, + ) -> Callable[ + [plugin_service.DisablePluginRequest], + Union[plugin_service.Plugin, Awaitable[plugin_service.Plugin]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ApiHubPluginTransport",) diff --git 
a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/grpc.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/grpc.py new file mode 100644 index 000000000000..e40d93292b20 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/grpc.py @@ -0,0 +1,433 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.apihub_v1.types import plugin_service + +from .base import DEFAULT_CLIENT_INFO, ApiHubPluginTransport + + +class ApiHubPluginGrpcTransport(ApiHubPluginTransport): + """gRPC backend transport for ApiHubPlugin. + + This service is used for managing plugins inside the API Hub. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_plugin( + self, + ) -> Callable[[plugin_service.GetPluginRequest], plugin_service.Plugin]: + r"""Return a callable for the get plugin method over gRPC. + + Get details about an API Hub plugin. + + Returns: + Callable[[~.GetPluginRequest], + ~.Plugin]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_plugin" not in self._stubs: + self._stubs["get_plugin"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHubPlugin/GetPlugin", + request_serializer=plugin_service.GetPluginRequest.serialize, + response_deserializer=plugin_service.Plugin.deserialize, + ) + return self._stubs["get_plugin"] + + @property + def enable_plugin( + self, + ) -> Callable[[plugin_service.EnablePluginRequest], plugin_service.Plugin]: + r"""Return a callable for the enable plugin method over gRPC. + + Enables a plugin. The ``state`` of the plugin after enabling is + ``ENABLED`` + + Returns: + Callable[[~.EnablePluginRequest], + ~.Plugin]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "enable_plugin" not in self._stubs: + self._stubs["enable_plugin"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHubPlugin/EnablePlugin", + request_serializer=plugin_service.EnablePluginRequest.serialize, + response_deserializer=plugin_service.Plugin.deserialize, + ) + return self._stubs["enable_plugin"] + + @property + def disable_plugin( + self, + ) -> Callable[[plugin_service.DisablePluginRequest], plugin_service.Plugin]: + r"""Return a callable for the disable plugin method over gRPC. + + Disables a plugin. The ``state`` of the plugin after disabling + is ``DISABLED`` + + Returns: + Callable[[~.DisablePluginRequest], + ~.Plugin]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "disable_plugin" not in self._stubs: + self._stubs["disable_plugin"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHubPlugin/DisablePlugin", + request_serializer=plugin_service.DisablePluginRequest.serialize, + response_deserializer=plugin_service.Plugin.deserialize, + ) + return self._stubs["disable_plugin"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the get location method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ApiHubPluginGrpcTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/grpc_asyncio.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/grpc_asyncio.py new file mode 100644 index 000000000000..32ddaaecb024 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/grpc_asyncio.py @@ -0,0 +1,466 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.apihub_v1.types import plugin_service + +from .base import DEFAULT_CLIENT_INFO, ApiHubPluginTransport +from .grpc import ApiHubPluginGrpcTransport + + +class ApiHubPluginGrpcAsyncIOTransport(ApiHubPluginTransport): + """gRPC AsyncIO backend transport for ApiHubPlugin. + + This service is used for managing plugins inside the API Hub. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_plugin( + self, + ) -> Callable[[plugin_service.GetPluginRequest], Awaitable[plugin_service.Plugin]]: + r"""Return a callable for the get plugin method over gRPC. + + Get details about an API Hub plugin. + + Returns: + Callable[[~.GetPluginRequest], + Awaitable[~.Plugin]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_plugin" not in self._stubs: + self._stubs["get_plugin"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHubPlugin/GetPlugin", + request_serializer=plugin_service.GetPluginRequest.serialize, + response_deserializer=plugin_service.Plugin.deserialize, + ) + return self._stubs["get_plugin"] + + @property + def enable_plugin( + self, + ) -> Callable[ + [plugin_service.EnablePluginRequest], Awaitable[plugin_service.Plugin] + ]: + r"""Return a callable for the enable plugin method over gRPC. + + Enables a plugin. The ``state`` of the plugin after enabling is + ``ENABLED`` + + Returns: + Callable[[~.EnablePluginRequest], + Awaitable[~.Plugin]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enable_plugin" not in self._stubs: + self._stubs["enable_plugin"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHubPlugin/EnablePlugin", + request_serializer=plugin_service.EnablePluginRequest.serialize, + response_deserializer=plugin_service.Plugin.deserialize, + ) + return self._stubs["enable_plugin"] + + @property + def disable_plugin( + self, + ) -> Callable[ + [plugin_service.DisablePluginRequest], Awaitable[plugin_service.Plugin] + ]: + r"""Return a callable for the disable plugin method over gRPC. + + Disables a plugin. The ``state`` of the plugin after disabling + is ``DISABLED`` + + Returns: + Callable[[~.DisablePluginRequest], + Awaitable[~.Plugin]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "disable_plugin" not in self._stubs: + self._stubs["disable_plugin"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.ApiHubPlugin/DisablePlugin", + request_serializer=plugin_service.DisablePluginRequest.serialize, + response_deserializer=plugin_service.Plugin.deserialize, + ) + return self._stubs["disable_plugin"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_plugin: gapic_v1.method_async.wrap_method( + self.get_plugin, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.enable_plugin: gapic_v1.method_async.wrap_method( + self.enable_plugin, + default_timeout=60.0, + client_info=client_info, + ), + self.disable_plugin: gapic_v1.method_async.wrap_method( + self.disable_plugin, + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("ApiHubPluginGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/rest.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/rest.py new file mode 100644 index 000000000000..9668a024d055 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/rest.py @@ -0,0 +1,1095 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.apihub_v1.types import plugin_service + +from .base import ApiHubPluginTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ApiHubPluginRestInterceptor: + """Interceptor for ApiHubPlugin. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ApiHubPluginRestTransport. + + .. code-block:: python + class MyCustomApiHubPluginInterceptor(ApiHubPluginRestInterceptor): + def pre_disable_plugin(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_disable_plugin(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enable_plugin(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enable_plugin(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_plugin(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_plugin(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ApiHubPluginRestTransport(interceptor=MyCustomApiHubPluginInterceptor()) + client = ApiHubPluginClient(transport=transport) + + + """ + + def pre_disable_plugin( + self, + request: plugin_service.DisablePluginRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[plugin_service.DisablePluginRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for disable_plugin + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubPlugin server. + """ + return request, metadata + + def post_disable_plugin( + self, response: plugin_service.Plugin + ) -> plugin_service.Plugin: + """Post-rpc interceptor for disable_plugin + + Override in a subclass to manipulate the response + after it is returned by the ApiHubPlugin server but before + it is returned to user code. 
+ """ + return response + + def pre_enable_plugin( + self, + request: plugin_service.EnablePluginRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[plugin_service.EnablePluginRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for enable_plugin + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubPlugin server. + """ + return request, metadata + + def post_enable_plugin( + self, response: plugin_service.Plugin + ) -> plugin_service.Plugin: + """Post-rpc interceptor for enable_plugin + + Override in a subclass to manipulate the response + after it is returned by the ApiHubPlugin server but before + it is returned to user code. + """ + return response + + def pre_get_plugin( + self, + request: plugin_service.GetPluginRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[plugin_service.GetPluginRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_plugin + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubPlugin server. + """ + return request, metadata + + def post_get_plugin(self, response: plugin_service.Plugin) -> plugin_service.Plugin: + """Post-rpc interceptor for get_plugin + + Override in a subclass to manipulate the response + after it is returned by the ApiHubPlugin server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubPlugin server. 
+ """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the ApiHubPlugin server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubPlugin server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the ApiHubPlugin server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubPlugin server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ApiHubPlugin server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubPlugin server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the ApiHubPlugin server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubPlugin server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ApiHubPlugin server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ApiHubPlugin server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ApiHubPlugin server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ApiHubPluginRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ApiHubPluginRestInterceptor + + +class ApiHubPluginRestTransport(ApiHubPluginTransport): + """REST backend transport for ApiHubPlugin. + + This service is used for managing plugins inside the API Hub. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ApiHubPluginRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+    # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ApiHubPluginRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _DisablePlugin(ApiHubPluginRestStub): + def __hash__(self): + return hash("DisablePlugin") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: plugin_service.DisablePluginRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> plugin_service.Plugin: + r"""Call the disable plugin method over HTTP. + + Args: + request (~.plugin_service.DisablePluginRequest): + The request object. The + [DisablePlugin][google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.plugin_service.Plugin: + A plugin resource in the API Hub. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/plugins/*}:disable", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_disable_plugin(request, metadata) + pb_request = plugin_service.DisablePluginRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = plugin_service.Plugin() + pb_resp = plugin_service.Plugin.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_disable_plugin(resp) + return resp + + class _EnablePlugin(ApiHubPluginRestStub): + def __hash__(self): + return hash("EnablePlugin") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: plugin_service.EnablePluginRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> plugin_service.Plugin: + r"""Call the enable plugin method over HTTP. + + Args: + request (~.plugin_service.EnablePluginRequest): + The request object. The + [EnablePlugin][google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.plugin_service.Plugin: + A plugin resource in the API Hub. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/plugins/*}:enable", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_enable_plugin(request, metadata) + pb_request = plugin_service.EnablePluginRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = plugin_service.Plugin() + pb_resp = plugin_service.Plugin.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enable_plugin(resp) + return resp + + class _GetPlugin(ApiHubPluginRestStub): + def __hash__(self): + return hash("GetPlugin") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: plugin_service.GetPluginRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> plugin_service.Plugin: + r"""Call the get plugin method over HTTP. + + Args: + request (~.plugin_service.GetPluginRequest): + The request object. The + [GetPlugin][google.cloud.apihub.v1.ApiHubPlugin.GetPlugin] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.plugin_service.Plugin: + A plugin resource in the API Hub. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/plugins/*}", + }, + ] + request, metadata = self._interceptor.pre_get_plugin(request, metadata) + pb_request = plugin_service.GetPluginRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = plugin_service.Plugin() + pb_resp = plugin_service.Plugin.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_plugin(resp) + return resp + + @property + def disable_plugin( + self, + ) -> Callable[[plugin_service.DisablePluginRequest], plugin_service.Plugin]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DisablePlugin(self._session, self._host, self._interceptor) # type: ignore + + @property + def enable_plugin( + self, + ) -> Callable[[plugin_service.EnablePluginRequest], plugin_service.Plugin]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnablePlugin(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_plugin( + self, + ) -> Callable[[plugin_service.GetPluginRequest], plugin_service.Plugin]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPlugin(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(ApiHubPluginRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(ApiHubPluginRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ApiHubPluginRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(ApiHubPluginRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ApiHubPluginRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ApiHubPluginRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ApiHubPluginRestTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py new file mode 100644 index 000000000000..f5f90e47cdb9 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import HostProjectRegistrationServiceClient + +__all__ = ("HostProjectRegistrationServiceClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/async_client.py new file mode 100644 index 000000000000..be906eaa63df --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/async_client.py @@ -0,0 +1,1062 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.apihub_v1.services.host_project_registration_service import pagers +from google.cloud.apihub_v1.types import host_project_registration_service + +from .client import HostProjectRegistrationServiceClient +from .transports.base import ( + DEFAULT_CLIENT_INFO, + HostProjectRegistrationServiceTransport, +) +from .transports.grpc_asyncio import HostProjectRegistrationServiceGrpcAsyncIOTransport + + +class HostProjectRegistrationServiceAsyncClient: + """This service is used for managing the host project + registrations. + """ + + _client: HostProjectRegistrationServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = HostProjectRegistrationServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = HostProjectRegistrationServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + HostProjectRegistrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = HostProjectRegistrationServiceClient._DEFAULT_UNIVERSE + + host_project_registration_path = staticmethod( + HostProjectRegistrationServiceClient.host_project_registration_path + ) + parse_host_project_registration_path = staticmethod( + HostProjectRegistrationServiceClient.parse_host_project_registration_path + ) + common_billing_account_path = staticmethod( + HostProjectRegistrationServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + HostProjectRegistrationServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + HostProjectRegistrationServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + HostProjectRegistrationServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + HostProjectRegistrationServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + HostProjectRegistrationServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + HostProjectRegistrationServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + HostProjectRegistrationServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + HostProjectRegistrationServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + HostProjectRegistrationServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + HostProjectRegistrationServiceAsyncClient: The constructed client. + """ + return HostProjectRegistrationServiceClient.from_service_account_info.__func__(HostProjectRegistrationServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + HostProjectRegistrationServiceAsyncClient: The constructed client. + """ + return HostProjectRegistrationServiceClient.from_service_account_file.__func__(HostProjectRegistrationServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return HostProjectRegistrationServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> HostProjectRegistrationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + HostProjectRegistrationServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = HostProjectRegistrationServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + HostProjectRegistrationServiceTransport, + Callable[..., HostProjectRegistrationServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the host project registration service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,HostProjectRegistrationServiceTransport,Callable[..., HostProjectRegistrationServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the HostProjectRegistrationServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = HostProjectRegistrationServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_host_project_registration( + self, + request: Optional[ + Union[ + host_project_registration_service.CreateHostProjectRegistrationRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + host_project_registration: Optional[ + host_project_registration_service.HostProjectRegistration + ] = None, + host_project_registration_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> host_project_registration_service.HostProjectRegistration: + r"""Create a host project registration. + A Google cloud project can be registered as a host + project if it is not attached as a runtime project to + another host project. A project can be registered as a + host project only once. Subsequent register calls for + the same project will fail. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_create_host_project_registration(): + # Create a client + client = apihub_v1.HostProjectRegistrationServiceAsyncClient() + + # Initialize request argument(s) + host_project_registration = apihub_v1.HostProjectRegistration() + host_project_registration.gcp_project = "gcp_project_value" + + request = apihub_v1.CreateHostProjectRegistrationRequest( + parent="parent_value", + host_project_registration_id="host_project_registration_id_value", + host_project_registration=host_project_registration, + ) + + # Make the request + response = await client.create_host_project_registration(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.CreateHostProjectRegistrationRequest, dict]]): + The request object. The + [CreateHostProjectRegistration][google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration] + method's request. + parent (:class:`str`): + Required. The parent resource for the host project. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + host_project_registration (:class:`google.cloud.apihub_v1.types.HostProjectRegistration`): + Required. The host project + registration to register. + + This corresponds to the ``host_project_registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + host_project_registration_id (:class:`str`): + Required. The ID to use for the Host Project + Registration, which will become the final component of + the host project registration's resource name. 
The ID + must be the same as the Google cloud project specified + in the host_project_registration.gcp_project field. + + This corresponds to the ``host_project_registration_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.HostProjectRegistration: + Host project registration refers to + the registration of a Google cloud + project with Api Hub as a host project. + This is the project where Api Hub is + provisioned. It acts as the consumer + project for the Api Hub instance + provisioned. Multiple runtime projects + can be attached to the host project and + these attachments define the scope of + Api Hub. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, host_project_registration, host_project_registration_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + host_project_registration_service.CreateHostProjectRegistrationRequest, + ): + request = ( + host_project_registration_service.CreateHostProjectRegistrationRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if host_project_registration is not None: + request.host_project_registration = host_project_registration + if host_project_registration_id is not None: + request.host_project_registration_id = host_project_registration_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_host_project_registration + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_host_project_registration( + self, + request: Optional[ + Union[ + host_project_registration_service.GetHostProjectRegistrationRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> host_project_registration_service.HostProjectRegistration: + r"""Get a host project registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_host_project_registration(): + # Create a client + client = apihub_v1.HostProjectRegistrationServiceAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetHostProjectRegistrationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_host_project_registration(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetHostProjectRegistrationRequest, dict]]): + The request object. The + [GetHostProjectRegistration][google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration] + method's request. + name (:class:`str`): + Required. Host project registration resource name. + projects/{project}/locations/{location}/hostProjectRegistrations/{host_project_registration_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.HostProjectRegistration: + Host project registration refers to + the registration of a Google cloud + project with Api Hub as a host project. + This is the project where Api Hub is + provisioned. It acts as the consumer + project for the Api Hub instance + provisioned. Multiple runtime projects + can be attached to the host project and + these attachments define the scope of + Api Hub. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, host_project_registration_service.GetHostProjectRegistrationRequest + ): + request = ( + host_project_registration_service.GetHostProjectRegistrationRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_host_project_registration + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_host_project_registrations( + self, + request: Optional[ + Union[ + host_project_registration_service.ListHostProjectRegistrationsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListHostProjectRegistrationsAsyncPager: + r"""Lists host project registrations. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_list_host_project_registrations(): + # Create a client + client = apihub_v1.HostProjectRegistrationServiceAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.ListHostProjectRegistrationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_host_project_registrations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest, dict]]): + The request object. The + [ListHostProjectRegistrations][google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations] + method's request. + parent (:class:`str`): + Required. The parent, which owns this collection of host + projects. Format: ``projects/*/locations/*`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.host_project_registration_service.pagers.ListHostProjectRegistrationsAsyncPager: + The + [ListHostProjectRegistrations][google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations] + method's response. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + host_project_registration_service.ListHostProjectRegistrationsRequest, + ): + request = ( + host_project_registration_service.ListHostProjectRegistrationsRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_host_project_registrations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListHostProjectRegistrationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "HostProjectRegistrationServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("HostProjectRegistrationServiceAsyncClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py new file mode 100644 index 000000000000..2e5897cdad20 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py @@ -0,0 +1,1486 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.apihub_v1.services.host_project_registration_service import pagers +from google.cloud.apihub_v1.types import host_project_registration_service + +from .transports.base import ( + DEFAULT_CLIENT_INFO, + HostProjectRegistrationServiceTransport, +) +from .transports.rest import HostProjectRegistrationServiceRestTransport + + +class HostProjectRegistrationServiceClientMeta(type): + """Metaclass for the HostProjectRegistrationService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[HostProjectRegistrationServiceTransport]] + _transport_registry["rest"] = HostProjectRegistrationServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[HostProjectRegistrationServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class HostProjectRegistrationServiceClient( + metaclass=HostProjectRegistrationServiceClientMeta +): + """This service is used for managing the host project + registrations. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "apihub.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "apihub.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + HostProjectRegistrationServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + HostProjectRegistrationServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> HostProjectRegistrationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + HostProjectRegistrationServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def host_project_registration_path( + project: str, + location: str, + host_project_registration: str, + ) -> str: + """Returns a fully-qualified host_project_registration string.""" + return "projects/{project}/locations/{location}/hostProjectRegistrations/{host_project_registration}".format( + project=project, + location=location, + host_project_registration=host_project_registration, + ) + + @staticmethod + def parse_host_project_registration_path(path: str) -> Dict[str, str]: + """Parses a host_project_registration path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/hostProjectRegistrations/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = 
re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. 
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. 
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. 
If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = HostProjectRegistrationServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = HostProjectRegistrationServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + HostProjectRegistrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = HostProjectRegistrationServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = HostProjectRegistrationServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or HostProjectRegistrationServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + HostProjectRegistrationServiceTransport, + Callable[..., HostProjectRegistrationServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the host project registration service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,HostProjectRegistrationServiceTransport,Callable[..., HostProjectRegistrationServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the HostProjectRegistrationServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = HostProjectRegistrationServiceClient._read_environment_variables() + self._client_cert_source = ( + HostProjectRegistrationServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = ( + HostProjectRegistrationServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance( + transport, HostProjectRegistrationServiceTransport + ) + if transport_provided: + # transport is a HostProjectRegistrationServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(HostProjectRegistrationServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or HostProjectRegistrationServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[HostProjectRegistrationServiceTransport], + Callable[..., HostProjectRegistrationServiceTransport], + ] = ( + HostProjectRegistrationServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., HostProjectRegistrationServiceTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_host_project_registration( + self, + request: Optional[ + Union[ + host_project_registration_service.CreateHostProjectRegistrationRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + host_project_registration: Optional[ + host_project_registration_service.HostProjectRegistration + ] = None, + host_project_registration_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> 
host_project_registration_service.HostProjectRegistration: + r"""Create a host project registration. + A Google cloud project can be registered as a host + project if it is not attached as a runtime project to + another host project. A project can be registered as a + host project only once. Subsequent register calls for + the same project will fail. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_create_host_project_registration(): + # Create a client + client = apihub_v1.HostProjectRegistrationServiceClient() + + # Initialize request argument(s) + host_project_registration = apihub_v1.HostProjectRegistration() + host_project_registration.gcp_project = "gcp_project_value" + + request = apihub_v1.CreateHostProjectRegistrationRequest( + parent="parent_value", + host_project_registration_id="host_project_registration_id_value", + host_project_registration=host_project_registration, + ) + + # Make the request + response = client.create_host_project_registration(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.CreateHostProjectRegistrationRequest, dict]): + The request object. The + [CreateHostProjectRegistration][google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration] + method's request. + parent (str): + Required. The parent resource for the host project. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ host_project_registration (google.cloud.apihub_v1.types.HostProjectRegistration): + Required. The host project + registration to register. + + This corresponds to the ``host_project_registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + host_project_registration_id (str): + Required. The ID to use for the Host Project + Registration, which will become the final component of + the host project registration's resource name. The ID + must be the same as the Google cloud project specified + in the host_project_registration.gcp_project field. + + This corresponds to the ``host_project_registration_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.HostProjectRegistration: + Host project registration refers to + the registration of a Google cloud + project with Api Hub as a host project. + This is the project where Api Hub is + provisioned. It acts as the consumer + project for the Api Hub instance + provisioned. Multiple runtime projects + can be attached to the host project and + these attachments define the scope of + Api Hub. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, host_project_registration, host_project_registration_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + host_project_registration_service.CreateHostProjectRegistrationRequest, + ): + request = ( + host_project_registration_service.CreateHostProjectRegistrationRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if host_project_registration is not None: + request.host_project_registration = host_project_registration + if host_project_registration_id is not None: + request.host_project_registration_id = host_project_registration_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_host_project_registration + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_host_project_registration( + self, + request: Optional[ + Union[ + host_project_registration_service.GetHostProjectRegistrationRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> host_project_registration_service.HostProjectRegistration: + r"""Get a host project registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_host_project_registration(): + # Create a client + client = apihub_v1.HostProjectRegistrationServiceClient() + + # Initialize request argument(s) + request = apihub_v1.GetHostProjectRegistrationRequest( + name="name_value", + ) + + # Make the request + response = client.get_host_project_registration(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetHostProjectRegistrationRequest, dict]): + The request object. The + [GetHostProjectRegistration][google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration] + method's request. + name (str): + Required. Host project registration resource name. + projects/{project}/locations/{location}/hostProjectRegistrations/{host_project_registration_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.HostProjectRegistration: + Host project registration refers to + the registration of a Google cloud + project with Api Hub as a host project. + This is the project where Api Hub is + provisioned. It acts as the consumer + project for the Api Hub instance + provisioned. Multiple runtime projects + can be attached to the host project and + these attachments define the scope of + Api Hub. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, host_project_registration_service.GetHostProjectRegistrationRequest + ): + request = ( + host_project_registration_service.GetHostProjectRegistrationRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_host_project_registration + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_host_project_registrations( + self, + request: Optional[ + Union[ + host_project_registration_service.ListHostProjectRegistrationsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListHostProjectRegistrationsPager: + r"""Lists host project registrations. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_list_host_project_registrations(): + # Create a client + client = apihub_v1.HostProjectRegistrationServiceClient() + + # Initialize request argument(s) + request = apihub_v1.ListHostProjectRegistrationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_host_project_registrations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest, dict]): + The request object. The + [ListHostProjectRegistrations][google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations] + method's request. + parent (str): + Required. The parent, which owns this collection of host + projects. Format: ``projects/*/locations/*`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.host_project_registration_service.pagers.ListHostProjectRegistrationsPager: + The + [ListHostProjectRegistrations][google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations] + method's response. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + host_project_registration_service.ListHostProjectRegistrationsRequest, + ): + request = ( + host_project_registration_service.ListHostProjectRegistrationsRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_host_project_registrations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListHostProjectRegistrationsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "HostProjectRegistrationServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("HostProjectRegistrationServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py similarity index 68% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py rename to packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py index 30f2279d1f01..4bb7e2ec7541 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py @@ -38,32 +38,34 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import contact_service +from google.cloud.apihub_v1.types import host_project_registration_service -class ListContactsPager: - """A pager for iterating through ``list_contacts`` requests. +class ListHostProjectRegistrationsPager: + """A pager for iterating through ``list_host_project_registrations`` requests. This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListContactsResponse` object, and + :class:`google.cloud.apihub_v1.types.ListHostProjectRegistrationsResponse` object, and provides an ``__iter__`` method to iterate through its - ``contacts`` field. + ``host_project_registrations`` field. 
If there are more pages, the ``__iter__`` method will make additional - ``ListContacts`` requests and continue to iterate - through the ``contacts`` field on the + ``ListHostProjectRegistrations`` requests and continue to iterate + through the ``host_project_registrations`` field on the corresponding responses. - All the usual :class:`google.ads.admanager_v1.types.ListContactsResponse` + All the usual :class:`google.cloud.apihub_v1.types.ListHostProjectRegistrationsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., contact_service.ListContactsResponse], - request: contact_service.ListContactsRequest, - response: contact_service.ListContactsResponse, + method: Callable[ + ..., host_project_registration_service.ListHostProjectRegistrationsResponse + ], + request: host_project_registration_service.ListHostProjectRegistrationsRequest, + response: host_project_registration_service.ListHostProjectRegistrationsResponse, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -74,9 +76,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.ads.admanager_v1.types.ListContactsRequest): + request (google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest): The initial request object. - response (google.ads.admanager_v1.types.ListContactsResponse): + response (google.cloud.apihub_v1.types.ListHostProjectRegistrationsResponse): The initial response object. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -85,7 +87,11 @@ def __init__( sent along with the request as metadata. 
""" self._method = method - self._request = contact_service.ListContactsRequest(request) + self._request = ( + host_project_registration_service.ListHostProjectRegistrationsRequest( + request + ) + ) self._response = response self._retry = retry self._timeout = timeout @@ -95,7 +101,11 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[contact_service.ListContactsResponse]: + def pages( + self, + ) -> Iterator[ + host_project_registration_service.ListHostProjectRegistrationsResponse + ]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -107,9 +117,11 @@ def pages(self) -> Iterator[contact_service.ListContactsResponse]: ) yield self._response - def __iter__(self) -> Iterator[contact_service.Contact]: + def __iter__( + self, + ) -> Iterator[host_project_registration_service.HostProjectRegistration]: for page in self.pages: - yield from page.contacts + yield from page.host_project_registrations def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py new file mode 100644 index 000000000000..c80657406ff6 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import HostProjectRegistrationServiceTransport +from .rest import ( + HostProjectRegistrationServiceRestInterceptor, + HostProjectRegistrationServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[HostProjectRegistrationServiceTransport]] +_transport_registry["rest"] = HostProjectRegistrationServiceRestTransport + +__all__ = ( + "HostProjectRegistrationServiceTransport", + "HostProjectRegistrationServiceRestTransport", + "HostProjectRegistrationServiceRestInterceptor", +) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/base.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/base.py new file mode 100644 index 000000000000..63c06d3e9b29 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/base.py @@ -0,0 +1,274 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version +from google.cloud.apihub_v1.types import host_project_registration_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class HostProjectRegistrationServiceTransport(abc.ABC): + """Abstract transport class for HostProjectRegistrationService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "apihub.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_host_project_registration: gapic_v1.method.wrap_method( + self.create_host_project_registration, + default_timeout=60.0, + client_info=client_info, + ), + self.get_host_project_registration: gapic_v1.method.wrap_method( + self.get_host_project_registration, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_host_project_registrations: gapic_v1.method.wrap_method( + self.list_host_project_registrations, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_host_project_registration( + self, + ) -> Callable[ + [host_project_registration_service.CreateHostProjectRegistrationRequest], + Union[ + host_project_registration_service.HostProjectRegistration, + Awaitable[host_project_registration_service.HostProjectRegistration], + ], + ]: + raise NotImplementedError() + + @property + def get_host_project_registration( + self, + ) -> Callable[ + [host_project_registration_service.GetHostProjectRegistrationRequest], + Union[ + host_project_registration_service.HostProjectRegistration, + Awaitable[host_project_registration_service.HostProjectRegistration], + ], + ]: + raise NotImplementedError() + + @property + def list_host_project_registrations( + self, + ) -> Callable[ + [host_project_registration_service.ListHostProjectRegistrationsRequest], + Union[ + host_project_registration_service.ListHostProjectRegistrationsResponse, + Awaitable[ + host_project_registration_service.ListHostProjectRegistrationsResponse + ], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise 
NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("HostProjectRegistrationServiceTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/grpc.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/grpc.py new file mode 100644 index 000000000000..5ea890aefe8e --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/grpc.py @@ -0,0 +1,456 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.apihub_v1.types import host_project_registration_service + +from .base import DEFAULT_CLIENT_INFO, HostProjectRegistrationServiceTransport + + +class HostProjectRegistrationServiceGrpcTransport( + HostProjectRegistrationServiceTransport +): + """gRPC backend transport for HostProjectRegistrationService. + + This service is used for managing the host project + registrations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. 
It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_host_project_registration( + self, + ) -> Callable[ + [host_project_registration_service.CreateHostProjectRegistrationRequest], + host_project_registration_service.HostProjectRegistration, + ]: + r"""Return a callable for the create host project + registration method over gRPC. + + Create a host project registration. + A Google cloud project can be registered as a host + project if it is not attached as a runtime project to + another host project. A project can be registered as a + host project only once. 
Subsequent register calls for + the same project will fail. + + Returns: + Callable[[~.CreateHostProjectRegistrationRequest], + ~.HostProjectRegistration]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_host_project_registration" not in self._stubs: + self._stubs[ + "create_host_project_registration" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.HostProjectRegistrationService/CreateHostProjectRegistration", + request_serializer=host_project_registration_service.CreateHostProjectRegistrationRequest.serialize, + response_deserializer=host_project_registration_service.HostProjectRegistration.deserialize, + ) + return self._stubs["create_host_project_registration"] + + @property + def get_host_project_registration( + self, + ) -> Callable[ + [host_project_registration_service.GetHostProjectRegistrationRequest], + host_project_registration_service.HostProjectRegistration, + ]: + r"""Return a callable for the get host project registration method over gRPC. + + Get a host project registration. + + Returns: + Callable[[~.GetHostProjectRegistrationRequest], + ~.HostProjectRegistration]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_host_project_registration" not in self._stubs: + self._stubs[ + "get_host_project_registration" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.HostProjectRegistrationService/GetHostProjectRegistration", + request_serializer=host_project_registration_service.GetHostProjectRegistrationRequest.serialize, + response_deserializer=host_project_registration_service.HostProjectRegistration.deserialize, + ) + return self._stubs["get_host_project_registration"] + + @property + def list_host_project_registrations( + self, + ) -> Callable[ + [host_project_registration_service.ListHostProjectRegistrationsRequest], + host_project_registration_service.ListHostProjectRegistrationsResponse, + ]: + r"""Return a callable for the list host project + registrations method over gRPC. + + Lists host project registrations. + + Returns: + Callable[[~.ListHostProjectRegistrationsRequest], + ~.ListHostProjectRegistrationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_host_project_registrations" not in self._stubs: + self._stubs[ + "list_host_project_registrations" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.HostProjectRegistrationService/ListHostProjectRegistrations", + request_serializer=host_project_registration_service.ListHostProjectRegistrationsRequest.serialize, + response_deserializer=host_project_registration_service.ListHostProjectRegistrationsResponse.deserialize, + ) + return self._stubs["list_host_project_registrations"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("HostProjectRegistrationServiceGrpcTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/grpc_asyncio.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..31680c310ff8 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/grpc_asyncio.py @@ -0,0 +1,496 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.apihub_v1.types import host_project_registration_service + +from .base import DEFAULT_CLIENT_INFO, HostProjectRegistrationServiceTransport +from .grpc import HostProjectRegistrationServiceGrpcTransport + + +class HostProjectRegistrationServiceGrpcAsyncIOTransport( + HostProjectRegistrationServiceTransport +): + """gRPC AsyncIO backend transport for HostProjectRegistrationService. + + This service is used for managing the host project + registrations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. 
It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def create_host_project_registration( + self, + ) -> Callable[ + [host_project_registration_service.CreateHostProjectRegistrationRequest], + Awaitable[host_project_registration_service.HostProjectRegistration], + ]: + r"""Return a callable for the create host project + registration method over gRPC. + + Create a host project registration. + A Google cloud project can be registered as a host + project if it is not attached as a runtime project to + another host project. A project can be registered as a + host project only once. Subsequent register calls for + the same project will fail. + + Returns: + Callable[[~.CreateHostProjectRegistrationRequest], + Awaitable[~.HostProjectRegistration]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_host_project_registration" not in self._stubs: + self._stubs[ + "create_host_project_registration" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.HostProjectRegistrationService/CreateHostProjectRegistration", + request_serializer=host_project_registration_service.CreateHostProjectRegistrationRequest.serialize, + response_deserializer=host_project_registration_service.HostProjectRegistration.deserialize, + ) + return self._stubs["create_host_project_registration"] + + @property + def get_host_project_registration( + self, + ) -> Callable[ + [host_project_registration_service.GetHostProjectRegistrationRequest], + Awaitable[host_project_registration_service.HostProjectRegistration], + ]: + r"""Return a callable for the get host project registration method over gRPC. + + Get a host project registration. 
+ + Returns: + Callable[[~.GetHostProjectRegistrationRequest], + Awaitable[~.HostProjectRegistration]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_host_project_registration" not in self._stubs: + self._stubs[ + "get_host_project_registration" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.HostProjectRegistrationService/GetHostProjectRegistration", + request_serializer=host_project_registration_service.GetHostProjectRegistrationRequest.serialize, + response_deserializer=host_project_registration_service.HostProjectRegistration.deserialize, + ) + return self._stubs["get_host_project_registration"] + + @property + def list_host_project_registrations( + self, + ) -> Callable[ + [host_project_registration_service.ListHostProjectRegistrationsRequest], + Awaitable[ + host_project_registration_service.ListHostProjectRegistrationsResponse + ], + ]: + r"""Return a callable for the list host project + registrations method over gRPC. + + Lists host project registrations. + + Returns: + Callable[[~.ListHostProjectRegistrationsRequest], + Awaitable[~.ListHostProjectRegistrationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_host_project_registrations" not in self._stubs: + self._stubs[ + "list_host_project_registrations" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.HostProjectRegistrationService/ListHostProjectRegistrations", + request_serializer=host_project_registration_service.ListHostProjectRegistrationsRequest.serialize, + response_deserializer=host_project_registration_service.ListHostProjectRegistrationsResponse.deserialize, + ) + return self._stubs["list_host_project_registrations"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_host_project_registration: gapic_v1.method_async.wrap_method( + self.create_host_project_registration, + default_timeout=60.0, + client_info=client_info, + ), + self.get_host_project_registration: gapic_v1.method_async.wrap_method( + self.get_host_project_registration, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_host_project_registrations: gapic_v1.method_async.wrap_method( + self.list_host_project_registrations, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("HostProjectRegistrationServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/rest.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/rest.py new file mode 100644 index 000000000000..966081b95b65 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/rest.py @@ -0,0 +1,1160 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.apihub_v1.types import host_project_registration_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import HostProjectRegistrationServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class HostProjectRegistrationServiceRestInterceptor: + """Interceptor for HostProjectRegistrationService. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the HostProjectRegistrationServiceRestTransport. + + .. code-block:: python + class MyCustomHostProjectRegistrationServiceInterceptor(HostProjectRegistrationServiceRestInterceptor): + def pre_create_host_project_registration(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_host_project_registration(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_host_project_registration(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_host_project_registration(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_host_project_registrations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_host_project_registrations(self, response): + logging.log(f"Received response: {response}") + return response + + transport = HostProjectRegistrationServiceRestTransport(interceptor=MyCustomHostProjectRegistrationServiceInterceptor()) + client = HostProjectRegistrationServiceClient(transport=transport) + + + """ + + def pre_create_host_project_registration( + self, + request: host_project_registration_service.CreateHostProjectRegistrationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + host_project_registration_service.CreateHostProjectRegistrationRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_host_project_registration + + Override in a subclass to manipulate the request or metadata + before 
they are sent to the HostProjectRegistrationService server. + """ + return request, metadata + + def post_create_host_project_registration( + self, response: host_project_registration_service.HostProjectRegistration + ) -> host_project_registration_service.HostProjectRegistration: + """Post-rpc interceptor for create_host_project_registration + + Override in a subclass to manipulate the response + after it is returned by the HostProjectRegistrationService server but before + it is returned to user code. + """ + return response + + def pre_get_host_project_registration( + self, + request: host_project_registration_service.GetHostProjectRegistrationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + host_project_registration_service.GetHostProjectRegistrationRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for get_host_project_registration + + Override in a subclass to manipulate the request or metadata + before they are sent to the HostProjectRegistrationService server. + """ + return request, metadata + + def post_get_host_project_registration( + self, response: host_project_registration_service.HostProjectRegistration + ) -> host_project_registration_service.HostProjectRegistration: + """Post-rpc interceptor for get_host_project_registration + + Override in a subclass to manipulate the response + after it is returned by the HostProjectRegistrationService server but before + it is returned to user code. + """ + return response + + def pre_list_host_project_registrations( + self, + request: host_project_registration_service.ListHostProjectRegistrationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + host_project_registration_service.ListHostProjectRegistrationsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_host_project_registrations + + Override in a subclass to manipulate the request or metadata + before they are sent to the HostProjectRegistrationService server. 
+ """ + return request, metadata + + def post_list_host_project_registrations( + self, + response: host_project_registration_service.ListHostProjectRegistrationsResponse, + ) -> host_project_registration_service.ListHostProjectRegistrationsResponse: + """Post-rpc interceptor for list_host_project_registrations + + Override in a subclass to manipulate the response + after it is returned by the HostProjectRegistrationService server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the HostProjectRegistrationService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the HostProjectRegistrationService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the HostProjectRegistrationService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the HostProjectRegistrationService server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the HostProjectRegistrationService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the HostProjectRegistrationService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the HostProjectRegistrationService server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the HostProjectRegistrationService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the HostProjectRegistrationService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the HostProjectRegistrationService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the HostProjectRegistrationService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the HostProjectRegistrationService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class HostProjectRegistrationServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: HostProjectRegistrationServiceRestInterceptor + + +class HostProjectRegistrationServiceRestTransport( + HostProjectRegistrationServiceTransport +): + """REST backend transport for HostProjectRegistrationService. + + This service is used for managing the host project + registrations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[HostProjectRegistrationServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+        always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = (
+            interceptor or HostProjectRegistrationServiceRestInterceptor()
+        )
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateHostProjectRegistration(HostProjectRegistrationServiceRestStub):
+        def __hash__(self):
+            return hash("CreateHostProjectRegistration")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "hostProjectRegistrationId": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: host_project_registration_service.CreateHostProjectRegistrationRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) ->
host_project_registration_service.HostProjectRegistration: + r"""Call the create host project + registration method over HTTP. + + Args: + request (~.host_project_registration_service.CreateHostProjectRegistrationRequest): + The request object. The + [CreateHostProjectRegistration][google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.host_project_registration_service.HostProjectRegistration: + Host project registration refers to + the registration of a Google cloud + project with Api Hub as a host project. + This is the project where Api Hub is + provisioned. It acts as the consumer + project for the Api Hub instance + provisioned. Multiple runtime projects + can be attached to the host project and + these attachments define the scope of + Api Hub. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/hostProjectRegistrations", + "body": "host_project_registration", + }, + ] + request, metadata = self._interceptor.pre_create_host_project_registration( + request, metadata + ) + pb_request = host_project_registration_service.CreateHostProjectRegistrationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = host_project_registration_service.HostProjectRegistration() + pb_resp = host_project_registration_service.HostProjectRegistration.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_host_project_registration(resp) + return resp + + class _GetHostProjectRegistration(HostProjectRegistrationServiceRestStub): + def __hash__(self): + return hash("GetHostProjectRegistration") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: host_project_registration_service.GetHostProjectRegistrationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> host_project_registration_service.HostProjectRegistration: + r"""Call the get host project + registration method over HTTP. + + Args: + request (~.host_project_registration_service.GetHostProjectRegistrationRequest): + The request object. The + [GetHostProjectRegistration][google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.host_project_registration_service.HostProjectRegistration: + Host project registration refers to + the registration of a Google cloud + project with Api Hub as a host project. + This is the project where Api Hub is + provisioned. It acts as the consumer + project for the Api Hub instance + provisioned. 
Multiple runtime projects + can be attached to the host project and + these attachments define the scope of + Api Hub. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/hostProjectRegistrations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_host_project_registration( + request, metadata + ) + pb_request = ( + host_project_registration_service.GetHostProjectRegistrationRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = host_project_registration_service.HostProjectRegistration() + pb_resp = host_project_registration_service.HostProjectRegistration.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_host_project_registration(resp) + return resp + + class _ListHostProjectRegistrations(HostProjectRegistrationServiceRestStub): + def __hash__(self): + return hash("ListHostProjectRegistrations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: host_project_registration_service.ListHostProjectRegistrationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> host_project_registration_service.ListHostProjectRegistrationsResponse: + r"""Call the list host project + registrations method over HTTP. + + Args: + request (~.host_project_registration_service.ListHostProjectRegistrationsRequest): + The request object. The + [ListHostProjectRegistrations][google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.host_project_registration_service.ListHostProjectRegistrationsResponse: + The + [ListHostProjectRegistrations][google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations] + method's response. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/hostProjectRegistrations", + }, + ] + request, metadata = self._interceptor.pre_list_host_project_registrations( + request, metadata + ) + pb_request = host_project_registration_service.ListHostProjectRegistrationsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ( + host_project_registration_service.ListHostProjectRegistrationsResponse() + ) + pb_resp = host_project_registration_service.ListHostProjectRegistrationsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_host_project_registrations(resp) + return resp + + @property + def create_host_project_registration( + self, + ) -> Callable[ + [host_project_registration_service.CreateHostProjectRegistrationRequest], + host_project_registration_service.HostProjectRegistration, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateHostProjectRegistration(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_host_project_registration( + self, + ) -> Callable[ + [host_project_registration_service.GetHostProjectRegistrationRequest], + host_project_registration_service.HostProjectRegistration, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetHostProjectRegistration(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_host_project_registrations( + self, + ) -> Callable[ + [host_project_registration_service.ListHostProjectRegistrationsRequest], + host_project_registration_service.ListHostProjectRegistrationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListHostProjectRegistrations(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(HostProjectRegistrationServiceRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(HostProjectRegistrationServiceRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(HostProjectRegistrationServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(HostProjectRegistrationServiceRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. 
+ + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(HostProjectRegistrationServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(HostProjectRegistrationServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("HostProjectRegistrationServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py similarity index 88% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/__init__.py rename to packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py index 20eee0424097..68f5fe54993b 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/__init__.py +++ 
b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py @@ -13,6 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import ContactServiceClient +from .client import LintingServiceClient -__all__ = ("ContactServiceClient",) +__all__ = ("LintingServiceClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/async_client.py new file mode 100644 index 000000000000..e0a1abb6ea97 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/async_client.py @@ -0,0 +1,1033 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.apihub_v1.types import common_fields, linting_service + +from .client import LintingServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, LintingServiceTransport +from .transports.grpc_asyncio import LintingServiceGrpcAsyncIOTransport + + +class LintingServiceAsyncClient: + """This service provides all methods related to the 1p Linter.""" + + _client: LintingServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = LintingServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LintingServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = LintingServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = LintingServiceClient._DEFAULT_UNIVERSE + + spec_path = staticmethod(LintingServiceClient.spec_path) + parse_spec_path = staticmethod(LintingServiceClient.parse_spec_path) + style_guide_path = staticmethod(LintingServiceClient.style_guide_path) + parse_style_guide_path = staticmethod(LintingServiceClient.parse_style_guide_path) + common_billing_account_path = staticmethod( + LintingServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + LintingServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(LintingServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + LintingServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + LintingServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + LintingServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(LintingServiceClient.common_project_path) + parse_common_project_path = staticmethod( + LintingServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(LintingServiceClient.common_location_path) + parse_common_location_path = staticmethod( + LintingServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LintingServiceAsyncClient: The constructed client. 
+ """ + return LintingServiceClient.from_service_account_info.__func__(LintingServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LintingServiceAsyncClient: The constructed client. + """ + return LintingServiceClient.from_service_account_file.__func__(LintingServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return LintingServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> LintingServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LintingServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = LintingServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, LintingServiceTransport, Callable[..., LintingServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the linting service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,LintingServiceTransport,Callable[..., LintingServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LintingServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = LintingServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_style_guide( + self, + request: Optional[Union[linting_service.GetStyleGuideRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> linting_service.StyleGuide: + r"""Get the style guide being used for linting. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_style_guide(): + # Create a client + client = apihub_v1.LintingServiceAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetStyleGuideRequest( + name="name_value", + ) + + # Make the request + response = await client.get_style_guide(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetStyleGuideRequest, dict]]): + The request object. The [GetStyleGuide][ApiHub.GetStyleGuide] method's + request. + name (:class:`str`): + Required. The name of the spec to retrieve. Format: + ``projects/{project}/locations/{location}/plugins/{plugin}/styleGuide``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.StyleGuide: + Represents a singleton style guide + resource to be used for linting Open API + specs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, linting_service.GetStyleGuideRequest): + request = linting_service.GetStyleGuideRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_style_guide + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_style_guide( + self, + request: Optional[Union[linting_service.UpdateStyleGuideRequest, dict]] = None, + *, + style_guide: Optional[linting_service.StyleGuide] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> linting_service.StyleGuide: + r"""Update the styleGuide to be used for liniting in by + API hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_update_style_guide(): + # Create a client + client = apihub_v1.LintingServiceAsyncClient() + + # Initialize request argument(s) + style_guide = apihub_v1.StyleGuide() + style_guide.linter = "OTHER" + style_guide.contents.contents = b'contents_blob' + style_guide.contents.mime_type = "mime_type_value" + + request = apihub_v1.UpdateStyleGuideRequest( + style_guide=style_guide, + ) + + # Make the request + response = await client.update_style_guide(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.UpdateStyleGuideRequest, dict]]): + The request object. The [UpdateStyleGuide][ApiHub.UpdateStyleGuide] method's + request. + style_guide (:class:`google.cloud.apihub_v1.types.StyleGuide`): + Required. The Style guide resource to + update. + + This corresponds to the ``style_guide`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.StyleGuide: + Represents a singleton style guide + resource to be used for linting Open API + specs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([style_guide, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, linting_service.UpdateStyleGuideRequest): + request = linting_service.UpdateStyleGuideRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if style_guide is not None: + request.style_guide = style_guide + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_style_guide + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("style_guide.name", request.style_guide.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_style_guide_contents( + self, + request: Optional[ + Union[linting_service.GetStyleGuideContentsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> linting_service.StyleGuideContents: + r"""Get the contents of the style guide. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_style_guide_contents(): + # Create a client + client = apihub_v1.LintingServiceAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetStyleGuideContentsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_style_guide_contents(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetStyleGuideContentsRequest, dict]]): + The request object. The + [GetStyleGuideContents][ApiHub.GetStyleGuideContents] + method's request. + name (:class:`str`): + Required. The name of the StyleGuide whose contents need + to be retrieved. 
There is exactly one style guide + resource per project per location. The expected format + is + ``projects/{project}/locations/{location}/plugins/{plugin}/styleGuide``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.StyleGuideContents: + The style guide contents. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, linting_service.GetStyleGuideContentsRequest): + request = linting_service.GetStyleGuideContentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_style_guide_contents + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def lint_spec( + self, + request: Optional[Union[linting_service.LintSpecRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Lints the requested spec and updates the + corresponding API Spec with the lint response. This lint + response will be available in all subsequent Get and + List Spec calls to Core service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_lint_spec(): + # Create a client + client = apihub_v1.LintingServiceAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.LintSpecRequest( + name="name_value", + ) + + # Make the request + await client.lint_spec(request=request) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.LintSpecRequest, dict]]): + The request object. The [LintSpec][ApiHub.LintSpec] method's request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, linting_service.LintSpecRequest): + request = linting_service.LintSpecRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.lint_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "LintingServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("LintingServiceAsyncClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py new file mode 100644 index 000000000000..608153448c3b --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py @@ -0,0 +1,1472 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.apihub_v1.types import common_fields, linting_service + +from .transports.base import DEFAULT_CLIENT_INFO, LintingServiceTransport +from .transports.rest import LintingServiceRestTransport + + +class LintingServiceClientMeta(type): + """Metaclass for the LintingService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[LintingServiceTransport]] + _transport_registry["rest"] = LintingServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[LintingServiceTransport]: + """Returns an appropriate transport class. 
+ + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class LintingServiceClient(metaclass=LintingServiceClientMeta): + """This service provides all methods related to the 1p Linter.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "apihub.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "apihub.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LintingServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LintingServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> LintingServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LintingServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def spec_path( + project: str, + location: str, + api: str, + version: str, + spec: str, + ) -> str: + """Returns a fully-qualified spec string.""" + return "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( + project=project, + location=location, + api=api, + version=version, + spec=spec, + ) + + @staticmethod + def parse_spec_path(path: str) -> Dict[str, str]: + """Parses a spec path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apis/(?P.+?)/versions/(?P.+?)/specs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def style_guide_path( + project: str, + location: str, + plugin: str, + ) -> str: + """Returns a fully-qualified style_guide string.""" + return "projects/{project}/locations/{location}/plugins/{plugin}/styleGuide".format( + project=project, + location=location, + plugin=plugin, + ) + + @staticmethod + def parse_style_guide_path(path: str) -> Dict[str, str]: + """Parses a style_guide path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/plugins/(?P.+?)/styleGuide$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder 
path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = LintingServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = LintingServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = LintingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. 
+ + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = LintingServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = LintingServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. 
+ + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or LintingServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, LintingServiceTransport, Callable[..., LintingServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the linting service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,LintingServiceTransport,Callable[..., LintingServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LintingServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = LintingServiceClient._read_environment_variables() + self._client_cert_source = LintingServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = LintingServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, LintingServiceTransport) + if transport_provided: + # transport is a LintingServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(LintingServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or LintingServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[LintingServiceTransport], Callable[..., LintingServiceTransport] + ] = ( + LintingServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., LintingServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_style_guide( + self, + request: Optional[Union[linting_service.GetStyleGuideRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> linting_service.StyleGuide: + r"""Get the style guide being used for linting. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_style_guide(): + # Create a client + client = apihub_v1.LintingServiceClient() + + # Initialize request argument(s) + request = apihub_v1.GetStyleGuideRequest( + name="name_value", + ) + + # Make the request + response = client.get_style_guide(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetStyleGuideRequest, dict]): + The request object. The [GetStyleGuide][ApiHub.GetStyleGuide] method's + request. + name (str): + Required. The name of the spec to retrieve. Format: + ``projects/{project}/locations/{location}/plugins/{plugin}/styleGuide``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.StyleGuide: + Represents a singleton style guide + resource to be used for linting Open API + specs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+        if not isinstance(request, linting_service.GetStyleGuideRequest): +            request = linting_service.GetStyleGuideRequest(request) +            # If we have keyword arguments corresponding to fields on the +            # request, apply these. +            if name is not None: +                request.name = name + +        # Wrap the RPC method; this adds retry and timeout information, +        # and friendly error handling. +        rpc = self._transport._wrapped_methods[self._transport.get_style_guide] + +        # Certain fields should be provided within the metadata header; +        # add these here. +        metadata = tuple(metadata) + ( +            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), +        ) + +        # Validate the universe domain. +        self._validate_universe_domain() + +        # Send the request. +        response = rpc( +            request, +            retry=retry, +            timeout=timeout, +            metadata=metadata, +        ) + +        # Done; return the response. +        return response + +    def update_style_guide( +        self, +        request: Optional[Union[linting_service.UpdateStyleGuideRequest, dict]] = None, +        *, +        style_guide: Optional[linting_service.StyleGuide] = None, +        update_mask: Optional[field_mask_pb2.FieldMask] = None, +        retry: OptionalRetry = gapic_v1.method.DEFAULT, +        timeout: Union[float, object] = gapic_v1.method.DEFAULT, +        metadata: Sequence[Tuple[str, str]] = (), +    ) -> linting_service.StyleGuide: +        r"""Update the styleGuide to be used for linting by +        API hub. + +        .. code-block:: python + +            # This snippet has been automatically generated and should be regarded as a +            # code template only. +            # It will require modifications to work: +            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_update_style_guide(): + # Create a client + client = apihub_v1.LintingServiceClient() + + # Initialize request argument(s) + style_guide = apihub_v1.StyleGuide() + style_guide.linter = "OTHER" + style_guide.contents.contents = b'contents_blob' + style_guide.contents.mime_type = "mime_type_value" + + request = apihub_v1.UpdateStyleGuideRequest( + style_guide=style_guide, + ) + + # Make the request + response = client.update_style_guide(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.UpdateStyleGuideRequest, dict]): + The request object. The [UpdateStyleGuide][ApiHub.UpdateStyleGuide] method's + request. + style_guide (google.cloud.apihub_v1.types.StyleGuide): + Required. The Style guide resource to + update. + + This corresponds to the ``style_guide`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.StyleGuide: + Represents a singleton style guide + resource to be used for linting Open API + specs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([style_guide, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, linting_service.UpdateStyleGuideRequest): + request = linting_service.UpdateStyleGuideRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if style_guide is not None: + request.style_guide = style_guide + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_style_guide] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("style_guide.name", request.style_guide.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_style_guide_contents( + self, + request: Optional[ + Union[linting_service.GetStyleGuideContentsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> linting_service.StyleGuideContents: + r"""Get the contents of the style guide. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_style_guide_contents(): + # Create a client + client = apihub_v1.LintingServiceClient() + + # Initialize request argument(s) + request = apihub_v1.GetStyleGuideContentsRequest( + name="name_value", + ) + + # Make the request + response = client.get_style_guide_contents(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetStyleGuideContentsRequest, dict]): + The request object. The + [GetStyleGuideContents][ApiHub.GetStyleGuideContents] + method's request. + name (str): + Required. The name of the StyleGuide whose contents need + to be retrieved. There is exactly one style guide + resource per project per location. The expected format + is + ``projects/{project}/locations/{location}/plugins/{plugin}/styleGuide``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.StyleGuideContents: + The style guide contents. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, linting_service.GetStyleGuideContentsRequest): + request = linting_service.GetStyleGuideContentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_style_guide_contents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def lint_spec( + self, + request: Optional[Union[linting_service.LintSpecRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Lints the requested spec and updates the + corresponding API Spec with the lint response. This lint + response will be available in all subsequent Get and + List Spec calls to Core service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_lint_spec(): + # Create a client + client = apihub_v1.LintingServiceClient() + + # Initialize request argument(s) + request = apihub_v1.LintSpecRequest( + name="name_value", + ) + + # Make the request + client.lint_spec(request=request) + + Args: + request (Union[google.cloud.apihub_v1.types.LintSpecRequest, dict]): + The request object. The [LintSpec][ApiHub.LintSpec] method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, linting_service.LintSpecRequest): + request = linting_service.LintSpecRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.lint_spec] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "LintingServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! 
Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("LintingServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py similarity index 67% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/__init__.py rename to packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py index 4dde7a60bd0f..f8d2f54aac8c 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py @@ -16,15 +16,15 @@ from collections import OrderedDict from typing import Dict, Type -from .base import ContactServiceTransport -from .rest import ContactServiceRestInterceptor, ContactServiceRestTransport +from .base import LintingServiceTransport +from .rest import LintingServiceRestInterceptor, LintingServiceRestTransport # Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[ContactServiceTransport]] -_transport_registry["rest"] = ContactServiceRestTransport +_transport_registry = OrderedDict() # type: Dict[str, Type[LintingServiceTransport]] +_transport_registry["rest"] = LintingServiceRestTransport __all__ = ( - "ContactServiceTransport", - "ContactServiceRestTransport", - "ContactServiceRestInterceptor", + "LintingServiceTransport", + "LintingServiceRestTransport", + "LintingServiceRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/base.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/base.py new file mode 100644 index 000000000000..05bd201acdcc --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/base.py @@ -0,0 +1,281 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version +from google.cloud.apihub_v1.types import linting_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class LintingServiceTransport(abc.ABC): + """Abstract transport class for LintingService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "apihub.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_style_guide: gapic_v1.method.wrap_method( + self.get_style_guide, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_style_guide: gapic_v1.method.wrap_method( + self.update_style_guide, + default_timeout=60.0, + client_info=client_info, + ), + self.get_style_guide_contents: gapic_v1.method.wrap_method( + self.get_style_guide_contents, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.lint_spec: gapic_v1.method.wrap_method( + self.lint_spec, + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_style_guide( + self, + ) -> Callable[ + [linting_service.GetStyleGuideRequest], + Union[linting_service.StyleGuide, Awaitable[linting_service.StyleGuide]], + ]: + raise NotImplementedError() + + @property + def update_style_guide( + self, + ) -> Callable[ + [linting_service.UpdateStyleGuideRequest], + Union[linting_service.StyleGuide, Awaitable[linting_service.StyleGuide]], + ]: + raise NotImplementedError() + + @property + def get_style_guide_contents( + self, + ) -> Callable[ + [linting_service.GetStyleGuideContentsRequest], + Union[ + linting_service.StyleGuideContents, + Awaitable[linting_service.StyleGuideContents], + ], + ]: + raise NotImplementedError() + + @property + def lint_spec( + self, + ) -> Callable[ + [linting_service.LintSpecRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + 
Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("LintingServiceTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/grpc.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/grpc.py new file mode 100644 index 000000000000..1096e78975f6 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/grpc.py @@ -0,0 +1,465 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.apihub_v1.types import linting_service + +from .base import DEFAULT_CLIENT_INFO, LintingServiceTransport + + +class LintingServiceGrpcTransport(LintingServiceTransport): + """gRPC backend transport for LintingService. 
+ + This service provides all methods related to the 1p Linter. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. 
+ channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_style_guide( + self, + ) -> Callable[[linting_service.GetStyleGuideRequest], linting_service.StyleGuide]: + r"""Return a callable for the get style guide method over gRPC. + + Get the style guide being used for linting. + + Returns: + Callable[[~.GetStyleGuideRequest], + ~.StyleGuide]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_style_guide" not in self._stubs: + self._stubs["get_style_guide"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.LintingService/GetStyleGuide", + request_serializer=linting_service.GetStyleGuideRequest.serialize, + response_deserializer=linting_service.StyleGuide.deserialize, + ) + return self._stubs["get_style_guide"] + + @property + def update_style_guide( + self, + ) -> Callable[ + [linting_service.UpdateStyleGuideRequest], linting_service.StyleGuide + ]: + r"""Return a callable for the update style guide method over gRPC. + + Update the styleGuide to be used for liniting in by + API hub. + + Returns: + Callable[[~.UpdateStyleGuideRequest], + ~.StyleGuide]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_style_guide" not in self._stubs: + self._stubs["update_style_guide"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.LintingService/UpdateStyleGuide", + request_serializer=linting_service.UpdateStyleGuideRequest.serialize, + response_deserializer=linting_service.StyleGuide.deserialize, + ) + return self._stubs["update_style_guide"] + + @property + def get_style_guide_contents( + self, + ) -> Callable[ + [linting_service.GetStyleGuideContentsRequest], + linting_service.StyleGuideContents, + ]: + r"""Return a callable for the get style guide contents method over gRPC. + + Get the contents of the style guide. + + Returns: + Callable[[~.GetStyleGuideContentsRequest], + ~.StyleGuideContents]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_style_guide_contents" not in self._stubs: + self._stubs["get_style_guide_contents"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.LintingService/GetStyleGuideContents", + request_serializer=linting_service.GetStyleGuideContentsRequest.serialize, + response_deserializer=linting_service.StyleGuideContents.deserialize, + ) + return self._stubs["get_style_guide_contents"] + + @property + def lint_spec(self) -> Callable[[linting_service.LintSpecRequest], empty_pb2.Empty]: + r"""Return a callable for the lint spec method over gRPC. + + Lints the requested spec and updates the + corresponding API Spec with the lint response. This lint + response will be available in all subsequent Get and + List Spec calls to Core service. + + Returns: + Callable[[~.LintSpecRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "lint_spec" not in self._stubs: + self._stubs["lint_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.LintingService/LintSpec", + request_serializer=linting_service.LintSpecRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["lint_spec"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("LintingServiceGrpcTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/grpc_asyncio.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a1753eed897d --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/grpc_asyncio.py @@ -0,0 +1,512 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.apihub_v1.types import linting_service + +from .base import DEFAULT_CLIENT_INFO, LintingServiceTransport +from .grpc import LintingServiceGrpcTransport + + +class LintingServiceGrpcAsyncIOTransport(LintingServiceTransport): + """gRPC AsyncIO backend transport for LintingService. + + This service provides all methods related to the 1p Linter. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def get_style_guide( + self, + ) -> Callable[ + [linting_service.GetStyleGuideRequest], Awaitable[linting_service.StyleGuide] + ]: + r"""Return a callable for the get style guide method over gRPC. + + Get the style guide being used for linting. + + Returns: + Callable[[~.GetStyleGuideRequest], + Awaitable[~.StyleGuide]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_style_guide" not in self._stubs: + self._stubs["get_style_guide"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.LintingService/GetStyleGuide", + request_serializer=linting_service.GetStyleGuideRequest.serialize, + response_deserializer=linting_service.StyleGuide.deserialize, + ) + return self._stubs["get_style_guide"] + + @property + def update_style_guide( + self, + ) -> Callable[ + [linting_service.UpdateStyleGuideRequest], Awaitable[linting_service.StyleGuide] + ]: + r"""Return a callable for the update style guide method over gRPC. + + Update the styleGuide to be used for liniting in by + API hub. + + Returns: + Callable[[~.UpdateStyleGuideRequest], + Awaitable[~.StyleGuide]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_style_guide" not in self._stubs: + self._stubs["update_style_guide"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.LintingService/UpdateStyleGuide", + request_serializer=linting_service.UpdateStyleGuideRequest.serialize, + response_deserializer=linting_service.StyleGuide.deserialize, + ) + return self._stubs["update_style_guide"] + + @property + def get_style_guide_contents( + self, + ) -> Callable[ + [linting_service.GetStyleGuideContentsRequest], + Awaitable[linting_service.StyleGuideContents], + ]: + r"""Return a callable for the get style guide contents method over gRPC. + + Get the contents of the style guide. + + Returns: + Callable[[~.GetStyleGuideContentsRequest], + Awaitable[~.StyleGuideContents]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_style_guide_contents" not in self._stubs: + self._stubs["get_style_guide_contents"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.LintingService/GetStyleGuideContents", + request_serializer=linting_service.GetStyleGuideContentsRequest.serialize, + response_deserializer=linting_service.StyleGuideContents.deserialize, + ) + return self._stubs["get_style_guide_contents"] + + @property + def lint_spec( + self, + ) -> Callable[[linting_service.LintSpecRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the lint spec method over gRPC. + + Lints the requested spec and updates the + corresponding API Spec with the lint response. This lint + response will be available in all subsequent Get and + List Spec calls to Core service. + + Returns: + Callable[[~.LintSpecRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "lint_spec" not in self._stubs: + self._stubs["lint_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.LintingService/LintSpec", + request_serializer=linting_service.LintSpecRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["lint_spec"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_style_guide: gapic_v1.method_async.wrap_method( + self.get_style_guide, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_style_guide: gapic_v1.method_async.wrap_method( + self.update_style_guide, + default_timeout=60.0, + client_info=client_info, + ), + self.get_style_guide_contents: gapic_v1.method_async.wrap_method( + self.get_style_guide_contents, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.lint_spec: gapic_v1.method_async.wrap_method( + self.lint_spec, + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("LintingServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/rest.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/rest.py new file mode 100644 index 000000000000..e7504327ec31 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/rest.py @@ -0,0 +1,1206 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.apihub_v1.types import linting_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import LintingServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class LintingServiceRestInterceptor: + """Interceptor for LintingService. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the LintingServiceRestTransport. + + .. code-block:: python + class MyCustomLintingServiceInterceptor(LintingServiceRestInterceptor): + def pre_get_style_guide(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_style_guide(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_style_guide_contents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_style_guide_contents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_lint_spec(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_update_style_guide(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_style_guide(self, response): + logging.log(f"Received response: {response}") + return response + + transport = LintingServiceRestTransport(interceptor=MyCustomLintingServiceInterceptor()) + client = LintingServiceClient(transport=transport) + + + """ + + def pre_get_style_guide( + self, + request: linting_service.GetStyleGuideRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[linting_service.GetStyleGuideRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_style_guide + + Override in a subclass to manipulate the request or metadata + before they are sent to the LintingService server. 
+ """ + return request, metadata + + def post_get_style_guide( + self, response: linting_service.StyleGuide + ) -> linting_service.StyleGuide: + """Post-rpc interceptor for get_style_guide + + Override in a subclass to manipulate the response + after it is returned by the LintingService server but before + it is returned to user code. + """ + return response + + def pre_get_style_guide_contents( + self, + request: linting_service.GetStyleGuideContentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[linting_service.GetStyleGuideContentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_style_guide_contents + + Override in a subclass to manipulate the request or metadata + before they are sent to the LintingService server. + """ + return request, metadata + + def post_get_style_guide_contents( + self, response: linting_service.StyleGuideContents + ) -> linting_service.StyleGuideContents: + """Post-rpc interceptor for get_style_guide_contents + + Override in a subclass to manipulate the response + after it is returned by the LintingService server but before + it is returned to user code. + """ + return response + + def pre_lint_spec( + self, + request: linting_service.LintSpecRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[linting_service.LintSpecRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for lint_spec + + Override in a subclass to manipulate the request or metadata + before they are sent to the LintingService server. + """ + return request, metadata + + def pre_update_style_guide( + self, + request: linting_service.UpdateStyleGuideRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[linting_service.UpdateStyleGuideRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_style_guide + + Override in a subclass to manipulate the request or metadata + before they are sent to the LintingService server. 
+ """ + return request, metadata + + def post_update_style_guide( + self, response: linting_service.StyleGuide + ) -> linting_service.StyleGuide: + """Post-rpc interceptor for update_style_guide + + Override in a subclass to manipulate the response + after it is returned by the LintingService server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the LintingService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the LintingService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the LintingService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the LintingService server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the LintingService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the LintingService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the LintingService server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the LintingService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the LintingService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the LintingService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the LintingService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the LintingService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class LintingServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: LintingServiceRestInterceptor + + +class LintingServiceRestTransport(LintingServiceTransport): + """REST backend transport for LintingService. + + This service provides all methods related to the 1p Linter. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[LintingServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or LintingServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetStyleGuide(LintingServiceRestStub): + def __hash__(self): + return hash("GetStyleGuide") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: linting_service.GetStyleGuideRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> linting_service.StyleGuide: + r"""Call the get style guide method over HTTP. + + Args: + request (~.linting_service.GetStyleGuideRequest): + The request object. 
The [GetStyleGuide][ApiHub.GetStyleGuide] method's + request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.linting_service.StyleGuide: + Represents a singleton style guide + resource to be used for linting Open API + specs. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/plugins/*/styleGuide}", + }, + ] + request, metadata = self._interceptor.pre_get_style_guide(request, metadata) + pb_request = linting_service.GetStyleGuideRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = linting_service.StyleGuide() + pb_resp = linting_service.StyleGuide.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_style_guide(resp) + return resp + + class _GetStyleGuideContents(LintingServiceRestStub): + def __hash__(self): + return hash("GetStyleGuideContents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: linting_service.GetStyleGuideContentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> linting_service.StyleGuideContents: + r"""Call the get style guide contents method over HTTP. + + Args: + request (~.linting_service.GetStyleGuideContentsRequest): + The request object. The + [GetStyleGuideContents][ApiHub.GetStyleGuideContents] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.linting_service.StyleGuideContents: + The style guide contents. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/plugins/*/styleGuide}:contents", + }, + ] + request, metadata = self._interceptor.pre_get_style_guide_contents( + request, metadata + ) + pb_request = linting_service.GetStyleGuideContentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = linting_service.StyleGuideContents() + pb_resp = linting_service.StyleGuideContents.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_style_guide_contents(resp) + return resp + + class _LintSpec(LintingServiceRestStub): + def __hash__(self): + return hash("LintSpec") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: linting_service.LintSpecRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the lint spec method over HTTP. + + Args: + request (~.linting_service.LintSpecRequest): + The request object. The [LintSpec][ApiHub.LintSpec] method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}:lint", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_lint_spec(request, metadata) + pb_request = linting_service.LintSpecRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _UpdateStyleGuide(LintingServiceRestStub): + def __hash__(self): + return hash("UpdateStyleGuide") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: linting_service.UpdateStyleGuideRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> linting_service.StyleGuide: + r"""Call the update style guide method over HTTP. + + Args: + request (~.linting_service.UpdateStyleGuideRequest): + The request object. The [UpdateStyleGuide][ApiHub.UpdateStyleGuide] method's + request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.linting_service.StyleGuide: + Represents a singleton style guide + resource to be used for linting Open API + specs. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{style_guide.name=projects/*/locations/*/plugins/*/styleGuide}", + "body": "style_guide", + }, + ] + request, metadata = self._interceptor.pre_update_style_guide( + request, metadata + ) + pb_request = linting_service.UpdateStyleGuideRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = linting_service.StyleGuide() + pb_resp = linting_service.StyleGuide.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_style_guide(resp) + return resp + + @property + def get_style_guide( + self, + ) -> Callable[[linting_service.GetStyleGuideRequest], linting_service.StyleGuide]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetStyleGuide(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_style_guide_contents( + self, + ) -> Callable[ + [linting_service.GetStyleGuideContentsRequest], + linting_service.StyleGuideContents, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetStyleGuideContents(self._session, self._host, self._interceptor) # type: ignore + + @property + def lint_spec(self) -> Callable[[linting_service.LintSpecRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._LintSpec(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_style_guide( + self, + ) -> Callable[ + [linting_service.UpdateStyleGuideRequest], linting_service.StyleGuide + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateStyleGuide(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(LintingServiceRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(LintingServiceRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(LintingServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. 
+ + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(LintingServiceRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. 
+ + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(LintingServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(LintingServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("LintingServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py similarity index 88% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/__init__.py rename to packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py index 65fa5abb358e..3df245148ed6 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py 
@@ -13,6 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import CreativeServiceClient +from .client import ProvisioningClient -__all__ = ("CreativeServiceClient",) +__all__ = ("ProvisioningClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/async_client.py new file mode 100644 index 000000000000..5f52e6bf1413 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/async_client.py @@ -0,0 +1,995 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.apihub_v1.types import common_fields, provisioning_service + +from .client import ProvisioningClient +from .transports.base import DEFAULT_CLIENT_INFO, ProvisioningTransport +from .transports.grpc_asyncio import ProvisioningGrpcAsyncIOTransport + + +class ProvisioningAsyncClient: + """This service is used for managing the data plane provisioning + of the API hub. + """ + + _client: ProvisioningClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ProvisioningClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ProvisioningClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ProvisioningClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ProvisioningClient._DEFAULT_UNIVERSE + + api_hub_instance_path = staticmethod(ProvisioningClient.api_hub_instance_path) + parse_api_hub_instance_path = staticmethod( + ProvisioningClient.parse_api_hub_instance_path + ) + common_billing_account_path = staticmethod( + ProvisioningClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ProvisioningClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ProvisioningClient.common_folder_path) + parse_common_folder_path = staticmethod(ProvisioningClient.parse_common_folder_path) + common_organization_path = staticmethod(ProvisioningClient.common_organization_path) + parse_common_organization_path = staticmethod( + ProvisioningClient.parse_common_organization_path + ) + common_project_path = staticmethod(ProvisioningClient.common_project_path) + parse_common_project_path = staticmethod( + ProvisioningClient.parse_common_project_path + ) + common_location_path = staticmethod(ProvisioningClient.common_location_path) + parse_common_location_path = staticmethod( + ProvisioningClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProvisioningAsyncClient: The constructed client. 
+ """ + return ProvisioningClient.from_service_account_info.__func__(ProvisioningAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProvisioningAsyncClient: The constructed client. + """ + return ProvisioningClient.from_service_account_file.__func__(ProvisioningAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ProvisioningClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ProvisioningTransport: + """Returns the transport used by the client instance. + + Returns: + ProvisioningTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = ProvisioningClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ProvisioningTransport, Callable[..., ProvisioningTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the provisioning async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProvisioningTransport,Callable[..., ProvisioningTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProvisioningTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = ProvisioningClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_api_hub_instance( + self, + request: Optional[ + Union[provisioning_service.CreateApiHubInstanceRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + api_hub_instance: Optional[common_fields.ApiHubInstance] = None, + api_hub_instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Provisions instance resources for the API Hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_create_api_hub_instance(): + # Create a client + client = apihub_v1.ProvisioningAsyncClient() + + # Initialize request argument(s) + api_hub_instance = apihub_v1.ApiHubInstance() + api_hub_instance.config.cmek_key_name = "cmek_key_name_value" + + request = apihub_v1.CreateApiHubInstanceRequest( + parent="parent_value", + api_hub_instance=api_hub_instance, + ) + + # Make the request + operation = client.create_api_hub_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.CreateApiHubInstanceRequest, dict]]): + The request object. The + [CreateApiHubInstance][google.cloud.apihub.v1.Provisioning.CreateApiHubInstance] + method's request. 
+ parent (:class:`str`): + Required. The parent resource for the Api Hub instance + resource. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + api_hub_instance (:class:`google.cloud.apihub_v1.types.ApiHubInstance`): + Required. The ApiHub instance. + This corresponds to the ``api_hub_instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + api_hub_instance_id (:class:`str`): + Optional. Identifier to assign to the Api Hub instance. + Must be unique within scope of the parent resource. If + the field is not provided, system generated id will be + used. + + This value should be 4-40 characters, and valid + characters are ``/[a-z][A-Z][0-9]-_/``. + + This corresponds to the ``api_hub_instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.apihub_v1.types.ApiHubInstance` An ApiHubInstance represents the instance resources of the API Hub. + Currently, only one ApiHub instance is allowed for + each project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, api_hub_instance, api_hub_instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, provisioning_service.CreateApiHubInstanceRequest): + request = provisioning_service.CreateApiHubInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if api_hub_instance is not None: + request.api_hub_instance = api_hub_instance + if api_hub_instance_id is not None: + request.api_hub_instance_id = api_hub_instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_api_hub_instance + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + common_fields.ApiHubInstance, + metadata_type=common_fields.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_api_hub_instance( + self, + request: Optional[ + Union[provisioning_service.GetApiHubInstanceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ApiHubInstance: + r"""Gets details of a single API Hub instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_api_hub_instance(): + # Create a client + client = apihub_v1.ProvisioningAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetApiHubInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_api_hub_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetApiHubInstanceRequest, dict]]): + The request object. The + [GetApiHubInstance][google.cloud.apihub.v1.Provisioning.GetApiHubInstance] + method's request. + name (:class:`str`): + Required. The name of the Api Hub instance to retrieve. + Format: + ``projects/{project}/locations/{location}/apiHubInstances/{apiHubInstance}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.ApiHubInstance: + An ApiHubInstance represents the + instance resources of the API Hub. + Currently, only one ApiHub instance is + allowed for each project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, provisioning_service.GetApiHubInstanceRequest): + request = provisioning_service.GetApiHubInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_api_hub_instance + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def lookup_api_hub_instance( + self, + request: Optional[ + Union[provisioning_service.LookupApiHubInstanceRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> provisioning_service.LookupApiHubInstanceResponse: + r"""Looks up an Api Hub instance in a given GCP project. + There will always be only one Api Hub instance for a GCP + project across all locations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_lookup_api_hub_instance(): + # Create a client + client = apihub_v1.ProvisioningAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.LookupApiHubInstanceRequest( + parent="parent_value", + ) + + # Make the request + response = await client.lookup_api_hub_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.LookupApiHubInstanceRequest, dict]]): + The request object. The + [LookupApiHubInstance][google.cloud.apihub.v1.Provisioning.LookupApiHubInstance] + method's request. + parent (:class:`str`): + Required. There will always be only one Api Hub instance + for a GCP project across all locations. The parent + resource for the Api Hub instance resource. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.LookupApiHubInstanceResponse: + The + [LookupApiHubInstance][google.cloud.apihub.v1.Provisioning.LookupApiHubInstance] + method's response.\` + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, provisioning_service.LookupApiHubInstanceRequest): + request = provisioning_service.LookupApiHubInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.lookup_api_hub_instance + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "ProvisioningAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProvisioningAsyncClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py new file mode 100644 index 000000000000..56a83e91bd00 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py @@ -0,0 +1,1409 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.apihub_v1.types import common_fields, provisioning_service + +from .transports.base import DEFAULT_CLIENT_INFO, ProvisioningTransport +from .transports.rest import ProvisioningRestTransport + + +class ProvisioningClientMeta(type): + """Metaclass for the Provisioning client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ProvisioningTransport]] + _transport_registry["rest"] = ProvisioningRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ProvisioningTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ProvisioningClient(metaclass=ProvisioningClientMeta): + """This service is used for managing the data plane provisioning + of the API hub. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "apihub.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "apihub.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProvisioningClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProvisioningClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ProvisioningTransport: + """Returns the transport used by the client instance. + + Returns: + ProvisioningTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def api_hub_instance_path( + project: str, + location: str, + api_hub_instance: str, + ) -> str: + """Returns a fully-qualified api_hub_instance string.""" + return "projects/{project}/locations/{location}/apiHubInstances/{api_hub_instance}".format( + project=project, + location=location, + api_hub_instance=api_hub_instance, + ) + + @staticmethod + def parse_api_hub_instance_path(path: str) -> Dict[str, str]: + """Parses a api_hub_instance path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apiHubInstances/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + 
@staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. 
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. 
If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ProvisioningClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ProvisioningClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ProvisioningClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ProvisioningClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ProvisioningClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ProvisioningClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. 
+ + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ProvisioningTransport, Callable[..., ProvisioningTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the provisioning client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ProvisioningTransport,Callable[..., ProvisioningTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProvisioningTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ProvisioningClient._read_environment_variables() + self._client_cert_source = ProvisioningClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ProvisioningClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ProvisioningTransport) + if transport_provided: + # transport is a ProvisioningTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ProvisioningTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or ProvisioningClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ProvisioningTransport], Callable[..., ProvisioningTransport] + ] = ( + ProvisioningClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ProvisioningTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_api_hub_instance( + self, + request: Optional[ + Union[provisioning_service.CreateApiHubInstanceRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + api_hub_instance: Optional[common_fields.ApiHubInstance] = None, + api_hub_instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Provisions instance resources for the API Hub. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_create_api_hub_instance(): + # Create a client + client = apihub_v1.ProvisioningClient() + + # Initialize request argument(s) + api_hub_instance = apihub_v1.ApiHubInstance() + api_hub_instance.config.cmek_key_name = "cmek_key_name_value" + + request = apihub_v1.CreateApiHubInstanceRequest( + parent="parent_value", + api_hub_instance=api_hub_instance, + ) + + # Make the request + operation = client.create_api_hub_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.CreateApiHubInstanceRequest, dict]): + The request object. The + [CreateApiHubInstance][google.cloud.apihub.v1.Provisioning.CreateApiHubInstance] + method's request. + parent (str): + Required. The parent resource for the Api Hub instance + resource. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + api_hub_instance (google.cloud.apihub_v1.types.ApiHubInstance): + Required. The ApiHub instance. + This corresponds to the ``api_hub_instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + api_hub_instance_id (str): + Optional. Identifier to assign to the Api Hub instance. + Must be unique within scope of the parent resource. If + the field is not provided, system generated id will be + used. + + This value should be 4-40 characters, and valid + characters are ``/[a-z][A-Z][0-9]-_/``. 
+ + This corresponds to the ``api_hub_instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.apihub_v1.types.ApiHubInstance` An ApiHubInstance represents the instance resources of the API Hub. + Currently, only one ApiHub instance is allowed for + each project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, api_hub_instance, api_hub_instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, provisioning_service.CreateApiHubInstanceRequest): + request = provisioning_service.CreateApiHubInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if api_hub_instance is not None: + request.api_hub_instance = api_hub_instance + if api_hub_instance_id is not None: + request.api_hub_instance_id = api_hub_instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_api_hub_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + common_fields.ApiHubInstance, + metadata_type=common_fields.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_api_hub_instance( + self, + request: Optional[ + Union[provisioning_service.GetApiHubInstanceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ApiHubInstance: + r"""Gets details of a single API Hub instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_api_hub_instance(): + # Create a client + client = apihub_v1.ProvisioningClient() + + # Initialize request argument(s) + request = apihub_v1.GetApiHubInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_api_hub_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetApiHubInstanceRequest, dict]): + The request object. The + [GetApiHubInstance][google.cloud.apihub.v1.Provisioning.GetApiHubInstance] + method's request. + name (str): + Required. The name of the Api Hub instance to retrieve. + Format: + ``projects/{project}/locations/{location}/apiHubInstances/{apiHubInstance}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.ApiHubInstance: + An ApiHubInstance represents the + instance resources of the API Hub. + Currently, only one ApiHub instance is + allowed for each project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, provisioning_service.GetApiHubInstanceRequest): + request = provisioning_service.GetApiHubInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_api_hub_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def lookup_api_hub_instance( + self, + request: Optional[ + Union[provisioning_service.LookupApiHubInstanceRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> provisioning_service.LookupApiHubInstanceResponse: + r"""Looks up an Api Hub instance in a given GCP project. + There will always be only one Api Hub instance for a GCP + project across all locations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_lookup_api_hub_instance(): + # Create a client + client = apihub_v1.ProvisioningClient() + + # Initialize request argument(s) + request = apihub_v1.LookupApiHubInstanceRequest( + parent="parent_value", + ) + + # Make the request + response = client.lookup_api_hub_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.LookupApiHubInstanceRequest, dict]): + The request object. The + [LookupApiHubInstance][google.cloud.apihub.v1.Provisioning.LookupApiHubInstance] + method's request. + parent (str): + Required. There will always be only one Api Hub instance + for a GCP project across all locations. The parent + resource for the Api Hub instance resource. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.LookupApiHubInstanceResponse: + The + [LookupApiHubInstance][google.cloud.apihub.v1.Provisioning.LookupApiHubInstance] + method's response.\` + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, provisioning_service.LookupApiHubInstanceRequest): + request = provisioning_service.LookupApiHubInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.lookup_api_hub_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ProvisioningClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ProvisioningClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py similarity index 65% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/__init__.py rename to packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py index a187611acc17..c82beafe4a3e 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py @@ -16,15 +16,15 @@ from collections import OrderedDict from typing import Dict, Type -from .base import LineItemServiceTransport -from .rest import LineItemServiceRestInterceptor, LineItemServiceRestTransport +from .base import ProvisioningTransport +from .rest import ProvisioningRestInterceptor, ProvisioningRestTransport # Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[LineItemServiceTransport]] -_transport_registry["rest"] = LineItemServiceRestTransport +_transport_registry = OrderedDict() # type: Dict[str, Type[ProvisioningTransport]] +_transport_registry["rest"] = ProvisioningRestTransport __all__ = ( - "LineItemServiceTransport", - "LineItemServiceRestTransport", - "LineItemServiceRestInterceptor", + "ProvisioningTransport", + "ProvisioningRestTransport", + "ProvisioningRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/base.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/base.py new file mode 100644 index 000000000000..480617227784 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/base.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version +from google.cloud.apihub_v1.types import common_fields, provisioning_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ProvisioningTransport(abc.ABC): + """Abstract transport class for Provisioning.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "apihub.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. 
Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_api_hub_instance: gapic_v1.method.wrap_method( + self.create_api_hub_instance, + default_timeout=60.0, + client_info=client_info, + ), + self.get_api_hub_instance: gapic_v1.method.wrap_method( + self.get_api_hub_instance, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.lookup_api_hub_instance: gapic_v1.method.wrap_method( + self.lookup_api_hub_instance, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_api_hub_instance( + self, + ) -> Callable[ + [provisioning_service.CreateApiHubInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_api_hub_instance( + self, + ) -> Callable[ + [provisioning_service.GetApiHubInstanceRequest], + Union[common_fields.ApiHubInstance, Awaitable[common_fields.ApiHubInstance]], + ]: + raise NotImplementedError() + + @property + def lookup_api_hub_instance( + self, + ) -> Callable[ + [provisioning_service.LookupApiHubInstanceRequest], + Union[ + provisioning_service.LookupApiHubInstanceResponse, + Awaitable[provisioning_service.LookupApiHubInstanceResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + 
locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ProvisioningTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/grpc.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/grpc.py new file mode 100644 index 000000000000..70851c33861e --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/grpc.py @@ -0,0 +1,456 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.apihub_v1.types import common_fields, provisioning_service + +from .base import DEFAULT_CLIENT_INFO, ProvisioningTransport + + +class ProvisioningGrpcTransport(ProvisioningTransport): + """gRPC backend transport for Provisioning. 
+ + This service is used for managing the data plane provisioning + of the API hub. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. 
+ channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + 
credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def create_api_hub_instance( + self, + ) -> Callable[ + [provisioning_service.CreateApiHubInstanceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create api hub instance method over gRPC. + + Provisions instance resources for the API Hub. + + Returns: + Callable[[~.CreateApiHubInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_api_hub_instance" not in self._stubs: + self._stubs["create_api_hub_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.Provisioning/CreateApiHubInstance", + request_serializer=provisioning_service.CreateApiHubInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_api_hub_instance"] + + @property + def get_api_hub_instance( + self, + ) -> Callable[ + [provisioning_service.GetApiHubInstanceRequest], common_fields.ApiHubInstance + ]: + r"""Return a callable for the get api hub instance method over gRPC. + + Gets details of a single API Hub instance. + + Returns: + Callable[[~.GetApiHubInstanceRequest], + ~.ApiHubInstance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_api_hub_instance" not in self._stubs: + self._stubs["get_api_hub_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.Provisioning/GetApiHubInstance", + request_serializer=provisioning_service.GetApiHubInstanceRequest.serialize, + response_deserializer=common_fields.ApiHubInstance.deserialize, + ) + return self._stubs["get_api_hub_instance"] + + @property + def lookup_api_hub_instance( + self, + ) -> Callable[ + [provisioning_service.LookupApiHubInstanceRequest], + provisioning_service.LookupApiHubInstanceResponse, + ]: + r"""Return a callable for the lookup api hub instance method over gRPC. + + Looks up an Api Hub instance in a given GCP project. + There will always be only one Api Hub instance for a GCP + project across all locations. + + Returns: + Callable[[~.LookupApiHubInstanceRequest], + ~.LookupApiHubInstanceResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "lookup_api_hub_instance" not in self._stubs: + self._stubs["lookup_api_hub_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.Provisioning/LookupApiHubInstance", + request_serializer=provisioning_service.LookupApiHubInstanceRequest.serialize, + response_deserializer=provisioning_service.LookupApiHubInstanceResponse.deserialize, + ) + return self._stubs["lookup_api_hub_instance"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ProvisioningGrpcTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/grpc_asyncio.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/grpc_asyncio.py new file mode 100644 index 000000000000..ad2087a81b75 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/grpc_asyncio.py @@ -0,0 +1,498 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.apihub_v1.types import common_fields, provisioning_service + +from .base import DEFAULT_CLIENT_INFO, ProvisioningTransport +from .grpc import ProvisioningGrpcTransport + + +class ProvisioningGrpcAsyncIOTransport(ProvisioningTransport): + """gRPC AsyncIO backend transport for Provisioning. + + This service is used for managing the data plane provisioning + of the API hub. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. 
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_api_hub_instance( + self, + ) -> Callable[ + [provisioning_service.CreateApiHubInstanceRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create api hub instance method over gRPC. + + Provisions instance resources for the API Hub. + + Returns: + Callable[[~.CreateApiHubInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_api_hub_instance" not in self._stubs: + self._stubs["create_api_hub_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.Provisioning/CreateApiHubInstance", + request_serializer=provisioning_service.CreateApiHubInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_api_hub_instance"] + + @property + def get_api_hub_instance( + self, + ) -> Callable[ + [provisioning_service.GetApiHubInstanceRequest], + Awaitable[common_fields.ApiHubInstance], + ]: + r"""Return a callable for the get api hub instance method over gRPC. + + Gets details of a single API Hub instance. + + Returns: + Callable[[~.GetApiHubInstanceRequest], + Awaitable[~.ApiHubInstance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_api_hub_instance" not in self._stubs: + self._stubs["get_api_hub_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.Provisioning/GetApiHubInstance", + request_serializer=provisioning_service.GetApiHubInstanceRequest.serialize, + response_deserializer=common_fields.ApiHubInstance.deserialize, + ) + return self._stubs["get_api_hub_instance"] + + @property + def lookup_api_hub_instance( + self, + ) -> Callable[ + [provisioning_service.LookupApiHubInstanceRequest], + Awaitable[provisioning_service.LookupApiHubInstanceResponse], + ]: + r"""Return a callable for the lookup api hub instance method over gRPC. + + Looks up an Api Hub instance in a given GCP project. + There will always be only one Api Hub instance for a GCP + project across all locations. + + Returns: + Callable[[~.LookupApiHubInstanceRequest], + Awaitable[~.LookupApiHubInstanceResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "lookup_api_hub_instance" not in self._stubs: + self._stubs["lookup_api_hub_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.Provisioning/LookupApiHubInstance", + request_serializer=provisioning_service.LookupApiHubInstanceRequest.serialize, + response_deserializer=provisioning_service.LookupApiHubInstanceResponse.deserialize, + ) + return self._stubs["lookup_api_hub_instance"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_api_hub_instance: gapic_v1.method_async.wrap_method( + self.create_api_hub_instance, + default_timeout=60.0, + client_info=client_info, + ), + self.get_api_hub_instance: gapic_v1.method_async.wrap_method( + self.get_api_hub_instance, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.lookup_api_hub_instance: gapic_v1.method_async.wrap_method( + self.lookup_api_hub_instance, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("ProvisioningGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/rest.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/rest.py new file mode 100644 index 000000000000..0b05771fdd54 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/rest.py @@ -0,0 +1,1178 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.apihub_v1.types import common_fields, provisioning_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ProvisioningTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ProvisioningRestInterceptor: + """Interceptor for Provisioning. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ProvisioningRestTransport. + + .. 
code-block:: python + class MyCustomProvisioningInterceptor(ProvisioningRestInterceptor): + def pre_create_api_hub_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_api_hub_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_api_hub_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_api_hub_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_lookup_api_hub_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_lookup_api_hub_instance(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ProvisioningRestTransport(interceptor=MyCustomProvisioningInterceptor()) + client = ProvisioningClient(transport=transport) + + + """ + + def pre_create_api_hub_instance( + self, + request: provisioning_service.CreateApiHubInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + provisioning_service.CreateApiHubInstanceRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_api_hub_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Provisioning server. + """ + return request, metadata + + def post_create_api_hub_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_api_hub_instance + + Override in a subclass to manipulate the response + after it is returned by the Provisioning server but before + it is returned to user code. 
+ """ + return response + + def pre_get_api_hub_instance( + self, + request: provisioning_service.GetApiHubInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + provisioning_service.GetApiHubInstanceRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_api_hub_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Provisioning server. + """ + return request, metadata + + def post_get_api_hub_instance( + self, response: common_fields.ApiHubInstance + ) -> common_fields.ApiHubInstance: + """Post-rpc interceptor for get_api_hub_instance + + Override in a subclass to manipulate the response + after it is returned by the Provisioning server but before + it is returned to user code. + """ + return response + + def pre_lookup_api_hub_instance( + self, + request: provisioning_service.LookupApiHubInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + provisioning_service.LookupApiHubInstanceRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for lookup_api_hub_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Provisioning server. + """ + return request, metadata + + def post_lookup_api_hub_instance( + self, response: provisioning_service.LookupApiHubInstanceResponse + ) -> provisioning_service.LookupApiHubInstanceResponse: + """Post-rpc interceptor for lookup_api_hub_instance + + Override in a subclass to manipulate the response + after it is returned by the Provisioning server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Provisioning server. 
+ """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Provisioning server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Provisioning server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Provisioning server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Provisioning server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Provisioning server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Provisioning server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Provisioning server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Provisioning server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Provisioning server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Provisioning server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Provisioning server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ProvisioningRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ProvisioningRestInterceptor + + +class ProvisioningRestTransport(ProvisioningTransport): + """REST backend transport for Provisioning. + + This service is used for managing the data plane provisioning + of the API hub. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ProvisioningRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ProvisioningRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateApiHubInstance(ProvisioningRestStub): + def __hash__(self): + return hash("CreateApiHubInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: provisioning_service.CreateApiHubInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create api hub instance method over HTTP. + + Args: + request (~.provisioning_service.CreateApiHubInstanceRequest): + The request object. The + [CreateApiHubInstance][google.cloud.apihub.v1.Provisioning.CreateApiHubInstance] + method's request. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/apiHubInstances", + "body": "api_hub_instance", + }, + ] + request, metadata = self._interceptor.pre_create_api_hub_instance( + request, metadata + ) + pb_request = provisioning_service.CreateApiHubInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_api_hub_instance(resp) + return resp + + class _GetApiHubInstance(ProvisioningRestStub): + def __hash__(self): + return hash("GetApiHubInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: provisioning_service.GetApiHubInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common_fields.ApiHubInstance: + r"""Call the get api hub instance method over HTTP. + + Args: + request (~.provisioning_service.GetApiHubInstanceRequest): + The request object. The + [GetApiHubInstance][google.cloud.apihub.v1.Provisioning.GetApiHubInstance] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common_fields.ApiHubInstance: + An ApiHubInstance represents the + instance resources of the API Hub. + Currently, only one ApiHub instance is + allowed for each project. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/apiHubInstances/*}", + }, + ] + request, metadata = self._interceptor.pre_get_api_hub_instance( + request, metadata + ) + pb_request = provisioning_service.GetApiHubInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common_fields.ApiHubInstance() + pb_resp = common_fields.ApiHubInstance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_api_hub_instance(resp) + return resp + + class _LookupApiHubInstance(ProvisioningRestStub): + def __hash__(self): + return hash("LookupApiHubInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: provisioning_service.LookupApiHubInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> provisioning_service.LookupApiHubInstanceResponse: + r"""Call the lookup api hub instance method over HTTP. + + Args: + request (~.provisioning_service.LookupApiHubInstanceRequest): + The request object. The + [LookupApiHubInstance][google.cloud.apihub.v1.Provisioning.LookupApiHubInstance] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.provisioning_service.LookupApiHubInstanceResponse: + The + [LookupApiHubInstance][google.cloud.apihub.v1.Provisioning.LookupApiHubInstance] + method's response.\` + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/apiHubInstances:lookup", + }, + ] + request, metadata = self._interceptor.pre_lookup_api_hub_instance( + request, metadata + ) + pb_request = provisioning_service.LookupApiHubInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = provisioning_service.LookupApiHubInstanceResponse() + pb_resp = provisioning_service.LookupApiHubInstanceResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_lookup_api_hub_instance(resp) + return resp + + @property + def create_api_hub_instance( + self, + ) -> Callable[ + [provisioning_service.CreateApiHubInstanceRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateApiHubInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_api_hub_instance( + self, + ) -> Callable[ + [provisioning_service.GetApiHubInstanceRequest], common_fields.ApiHubInstance + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetApiHubInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def lookup_api_hub_instance( + self, + ) -> Callable[ + [provisioning_service.LookupApiHubInstanceRequest], + provisioning_service.LookupApiHubInstanceResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._LookupApiHubInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(ProvisioningRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(ProvisioningRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ProvisioningRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(ProvisioningRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ProvisioningRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ProvisioningRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ProvisioningRestTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py new file mode 100644 index 000000000000..28875a7e7af2 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import RuntimeProjectAttachmentServiceClient + +__all__ = ("RuntimeProjectAttachmentServiceClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/async_client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/async_client.py new file mode 100644 index 000000000000..aa2a3acd7dd3 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/async_client.py @@ -0,0 +1,1283 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.apihub_v1.services.runtime_project_attachment_service import pagers +from google.cloud.apihub_v1.types import runtime_project_attachment_service + +from .client import RuntimeProjectAttachmentServiceClient +from .transports.base import ( + DEFAULT_CLIENT_INFO, + RuntimeProjectAttachmentServiceTransport, +) +from .transports.grpc_asyncio import RuntimeProjectAttachmentServiceGrpcAsyncIOTransport + + +class RuntimeProjectAttachmentServiceAsyncClient: + """This service is used for managing the runtime project + attachments. + """ + + _client: RuntimeProjectAttachmentServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = RuntimeProjectAttachmentServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = RuntimeProjectAttachmentServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + RuntimeProjectAttachmentServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = RuntimeProjectAttachmentServiceClient._DEFAULT_UNIVERSE + + runtime_project_attachment_path = staticmethod( + RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path + ) + parse_runtime_project_attachment_path = staticmethod( + RuntimeProjectAttachmentServiceClient.parse_runtime_project_attachment_path + ) + common_billing_account_path = staticmethod( + RuntimeProjectAttachmentServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + RuntimeProjectAttachmentServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + RuntimeProjectAttachmentServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + RuntimeProjectAttachmentServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + RuntimeProjectAttachmentServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + RuntimeProjectAttachmentServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + RuntimeProjectAttachmentServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + RuntimeProjectAttachmentServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + RuntimeProjectAttachmentServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + RuntimeProjectAttachmentServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RuntimeProjectAttachmentServiceAsyncClient: The constructed client. + """ + return RuntimeProjectAttachmentServiceClient.from_service_account_info.__func__(RuntimeProjectAttachmentServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RuntimeProjectAttachmentServiceAsyncClient: The constructed client. + """ + return RuntimeProjectAttachmentServiceClient.from_service_account_file.__func__(RuntimeProjectAttachmentServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return RuntimeProjectAttachmentServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> RuntimeProjectAttachmentServiceTransport: + """Returns the transport used by the client instance. + + Returns: + RuntimeProjectAttachmentServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = RuntimeProjectAttachmentServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + RuntimeProjectAttachmentServiceTransport, + Callable[..., RuntimeProjectAttachmentServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the runtime project attachment service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,RuntimeProjectAttachmentServiceTransport,Callable[..., RuntimeProjectAttachmentServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RuntimeProjectAttachmentServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = RuntimeProjectAttachmentServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_runtime_project_attachment( + self, + request: Optional[ + Union[ + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + runtime_project_attachment: Optional[ + runtime_project_attachment_service.RuntimeProjectAttachment + ] = None, + runtime_project_attachment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> runtime_project_attachment_service.RuntimeProjectAttachment: + r"""Attaches a runtime project to the host project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_create_runtime_project_attachment(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() + + # Initialize request argument(s) + runtime_project_attachment = apihub_v1.RuntimeProjectAttachment() + runtime_project_attachment.runtime_project = "runtime_project_value" + + request = apihub_v1.CreateRuntimeProjectAttachmentRequest( + parent="parent_value", + runtime_project_attachment_id="runtime_project_attachment_id_value", + runtime_project_attachment=runtime_project_attachment, + ) + + # Make the request + response = await client.create_runtime_project_attachment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.CreateRuntimeProjectAttachmentRequest, dict]]): + The request object. The + [CreateRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment] + method's request. + parent (:class:`str`): + Required. The parent resource for the Runtime Project + Attachment. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + runtime_project_attachment (:class:`google.cloud.apihub_v1.types.RuntimeProjectAttachment`): + Required. The Runtime Project + Attachment to create. + + This corresponds to the ``runtime_project_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + runtime_project_attachment_id (:class:`str`): + Required. The ID to use for the Runtime Project + Attachment, which will become the final component of the + Runtime Project Attachment's name. 
The ID must be the + same as the project ID of the Google cloud project + specified in the + runtime_project_attachment.runtime_project field. + + This corresponds to the ``runtime_project_attachment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.RuntimeProjectAttachment: + Runtime project attachment represents + an attachment from the runtime project + to the host project. Api Hub looks for + deployments in the attached runtime + projects and creates corresponding + resources in Api Hub for the discovered + deployments. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, runtime_project_attachment, runtime_project_attachment_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, + ): + request = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if runtime_project_attachment is not None: + request.runtime_project_attachment = runtime_project_attachment + if runtime_project_attachment_id is not None: + request.runtime_project_attachment_id = runtime_project_attachment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_runtime_project_attachment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_runtime_project_attachment( + self, + request: Optional[ + Union[ + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> runtime_project_attachment_service.RuntimeProjectAttachment: + r"""Gets a runtime project attachment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_get_runtime_project_attachment(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.GetRuntimeProjectAttachmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_runtime_project_attachment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.GetRuntimeProjectAttachmentRequest, dict]]): + The request object. The + [GetRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment] + method's request. + name (:class:`str`): + Required. The name of the API resource to retrieve. + Format: + ``projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.RuntimeProjectAttachment: + Runtime project attachment represents + an attachment from the runtime project + to the host project. Api Hub looks for + deployments in the attached runtime + projects and creates corresponding + resources in Api Hub for the discovered + deployments. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, + ): + request = ( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_runtime_project_attachment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_runtime_project_attachments( + self, + request: Optional[ + Union[ + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRuntimeProjectAttachmentsAsyncPager: + r"""List runtime projects attached to the host project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_list_runtime_project_attachments(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.ListRuntimeProjectAttachmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_runtime_project_attachments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest, dict]]): + The request object. The + [ListRuntimeProjectAttachments][google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments] + method's request. + parent (:class:`str`): + Required. The parent, which owns this collection of + runtime project attachments. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.runtime_project_attachment_service.pagers.ListRuntimeProjectAttachmentsAsyncPager: + The + [ListRuntimeProjectAttachments][google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments] + method's response. + + Iterating over this object will yield results and + resolve additional pages automatically. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, + ): + request = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_runtime_project_attachments + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListRuntimeProjectAttachmentsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_runtime_project_attachment( + self, + request: Optional[ + Union[ + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a runtime project attachment in the API Hub. + This call will detach the runtime project from the host + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_delete_runtime_project_attachment(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteRuntimeProjectAttachmentRequest( + name="name_value", + ) + + # Make the request + await client.delete_runtime_project_attachment(request=request) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.DeleteRuntimeProjectAttachmentRequest, dict]]): + The request object. The + [DeleteRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment] + method's request. + name (:class:`str`): + Required. The name of the Runtime Project Attachment to + delete. Format: + ``projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, + ): + request = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_runtime_project_attachment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def lookup_runtime_project_attachment( + self, + request: Optional[ + Union[ + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse: + r"""Look up a runtime project attachment. This API can be + called in the context of any project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + async def sample_lookup_runtime_project_attachment(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() + + # Initialize request argument(s) + request = apihub_v1.LookupRuntimeProjectAttachmentRequest( + name="name_value", + ) + + # Make the request + response = await client.lookup_runtime_project_attachment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.apihub_v1.types.LookupRuntimeProjectAttachmentRequest, dict]]): + The request object. The + [LookupRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.LookupRuntimeProjectAttachment] + method's request. + name (:class:`str`): + Required. Runtime project ID to look up runtime project + attachment for. Lookup happens across all regions. + Expected format: + ``projects/{project}/locations/{location}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.LookupRuntimeProjectAttachmentResponse: + The + [ListRuntimeProjectAttachments][google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments] + method's response. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, + ): + request = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.lookup_runtime_project_attachment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "RuntimeProjectAttachmentServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("RuntimeProjectAttachmentServiceAsyncClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py new file mode 100644 index 000000000000..dc9c1039381a --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py @@ -0,0 +1,1705 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.apihub_v1.services.runtime_project_attachment_service import pagers +from google.cloud.apihub_v1.types import runtime_project_attachment_service + +from .transports.base import ( + DEFAULT_CLIENT_INFO, + RuntimeProjectAttachmentServiceTransport, +) +from .transports.rest import RuntimeProjectAttachmentServiceRestTransport + + +class RuntimeProjectAttachmentServiceClientMeta(type): + """Metaclass for the RuntimeProjectAttachmentService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[RuntimeProjectAttachmentServiceTransport]] + _transport_registry["rest"] = RuntimeProjectAttachmentServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[RuntimeProjectAttachmentServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RuntimeProjectAttachmentServiceClient( + metaclass=RuntimeProjectAttachmentServiceClientMeta +): + """This service is used for managing the runtime project + attachments. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "apihub.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "apihub.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RuntimeProjectAttachmentServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RuntimeProjectAttachmentServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RuntimeProjectAttachmentServiceTransport: + """Returns the transport used by the client instance. + + Returns: + RuntimeProjectAttachmentServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def runtime_project_attachment_path( + project: str, + location: str, + runtime_project_attachment: str, + ) -> str: + """Returns a fully-qualified runtime_project_attachment string.""" + return "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".format( + project=project, + location=location, + runtime_project_attachment=runtime_project_attachment, + ) + + @staticmethod + def parse_runtime_project_attachment_path(path: str) -> Dict[str, str]: + """Parses a runtime_project_attachment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/runtimeProjectAttachments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component 
segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. 
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. 
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. 
If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = RuntimeProjectAttachmentServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = RuntimeProjectAttachmentServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + RuntimeProjectAttachmentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = RuntimeProjectAttachmentServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = RuntimeProjectAttachmentServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or RuntimeProjectAttachmentServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + RuntimeProjectAttachmentServiceTransport, + Callable[..., RuntimeProjectAttachmentServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the runtime project attachment service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,RuntimeProjectAttachmentServiceTransport,Callable[..., RuntimeProjectAttachmentServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RuntimeProjectAttachmentServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = RuntimeProjectAttachmentServiceClient._read_environment_variables() + self._client_cert_source = ( + RuntimeProjectAttachmentServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = ( + RuntimeProjectAttachmentServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance( + transport, RuntimeProjectAttachmentServiceTransport + ) + if transport_provided: + # transport is a RuntimeProjectAttachmentServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(RuntimeProjectAttachmentServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or RuntimeProjectAttachmentServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[RuntimeProjectAttachmentServiceTransport], + Callable[..., RuntimeProjectAttachmentServiceTransport], + ] = ( + RuntimeProjectAttachmentServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., RuntimeProjectAttachmentServiceTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_runtime_project_attachment( + self, + request: Optional[ + Union[ + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + runtime_project_attachment: Optional[ + runtime_project_attachment_service.RuntimeProjectAttachment + ] = None, + runtime_project_attachment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> 
runtime_project_attachment_service.RuntimeProjectAttachment: + r"""Attaches a runtime project to the host project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_create_runtime_project_attachment(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceClient() + + # Initialize request argument(s) + runtime_project_attachment = apihub_v1.RuntimeProjectAttachment() + runtime_project_attachment.runtime_project = "runtime_project_value" + + request = apihub_v1.CreateRuntimeProjectAttachmentRequest( + parent="parent_value", + runtime_project_attachment_id="runtime_project_attachment_id_value", + runtime_project_attachment=runtime_project_attachment, + ) + + # Make the request + response = client.create_runtime_project_attachment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.CreateRuntimeProjectAttachmentRequest, dict]): + The request object. The + [CreateRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment] + method's request. + parent (str): + Required. The parent resource for the Runtime Project + Attachment. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + runtime_project_attachment (google.cloud.apihub_v1.types.RuntimeProjectAttachment): + Required. The Runtime Project + Attachment to create. 
+ + This corresponds to the ``runtime_project_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + runtime_project_attachment_id (str): + Required. The ID to use for the Runtime Project + Attachment, which will become the final component of the + Runtime Project Attachment's name. The ID must be the + same as the project ID of the Google cloud project + specified in the + runtime_project_attachment.runtime_project field. + + This corresponds to the ``runtime_project_attachment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.RuntimeProjectAttachment: + Runtime project attachment represents + an attachment from the runtime project + to the host project. Api Hub looks for + deployments in the attached runtime + projects and creates corresponding + resources in Api Hub for the discovered + deployments. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, runtime_project_attachment, runtime_project_attachment_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, + ): + request = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if runtime_project_attachment is not None: + request.runtime_project_attachment = runtime_project_attachment + if runtime_project_attachment_id is not None: + request.runtime_project_attachment_id = runtime_project_attachment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_runtime_project_attachment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_runtime_project_attachment( + self, + request: Optional[ + Union[ + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> runtime_project_attachment_service.RuntimeProjectAttachment: + r"""Gets a runtime project attachment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_get_runtime_project_attachment(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceClient() + + # Initialize request argument(s) + request = apihub_v1.GetRuntimeProjectAttachmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_runtime_project_attachment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.GetRuntimeProjectAttachmentRequest, dict]): + The request object. The + [GetRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment] + method's request. + name (str): + Required. The name of the API resource to retrieve. + Format: + ``projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.RuntimeProjectAttachment: + Runtime project attachment represents + an attachment from the runtime project + to the host project. Api Hub looks for + deployments in the attached runtime + projects and creates corresponding + resources in Api Hub for the discovered + deployments. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, + ): + request = ( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_runtime_project_attachment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_runtime_project_attachments( + self, + request: Optional[ + Union[ + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRuntimeProjectAttachmentsPager: + r"""List runtime projects attached to the host project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_list_runtime_project_attachments(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceClient() + + # Initialize request argument(s) + request = apihub_v1.ListRuntimeProjectAttachmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_runtime_project_attachments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest, dict]): + The request object. The + [ListRuntimeProjectAttachments][google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments] + method's request. + parent (str): + Required. The parent, which owns this collection of + runtime project attachments. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.services.runtime_project_attachment_service.pagers.ListRuntimeProjectAttachmentsPager: + The + [ListRuntimeProjectAttachments][google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments] + method's response. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, + ): + request = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_runtime_project_attachments + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListRuntimeProjectAttachmentsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_runtime_project_attachment( + self, + request: Optional[ + Union[ + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a runtime project attachment in the API Hub. + This call will detach the runtime project from the host + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_delete_runtime_project_attachment(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteRuntimeProjectAttachmentRequest( + name="name_value", + ) + + # Make the request + client.delete_runtime_project_attachment(request=request) + + Args: + request (Union[google.cloud.apihub_v1.types.DeleteRuntimeProjectAttachmentRequest, dict]): + The request object. The + [DeleteRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment] + method's request. + name (str): + Required. The name of the Runtime Project Attachment to + delete. Format: + ``projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, + ): + request = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_runtime_project_attachment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def lookup_runtime_project_attachment( + self, + request: Optional[ + Union[ + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse: + r"""Look up a runtime project attachment. This API can be + called in the context of any project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apihub_v1 + + def sample_lookup_runtime_project_attachment(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceClient() + + # Initialize request argument(s) + request = apihub_v1.LookupRuntimeProjectAttachmentRequest( + name="name_value", + ) + + # Make the request + response = client.lookup_runtime_project_attachment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.apihub_v1.types.LookupRuntimeProjectAttachmentRequest, dict]): + The request object. The + [LookupRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.LookupRuntimeProjectAttachment] + method's request. + name (str): + Required. Runtime project ID to look up runtime project + attachment for. Lookup happens across all regions. + Expected format: + ``projects/{project}/locations/{location}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apihub_v1.types.LookupRuntimeProjectAttachmentResponse: + The + [ListRuntimeProjectAttachments][google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments] + method's response. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, + ): + request = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.lookup_runtime_project_attachment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RuntimeProjectAttachmentServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("RuntimeProjectAttachmentServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py similarity index 67% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/pagers.py rename to packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py index ba3ba0c00821..7e63e765df51 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/pagers.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py @@ -38,32 +38,35 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import label_service +from google.cloud.apihub_v1.types import runtime_project_attachment_service -class ListLabelsPager: - """A pager for iterating through ``list_labels`` requests. +class ListRuntimeProjectAttachmentsPager: + """A pager for iterating through ``list_runtime_project_attachments`` requests. This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListLabelsResponse` object, and + :class:`google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsResponse` object, and provides an ``__iter__`` method to iterate through its - ``labels`` field. + ``runtime_project_attachments`` field. 
If there are more pages, the ``__iter__`` method will make additional - ``ListLabels`` requests and continue to iterate - through the ``labels`` field on the + ``ListRuntimeProjectAttachments`` requests and continue to iterate + through the ``runtime_project_attachments`` field on the corresponding responses. - All the usual :class:`google.ads.admanager_v1.types.ListLabelsResponse` + All the usual :class:`google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., label_service.ListLabelsResponse], - request: label_service.ListLabelsRequest, - response: label_service.ListLabelsResponse, + method: Callable[ + ..., + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse, + ], + request: runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, + response: runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -74,9 +77,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.ads.admanager_v1.types.ListLabelsRequest): + request (google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest): The initial request object. - response (google.ads.admanager_v1.types.ListLabelsResponse): + response (google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsResponse): The initial response object. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -85,7 +88,11 @@ def __init__( sent along with the request as metadata. 
""" self._method = method - self._request = label_service.ListLabelsRequest(request) + self._request = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest( + request + ) + ) self._response = response self._retry = retry self._timeout = timeout @@ -95,7 +102,11 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[label_service.ListLabelsResponse]: + def pages( + self, + ) -> Iterator[ + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse + ]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -107,9 +118,11 @@ def pages(self) -> Iterator[label_service.ListLabelsResponse]: ) yield self._response - def __iter__(self) -> Iterator[label_service.Label]: + def __iter__( + self, + ) -> Iterator[runtime_project_attachment_service.RuntimeProjectAttachment]: for page in self.pages: - yield from page.labels + yield from page.runtime_project_attachments def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py new file mode 100644 index 000000000000..604d33074e46 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RuntimeProjectAttachmentServiceTransport +from .rest import ( + RuntimeProjectAttachmentServiceRestInterceptor, + RuntimeProjectAttachmentServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[RuntimeProjectAttachmentServiceTransport]] +_transport_registry["rest"] = RuntimeProjectAttachmentServiceRestTransport + +__all__ = ( + "RuntimeProjectAttachmentServiceTransport", + "RuntimeProjectAttachmentServiceRestTransport", + "RuntimeProjectAttachmentServiceRestInterceptor", +) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/base.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/base.py new file mode 100644 index 000000000000..4ad02bf6b42d --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/base.py @@ -0,0 +1,317 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.apihub_v1 import gapic_version as package_version +from google.cloud.apihub_v1.types import runtime_project_attachment_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class RuntimeProjectAttachmentServiceTransport(abc.ABC): + """Abstract transport class for RuntimeProjectAttachmentService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "apihub.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_runtime_project_attachment: gapic_v1.method.wrap_method( + self.create_runtime_project_attachment, + default_timeout=60.0, + client_info=client_info, + ), + self.get_runtime_project_attachment: gapic_v1.method.wrap_method( + self.get_runtime_project_attachment, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_runtime_project_attachments: gapic_v1.method.wrap_method( + self.list_runtime_project_attachments, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_runtime_project_attachment: gapic_v1.method.wrap_method( + self.delete_runtime_project_attachment, + default_timeout=60.0, + client_info=client_info, + ), + self.lookup_runtime_project_attachment: gapic_v1.method.wrap_method( + self.lookup_runtime_project_attachment, + 
default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest], + Union[ + runtime_project_attachment_service.RuntimeProjectAttachment, + Awaitable[runtime_project_attachment_service.RuntimeProjectAttachment], + ], + ]: + raise NotImplementedError() + + @property + def get_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest], + Union[ + runtime_project_attachment_service.RuntimeProjectAttachment, + Awaitable[runtime_project_attachment_service.RuntimeProjectAttachment], + ], + ]: + raise NotImplementedError() + + @property + def list_runtime_project_attachments( + self, + ) -> Callable[ + [runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest], + Union[ + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse, + Awaitable[ + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse + ], + ], + ]: + raise NotImplementedError() + + @property + def delete_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def lookup_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest], + Union[ + 
runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse, + Awaitable[ + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse + ], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("RuntimeProjectAttachmentServiceTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/grpc.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/grpc.py new file mode 100644 index 000000000000..5eaca16634d5 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/grpc.py @@ -0,0 +1,519 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.apihub_v1.types import runtime_project_attachment_service + +from .base import DEFAULT_CLIENT_INFO, RuntimeProjectAttachmentServiceTransport + + +class RuntimeProjectAttachmentServiceGrpcTransport( + RuntimeProjectAttachmentServiceTransport +): + """gRPC backend transport for RuntimeProjectAttachmentService. + + This service is used for managing the runtime project + attachments. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest], + runtime_project_attachment_service.RuntimeProjectAttachment, + ]: + r"""Return a callable for the create runtime project + attachment method over gRPC. + + Attaches a runtime project to the host project. + + Returns: + Callable[[~.CreateRuntimeProjectAttachmentRequest], + ~.RuntimeProjectAttachment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_runtime_project_attachment" not in self._stubs: + self._stubs[ + "create_runtime_project_attachment" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.RuntimeProjectAttachmentService/CreateRuntimeProjectAttachment", + request_serializer=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.serialize, + response_deserializer=runtime_project_attachment_service.RuntimeProjectAttachment.deserialize, + ) + return self._stubs["create_runtime_project_attachment"] + + @property + def get_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest], + runtime_project_attachment_service.RuntimeProjectAttachment, + ]: + r"""Return a callable for the get runtime project attachment method over gRPC. + + Gets a runtime project attachment. 
+ + Returns: + Callable[[~.GetRuntimeProjectAttachmentRequest], + ~.RuntimeProjectAttachment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_runtime_project_attachment" not in self._stubs: + self._stubs[ + "get_runtime_project_attachment" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.RuntimeProjectAttachmentService/GetRuntimeProjectAttachment", + request_serializer=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest.serialize, + response_deserializer=runtime_project_attachment_service.RuntimeProjectAttachment.deserialize, + ) + return self._stubs["get_runtime_project_attachment"] + + @property + def list_runtime_project_attachments( + self, + ) -> Callable[ + [runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest], + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse, + ]: + r"""Return a callable for the list runtime project + attachments method over gRPC. + + List runtime projects attached to the host project. + + Returns: + Callable[[~.ListRuntimeProjectAttachmentsRequest], + ~.ListRuntimeProjectAttachmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_runtime_project_attachments" not in self._stubs: + self._stubs[ + "list_runtime_project_attachments" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.RuntimeProjectAttachmentService/ListRuntimeProjectAttachments", + request_serializer=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest.serialize, + response_deserializer=runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.deserialize, + ) + return self._stubs["list_runtime_project_attachments"] + + @property + def delete_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest], + empty_pb2.Empty, + ]: + r"""Return a callable for the delete runtime project + attachment method over gRPC. + + Delete a runtime project attachment in the API Hub. + This call will detach the runtime project from the host + project. + + Returns: + Callable[[~.DeleteRuntimeProjectAttachmentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_runtime_project_attachment" not in self._stubs: + self._stubs[ + "delete_runtime_project_attachment" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.RuntimeProjectAttachmentService/DeleteRuntimeProjectAttachment", + request_serializer=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_runtime_project_attachment"] + + @property + def lookup_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest], + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse, + ]: + r"""Return a callable for the lookup runtime project + attachment method over gRPC. + + Look up a runtime project attachment. This API can be + called in the context of any project. + + Returns: + Callable[[~.LookupRuntimeProjectAttachmentRequest], + ~.LookupRuntimeProjectAttachmentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "lookup_runtime_project_attachment" not in self._stubs: + self._stubs[ + "lookup_runtime_project_attachment" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.RuntimeProjectAttachmentService/LookupRuntimeProjectAttachment", + request_serializer=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest.serialize, + response_deserializer=runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.deserialize, + ) + return self._stubs["lookup_runtime_project_attachment"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("RuntimeProjectAttachmentServiceGrpcTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/grpc_asyncio.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..cf4121623217 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/grpc_asyncio.py @@ -0,0 +1,580 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.apihub_v1.types import runtime_project_attachment_service + +from .base import DEFAULT_CLIENT_INFO, RuntimeProjectAttachmentServiceTransport +from .grpc import RuntimeProjectAttachmentServiceGrpcTransport + + +class RuntimeProjectAttachmentServiceGrpcAsyncIOTransport( + RuntimeProjectAttachmentServiceTransport +): + """gRPC AsyncIO backend transport for RuntimeProjectAttachmentService. + + This service is used for managing the runtime project + attachments. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. 
It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def create_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest], + Awaitable[runtime_project_attachment_service.RuntimeProjectAttachment], + ]: + r"""Return a callable for the create runtime project + attachment method over gRPC. + + Attaches a runtime project to the host project. + + Returns: + Callable[[~.CreateRuntimeProjectAttachmentRequest], + Awaitable[~.RuntimeProjectAttachment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_runtime_project_attachment" not in self._stubs: + self._stubs[ + "create_runtime_project_attachment" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.RuntimeProjectAttachmentService/CreateRuntimeProjectAttachment", + request_serializer=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.serialize, + response_deserializer=runtime_project_attachment_service.RuntimeProjectAttachment.deserialize, + ) + return self._stubs["create_runtime_project_attachment"] + + @property + def get_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest], + Awaitable[runtime_project_attachment_service.RuntimeProjectAttachment], + ]: + r"""Return a callable for the get runtime project attachment method over gRPC. + + Gets a runtime project attachment. + + Returns: + Callable[[~.GetRuntimeProjectAttachmentRequest], + Awaitable[~.RuntimeProjectAttachment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_runtime_project_attachment" not in self._stubs: + self._stubs[ + "get_runtime_project_attachment" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.RuntimeProjectAttachmentService/GetRuntimeProjectAttachment", + request_serializer=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest.serialize, + response_deserializer=runtime_project_attachment_service.RuntimeProjectAttachment.deserialize, + ) + return self._stubs["get_runtime_project_attachment"] + + @property + def list_runtime_project_attachments( + self, + ) -> Callable[ + [runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest], + Awaitable[ + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse + ], + ]: + r"""Return a callable for the list runtime project + attachments method over gRPC. + + List runtime projects attached to the host project. + + Returns: + Callable[[~.ListRuntimeProjectAttachmentsRequest], + Awaitable[~.ListRuntimeProjectAttachmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_runtime_project_attachments" not in self._stubs: + self._stubs[ + "list_runtime_project_attachments" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.RuntimeProjectAttachmentService/ListRuntimeProjectAttachments", + request_serializer=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest.serialize, + response_deserializer=runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.deserialize, + ) + return self._stubs["list_runtime_project_attachments"] + + @property + def delete_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest], + Awaitable[empty_pb2.Empty], + ]: + r"""Return a callable for the delete runtime project + attachment method over gRPC. + + Delete a runtime project attachment in the API Hub. + This call will detach the runtime project from the host + project. + + Returns: + Callable[[~.DeleteRuntimeProjectAttachmentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_runtime_project_attachment" not in self._stubs: + self._stubs[ + "delete_runtime_project_attachment" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.RuntimeProjectAttachmentService/DeleteRuntimeProjectAttachment", + request_serializer=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_runtime_project_attachment"] + + @property + def lookup_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest], + Awaitable[ + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse + ], + ]: + r"""Return a callable for the lookup runtime project + attachment method over gRPC. + + Look up a runtime project attachment. This API can be + called in the context of any project. + + Returns: + Callable[[~.LookupRuntimeProjectAttachmentRequest], + Awaitable[~.LookupRuntimeProjectAttachmentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "lookup_runtime_project_attachment" not in self._stubs: + self._stubs[ + "lookup_runtime_project_attachment" + ] = self.grpc_channel.unary_unary( + "/google.cloud.apihub.v1.RuntimeProjectAttachmentService/LookupRuntimeProjectAttachment", + request_serializer=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest.serialize, + response_deserializer=runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.deserialize, + ) + return self._stubs["lookup_runtime_project_attachment"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_runtime_project_attachment: gapic_v1.method_async.wrap_method( + self.create_runtime_project_attachment, + default_timeout=60.0, + client_info=client_info, + ), + self.get_runtime_project_attachment: gapic_v1.method_async.wrap_method( + self.get_runtime_project_attachment, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_runtime_project_attachments: gapic_v1.method_async.wrap_method( + self.list_runtime_project_attachments, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_runtime_project_attachment: gapic_v1.method_async.wrap_method( + self.delete_runtime_project_attachment, + default_timeout=60.0, + client_info=client_info, + ), + self.lookup_runtime_project_attachment: gapic_v1.method_async.wrap_method( + self.lookup_runtime_project_attachment, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("RuntimeProjectAttachmentServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/rest.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/rest.py new file mode 100644 index 000000000000..f2d0a98f26a7 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/rest.py @@ -0,0 +1,1412 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.apihub_v1.types import runtime_project_attachment_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import RuntimeProjectAttachmentServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RuntimeProjectAttachmentServiceRestInterceptor: + """Interceptor for RuntimeProjectAttachmentService. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RuntimeProjectAttachmentServiceRestTransport. + + .. code-block:: python + class MyCustomRuntimeProjectAttachmentServiceInterceptor(RuntimeProjectAttachmentServiceRestInterceptor): + def pre_create_runtime_project_attachment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_runtime_project_attachment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_runtime_project_attachment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_runtime_project_attachment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_runtime_project_attachment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_runtime_project_attachments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_runtime_project_attachments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_lookup_runtime_project_attachment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_lookup_runtime_project_attachment(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RuntimeProjectAttachmentServiceRestTransport(interceptor=MyCustomRuntimeProjectAttachmentServiceInterceptor()) + client = RuntimeProjectAttachmentServiceClient(transport=transport) + + + """ + 
+ def pre_create_runtime_project_attachment( + self, + request: runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_runtime_project_attachment + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuntimeProjectAttachmentService server. + """ + return request, metadata + + def post_create_runtime_project_attachment( + self, response: runtime_project_attachment_service.RuntimeProjectAttachment + ) -> runtime_project_attachment_service.RuntimeProjectAttachment: + """Post-rpc interceptor for create_runtime_project_attachment + + Override in a subclass to manipulate the response + after it is returned by the RuntimeProjectAttachmentService server but before + it is returned to user code. + """ + return response + + def pre_delete_runtime_project_attachment( + self, + request: runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_runtime_project_attachment + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuntimeProjectAttachmentService server. 
+ """ + return request, metadata + + def pre_get_runtime_project_attachment( + self, + request: runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for get_runtime_project_attachment + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuntimeProjectAttachmentService server. + """ + return request, metadata + + def post_get_runtime_project_attachment( + self, response: runtime_project_attachment_service.RuntimeProjectAttachment + ) -> runtime_project_attachment_service.RuntimeProjectAttachment: + """Post-rpc interceptor for get_runtime_project_attachment + + Override in a subclass to manipulate the response + after it is returned by the RuntimeProjectAttachmentService server but before + it is returned to user code. + """ + return response + + def pre_list_runtime_project_attachments( + self, + request: runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_runtime_project_attachments + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuntimeProjectAttachmentService server. + """ + return request, metadata + + def post_list_runtime_project_attachments( + self, + response: runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse, + ) -> runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse: + """Post-rpc interceptor for list_runtime_project_attachments + + Override in a subclass to manipulate the response + after it is returned by the RuntimeProjectAttachmentService server but before + it is returned to user code. 
+ """ + return response + + def pre_lookup_runtime_project_attachment( + self, + request: runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for lookup_runtime_project_attachment + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuntimeProjectAttachmentService server. + """ + return request, metadata + + def post_lookup_runtime_project_attachment( + self, + response: runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse, + ) -> runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse: + """Post-rpc interceptor for lookup_runtime_project_attachment + + Override in a subclass to manipulate the response + after it is returned by the RuntimeProjectAttachmentService server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuntimeProjectAttachmentService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the RuntimeProjectAttachmentService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuntimeProjectAttachmentService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the RuntimeProjectAttachmentService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuntimeProjectAttachmentService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the RuntimeProjectAttachmentService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuntimeProjectAttachmentService server. 
+ """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the RuntimeProjectAttachmentService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuntimeProjectAttachmentService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the RuntimeProjectAttachmentService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuntimeProjectAttachmentService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the RuntimeProjectAttachmentService server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class RuntimeProjectAttachmentServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RuntimeProjectAttachmentServiceRestInterceptor + + +class RuntimeProjectAttachmentServiceRestTransport( + RuntimeProjectAttachmentServiceTransport +): + """REST backend transport for RuntimeProjectAttachmentService. + + This service is used for managing the runtime project + attachments. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "apihub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[RuntimeProjectAttachmentServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'apihub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. 
This argument is
+ ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+ certificate to configure mutual TLS HTTP channel. It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = (
+ interceptor or RuntimeProjectAttachmentServiceRestInterceptor()
+ )
+ self._prep_wrapped_messages(client_info)
+
+ class _CreateRuntimeProjectAttachment(RuntimeProjectAttachmentServiceRestStub):
+ def __hash__(self):
+ return 
hash("CreateRuntimeProjectAttachment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "runtimeProjectAttachmentId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> runtime_project_attachment_service.RuntimeProjectAttachment: + r"""Call the create runtime project + attachment method over HTTP. + + Args: + request (~.runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest): + The request object. The + [CreateRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.runtime_project_attachment_service.RuntimeProjectAttachment: + Runtime project attachment represents + an attachment from the runtime project + to the host project. Api Hub looks for + deployments in the attached runtime + projects and creates corresponding + resources in Api Hub for the discovered + deployments. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments", + "body": "runtime_project_attachment", + }, + ] + request, metadata = self._interceptor.pre_create_runtime_project_attachment( + request, metadata + ) + pb_request = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = runtime_project_attachment_service.RuntimeProjectAttachment() + pb_resp = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_runtime_project_attachment(resp) + return resp + + class _DeleteRuntimeProjectAttachment(RuntimeProjectAttachmentServiceRestStub): + def __hash__(self): + return hash("DeleteRuntimeProjectAttachment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete runtime project + attachment method over HTTP. + + Args: + request (~.runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest): + The request object. The + [DeleteRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_runtime_project_attachment( + request, metadata + ) + pb_request = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetRuntimeProjectAttachment(RuntimeProjectAttachmentServiceRestStub): + def __hash__(self): + return hash("GetRuntimeProjectAttachment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> runtime_project_attachment_service.RuntimeProjectAttachment: + r"""Call the get runtime project + attachment method over HTTP. + + Args: + request (~.runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest): + The request object. The + [GetRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.runtime_project_attachment_service.RuntimeProjectAttachment: + Runtime project attachment represents + an attachment from the runtime project + to the host project. Api Hub looks for + deployments in the attached runtime + projects and creates corresponding + resources in Api Hub for the discovered + deployments. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}", + }, + ] + request, metadata = self._interceptor.pre_get_runtime_project_attachment( + request, metadata + ) + pb_request = runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = runtime_project_attachment_service.RuntimeProjectAttachment() + pb_resp = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_runtime_project_attachment(resp) + return resp + + class _ListRuntimeProjectAttachments(RuntimeProjectAttachmentServiceRestStub): + def __hash__(self): + return hash("ListRuntimeProjectAttachments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse: + r"""Call the list runtime project + attachments method over HTTP. + + Args: + request (~.runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest): + The request object. The + [ListRuntimeProjectAttachments][google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse: + The + [ListRuntimeProjectAttachments][google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments] + method's response. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments", + }, + ] + request, metadata = self._interceptor.pre_list_runtime_project_attachments( + request, metadata + ) + pb_request = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() + ) + pb_resp = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_runtime_project_attachments(resp) + return resp + + class _LookupRuntimeProjectAttachment(RuntimeProjectAttachmentServiceRestStub): + def __hash__(self): + return hash("LookupRuntimeProjectAttachment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse: + r"""Call the lookup runtime project + attachment method over HTTP. + + Args: + request (~.runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest): + The request object. The + [LookupRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.LookupRuntimeProjectAttachment] + method's request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse: + The + [ListRuntimeProjectAttachments][google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments] + method's response. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}:lookupRuntimeProjectAttachment", + }, + ] + request, metadata = self._interceptor.pre_lookup_runtime_project_attachment( + request, metadata + ) + pb_request = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() + ) + pb_resp = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_lookup_runtime_project_attachment(resp) + return resp + + @property + def create_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest], + runtime_project_attachment_service.RuntimeProjectAttachment, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateRuntimeProjectAttachment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest], + empty_pb2.Empty, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteRuntimeProjectAttachment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest], + runtime_project_attachment_service.RuntimeProjectAttachment, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetRuntimeProjectAttachment(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_runtime_project_attachments( + self, + ) -> Callable[ + [runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest], + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRuntimeProjectAttachments(self._session, self._host, self._interceptor) # type: ignore + + @property + def lookup_runtime_project_attachment( + self, + ) -> Callable[ + [runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest], + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._LookupRuntimeProjectAttachment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(RuntimeProjectAttachmentServiceRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(RuntimeProjectAttachmentServiceRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(RuntimeProjectAttachmentServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(RuntimeProjectAttachmentServiceRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. 
+ + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(RuntimeProjectAttachmentServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(RuntimeProjectAttachmentServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("RuntimeProjectAttachmentServiceRestTransport",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/types/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/__init__.py new file mode 100644 index 000000000000..a5d0a6c327f4 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/__init__.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .apihub_service import ( + ApiHubResource, + CreateApiRequest, + CreateAttributeRequest, + CreateDependencyRequest, + CreateDeploymentRequest, + CreateExternalApiRequest, + CreateSpecRequest, + CreateVersionRequest, + DeleteApiRequest, + DeleteAttributeRequest, + DeleteDependencyRequest, + DeleteDeploymentRequest, + DeleteExternalApiRequest, + DeleteSpecRequest, + DeleteVersionRequest, + GetApiOperationRequest, + GetApiRequest, + GetAttributeRequest, + GetDefinitionRequest, + GetDependencyRequest, + GetDeploymentRequest, + GetExternalApiRequest, + GetSpecContentsRequest, + GetSpecRequest, + GetVersionRequest, + ListApiOperationsRequest, + ListApiOperationsResponse, + ListApisRequest, + ListApisResponse, + ListAttributesRequest, + ListAttributesResponse, + ListDependenciesRequest, + ListDependenciesResponse, + ListDeploymentsRequest, + ListDeploymentsResponse, + ListExternalApisRequest, + ListExternalApisResponse, + ListSpecsRequest, + ListSpecsResponse, + ListVersionsRequest, + ListVersionsResponse, + SearchResourcesRequest, + SearchResourcesResponse, + SearchResult, + UpdateApiRequest, + UpdateAttributeRequest, + UpdateDependencyRequest, + UpdateDeploymentRequest, + UpdateExternalApiRequest, + UpdateSpecRequest, + UpdateVersionRequest, +) +from .common_fields import ( + Api, + ApiHubInstance, + ApiOperation, + Attribute, + AttributeValues, + Definition, + Dependency, + DependencyEntityReference, + DependencyErrorDetail, + Deployment, + Documentation, + ExternalApi, + HttpOperation, + Issue, + Linter, + LintResponse, + LintState, + 
OpenApiSpecDetails, + OperationDetails, + OperationMetadata, + Owner, + Path, + Point, + Range, + Schema, + Severity, + Spec, + SpecContents, + SpecDetails, + Version, +) +from .host_project_registration_service import ( + CreateHostProjectRegistrationRequest, + GetHostProjectRegistrationRequest, + HostProjectRegistration, + ListHostProjectRegistrationsRequest, + ListHostProjectRegistrationsResponse, +) +from .linting_service import ( + GetStyleGuideContentsRequest, + GetStyleGuideRequest, + LintSpecRequest, + StyleGuide, + StyleGuideContents, + UpdateStyleGuideRequest, +) +from .plugin_service import ( + DisablePluginRequest, + EnablePluginRequest, + GetPluginRequest, + Plugin, +) +from .provisioning_service import ( + CreateApiHubInstanceRequest, + GetApiHubInstanceRequest, + LookupApiHubInstanceRequest, + LookupApiHubInstanceResponse, +) +from .runtime_project_attachment_service import ( + CreateRuntimeProjectAttachmentRequest, + DeleteRuntimeProjectAttachmentRequest, + GetRuntimeProjectAttachmentRequest, + ListRuntimeProjectAttachmentsRequest, + ListRuntimeProjectAttachmentsResponse, + LookupRuntimeProjectAttachmentRequest, + LookupRuntimeProjectAttachmentResponse, + RuntimeProjectAttachment, +) + +__all__ = ( + "ApiHubResource", + "CreateApiRequest", + "CreateAttributeRequest", + "CreateDependencyRequest", + "CreateDeploymentRequest", + "CreateExternalApiRequest", + "CreateSpecRequest", + "CreateVersionRequest", + "DeleteApiRequest", + "DeleteAttributeRequest", + "DeleteDependencyRequest", + "DeleteDeploymentRequest", + "DeleteExternalApiRequest", + "DeleteSpecRequest", + "DeleteVersionRequest", + "GetApiOperationRequest", + "GetApiRequest", + "GetAttributeRequest", + "GetDefinitionRequest", + "GetDependencyRequest", + "GetDeploymentRequest", + "GetExternalApiRequest", + "GetSpecContentsRequest", + "GetSpecRequest", + "GetVersionRequest", + "ListApiOperationsRequest", + "ListApiOperationsResponse", + "ListApisRequest", + "ListApisResponse", + 
"ListAttributesRequest", + "ListAttributesResponse", + "ListDependenciesRequest", + "ListDependenciesResponse", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "ListExternalApisRequest", + "ListExternalApisResponse", + "ListSpecsRequest", + "ListSpecsResponse", + "ListVersionsRequest", + "ListVersionsResponse", + "SearchResourcesRequest", + "SearchResourcesResponse", + "SearchResult", + "UpdateApiRequest", + "UpdateAttributeRequest", + "UpdateDependencyRequest", + "UpdateDeploymentRequest", + "UpdateExternalApiRequest", + "UpdateSpecRequest", + "UpdateVersionRequest", + "Api", + "ApiHubInstance", + "ApiOperation", + "Attribute", + "AttributeValues", + "Definition", + "Dependency", + "DependencyEntityReference", + "DependencyErrorDetail", + "Deployment", + "Documentation", + "ExternalApi", + "HttpOperation", + "Issue", + "LintResponse", + "OpenApiSpecDetails", + "OperationDetails", + "OperationMetadata", + "Owner", + "Path", + "Point", + "Range", + "Schema", + "Spec", + "SpecContents", + "SpecDetails", + "Version", + "Linter", + "LintState", + "Severity", + "CreateHostProjectRegistrationRequest", + "GetHostProjectRegistrationRequest", + "HostProjectRegistration", + "ListHostProjectRegistrationsRequest", + "ListHostProjectRegistrationsResponse", + "GetStyleGuideContentsRequest", + "GetStyleGuideRequest", + "LintSpecRequest", + "StyleGuide", + "StyleGuideContents", + "UpdateStyleGuideRequest", + "DisablePluginRequest", + "EnablePluginRequest", + "GetPluginRequest", + "Plugin", + "CreateApiHubInstanceRequest", + "GetApiHubInstanceRequest", + "LookupApiHubInstanceRequest", + "LookupApiHubInstanceResponse", + "CreateRuntimeProjectAttachmentRequest", + "DeleteRuntimeProjectAttachmentRequest", + "GetRuntimeProjectAttachmentRequest", + "ListRuntimeProjectAttachmentsRequest", + "ListRuntimeProjectAttachmentsResponse", + "LookupRuntimeProjectAttachmentRequest", + "LookupRuntimeProjectAttachmentResponse", + "RuntimeProjectAttachment", +) diff --git 
a/packages/google-cloud-apihub/google/cloud/apihub_v1/types/apihub_service.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/apihub_service.py new file mode 100644 index 000000000000..22a7c41c5cd3 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/apihub_service.py @@ -0,0 +1,2057 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.apihub_v1.types import common_fields + +__protobuf__ = proto.module( + package="google.cloud.apihub.v1", + manifest={ + "CreateApiRequest", + "GetApiRequest", + "UpdateApiRequest", + "DeleteApiRequest", + "ListApisRequest", + "ListApisResponse", + "CreateVersionRequest", + "GetVersionRequest", + "UpdateVersionRequest", + "DeleteVersionRequest", + "ListVersionsRequest", + "ListVersionsResponse", + "CreateSpecRequest", + "GetSpecRequest", + "UpdateSpecRequest", + "DeleteSpecRequest", + "ListSpecsRequest", + "ListSpecsResponse", + "GetSpecContentsRequest", + "GetApiOperationRequest", + "ListApiOperationsRequest", + "ListApiOperationsResponse", + "GetDefinitionRequest", + "CreateDeploymentRequest", + "GetDeploymentRequest", + "UpdateDeploymentRequest", + "DeleteDeploymentRequest", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + 
"CreateAttributeRequest", + "GetAttributeRequest", + "UpdateAttributeRequest", + "DeleteAttributeRequest", + "ListAttributesRequest", + "ListAttributesResponse", + "SearchResourcesRequest", + "ApiHubResource", + "SearchResult", + "SearchResourcesResponse", + "CreateDependencyRequest", + "GetDependencyRequest", + "UpdateDependencyRequest", + "DeleteDependencyRequest", + "ListDependenciesRequest", + "ListDependenciesResponse", + "CreateExternalApiRequest", + "GetExternalApiRequest", + "UpdateExternalApiRequest", + "DeleteExternalApiRequest", + "ListExternalApisRequest", + "ListExternalApisResponse", + }, +) + + +class CreateApiRequest(proto.Message): + r"""The [CreateApi][google.cloud.apihub.v1.ApiHub.CreateApi] method's + request. + + Attributes: + parent (str): + Required. The parent resource for the API resource. Format: + ``projects/{project}/locations/{location}`` + api_id (str): + Optional. The ID to use for the API resource, which will + become the final component of the API's resource name. This + field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another API resource in the API hub. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid characters + are /[a-z][A-Z][0-9]-_/. + api (google.cloud.apihub_v1.types.Api): + Required. The API resource to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + api_id: str = proto.Field( + proto.STRING, + number=2, + ) + api: common_fields.Api = proto.Field( + proto.MESSAGE, + number=3, + message=common_fields.Api, + ) + + +class GetApiRequest(proto.Message): + r"""The [GetApi][google.cloud.apihub.v1.ApiHub.GetApi] method's request. + + Attributes: + name (str): + Required. The name of the API resource to retrieve. 
Format: + ``projects/{project}/locations/{location}/apis/{api}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateApiRequest(proto.Message): + r"""The [UpdateApi][google.cloud.apihub.v1.ApiHub.UpdateApi] method's + request. + + Attributes: + api (google.cloud.apihub_v1.types.Api): + Required. The API resource to update. + + The API resource's ``name`` field is used to identify the + API resource to update. Format: + ``projects/{project}/locations/{location}/apis/{api}`` + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + api: common_fields.Api = proto.Field( + proto.MESSAGE, + number=1, + message=common_fields.Api, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteApiRequest(proto.Message): + r"""The [DeleteApi][google.cloud.apihub.v1.ApiHub.DeleteApi] method's + request. + + Attributes: + name (str): + Required. The name of the API resource to delete. Format: + ``projects/{project}/locations/{location}/apis/{api}`` + force (bool): + Optional. If set to true, any versions from + this API will also be deleted. Otherwise, the + request will only work if the API has no + versions. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListApisRequest(proto.Message): + r"""The [ListApis][google.cloud.apihub.v1.ApiHub.ListApis] method's + request. + + Attributes: + parent (str): + Required. The parent, which owns this collection of API + resources. Format: + ``projects/{project}/locations/{location}`` + filter (str): + Optional. An expression that filters the list of + ApiResources. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string. The comparison operator must be one of: ``<``, + ``>``, ``:`` or ``=``. 
Filters are not case sensitive. + + The following fields in the ``ApiResource`` are eligible for + filtering: + + - ``owner.email`` - The email of the team which owns the + ApiResource. Allowed comparison operators: ``=``. + - ``create_time`` - The time at which the ApiResource was + created. The value should be in the + (RFC3339)[https://tools.ietf.org/html/rfc3339] format. + Allowed comparison operators: ``>`` and ``<``. + - ``display_name`` - The display name of the ApiResource. + Allowed comparison operators: ``=``. + - ``target_user.enum_values.values.id`` - The allowed value + id of the target users attribute associated with the + ApiResource. Allowed comparison operator is ``:``. + - ``target_user.enum_values.values.display_name`` - The + allowed value display name of the target users attribute + associated with the ApiResource. Allowed comparison + operator is ``:``. + - ``team.enum_values.values.id`` - The allowed value id of + the team attribute associated with the ApiResource. + Allowed comparison operator is ``:``. + - ``team.enum_values.values.display_name`` - The allowed + value display name of the team attribute associated with + the ApiResource. Allowed comparison operator is ``:``. + - ``business_unit.enum_values.values.id`` - The allowed + value id of the business unit attribute associated with + the ApiResource. Allowed comparison operator is ``:``. + - ``business_unit.enum_values.values.display_name`` - The + allowed value display name of the business unit attribute + associated with the ApiResource. Allowed comparison + operator is ``:``. + - ``maturity_level.enum_values.values.id`` - The allowed + value id of the maturity level attribute associated with + the ApiResource. Allowed comparison operator is ``:``. + - ``maturity_level.enum_values.values.display_name`` - The + allowed value display name of the maturity level + attribute associated with the ApiResource. Allowed + comparison operator is ``:``. 
+ - ``api_style.enum_values.values.id`` - The allowed value + id of the api style attribute associated with the + ApiResource. Allowed comparison operator is ``:``. + - ``api_style.enum_values.values.display_name`` - The + allowed value display name of the api style attribute + associated with the ApiResource. Allowed comparison + operator is ``:``. + + Expressions are combined with either ``AND`` logic operator + or ``OR`` logical operator but not both of them together + i.e. only one of the ``AND`` or ``OR`` operator can be used + throughout the filter string and both the operators cannot + be used together. No other logical operators are supported. + At most three filter fields are allowed in the filter string + and if provided more than that then ``INVALID_ARGUMENT`` + error is returned by the API. + + Here are a few examples: + + - ``owner.email = \"apihub@google.com\"`` - - The owner + team email is *apihub@google.com*. + - ``owner.email = \"apihub@google.com\" AND create_time < \"2021-08-15T14:50:00Z\" AND create_time > \"2021-08-10T12:00:00Z\"`` + - The owner team email is *apihub@google.com* and the api + was created before *2021-08-15 14:50:00 UTC* and after + *2021-08-10 12:00:00 UTC*. + - ``owner.email = \"apihub@google.com\" OR team.enum_values.values.id: apihub-team-id`` + - The filter string specifies the APIs where the owner + team email is *apihub@google.com* or the id of the + allowed value associated with the team attribute is + *apihub-team-id*. + - ``owner.email = \"apihub@google.com\" OR team.enum_values.values.display_name: ApiHub Team`` + - The filter string specifies the APIs where the owner + team email is *apihub@google.com* or the display name of + the allowed value associated with the team attribute is + ``ApiHub Team``. + page_size (int): + Optional. The maximum number of API resources + to return. The service may return fewer than + this value. If unspecified, at most 50 Apis will + be returned. 
The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListApis`` call. Provide this to retrieve the subsequent + page. + + When paginating, all other parameters (except page_size) + provided to ``ListApis`` must match the call that provided + the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListApisResponse(proto.Message): + r"""The [ListApis][google.cloud.apihub.v1.ApiHub.ListApis] method's + response. + + Attributes: + apis (MutableSequence[google.cloud.apihub_v1.types.Api]): + The API resources present in the API hub. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + apis: MutableSequence[common_fields.Api] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common_fields.Api, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateVersionRequest(proto.Message): + r"""The [CreateVersion][google.cloud.apihub.v1.ApiHub.CreateVersion] + method's request. + + Attributes: + parent (str): + Required. The parent resource for API version. Format: + ``projects/{project}/locations/{location}/apis/{api}`` + version_id (str): + Optional. The ID to use for the API version, which will + become the final component of the version's resource name. + This field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another version in the API resource. + - If not provided, a system generated id will be used. 
+ + This value should be 4-500 characters, and valid characters + are /[a-z][A-Z][0-9]-_/. + version (google.cloud.apihub_v1.types.Version): + Required. The version to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + version_id: str = proto.Field( + proto.STRING, + number=2, + ) + version: common_fields.Version = proto.Field( + proto.MESSAGE, + number=3, + message=common_fields.Version, + ) + + +class GetVersionRequest(proto.Message): + r"""The [GetVersion][google.cloud.apihub.v1.ApiHub.GetVersion] method's + request. + + Attributes: + name (str): + Required. The name of the API version to retrieve. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateVersionRequest(proto.Message): + r"""The [UpdateVersion][google.cloud.apihub.v1.ApiHub.UpdateVersion] + method's request. + + Attributes: + version (google.cloud.apihub_v1.types.Version): + Required. The API version to update. + + The version's ``name`` field is used to identify the API + version to update. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + version: common_fields.Version = proto.Field( + proto.MESSAGE, + number=1, + message=common_fields.Version, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteVersionRequest(proto.Message): + r"""The [DeleteVersion][google.cloud.apihub.v1.ApiHub.DeleteVersion] + method's request. + + Attributes: + name (str): + Required. The name of the version to delete. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + force (bool): + Optional. If set to true, any specs from this + version will also be deleted. 
Otherwise, the + request will only work if the version has no + specs. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListVersionsRequest(proto.Message): + r"""The [ListVersions][google.cloud.apihub.v1.ApiHub.ListVersions] + method's request. + + Attributes: + parent (str): + Required. The parent which owns this collection of API + versions i.e., the API resource Format: + ``projects/{project}/locations/{location}/apis/{api}`` + filter (str): + Optional. An expression that filters the list of Versions. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>`` or ``=``. Filters are not case + sensitive. + + The following fields in the ``Version`` are eligible for + filtering: + + - ``display_name`` - The display name of the Version. + Allowed comparison operators: ``=``. + - ``create_time`` - The time at which the Version was + created. The value should be in the + (RFC3339)[https://tools.ietf.org/html/rfc3339] format. + Allowed comparison operators: ``>`` and ``<``. + - ``lifecycle.enum_values.values.id`` - The allowed value + id of the lifecycle attribute associated with the + Version. Allowed comparison operators: ``:``. + - ``lifecycle.enum_values.values.display_name`` - The + allowed value display name of the lifecycle attribute + associated with the Version. Allowed comparison + operators: ``:``. + - ``compliance.enum_values.values.id`` - The allowed value + id of the compliances attribute associated with the + Version. Allowed comparison operators: ``:``. + - ``compliance.enum_values.values.display_name`` - The + allowed value display name of the compliances attribute + associated with the Version. Allowed comparison + operators: ``:``. 
+ - ``accreditation.enum_values.values.id`` - The allowed + value id of the accreditations attribute associated with + the Version. Allowed comparison operators: ``:``. + - ``accreditation.enum_values.values.display_name`` - The + allowed value display name of the accreditations + attribute associated with the Version. Allowed comparison + operators: ``:``. + + Expressions are combined with either ``AND`` logic operator + or ``OR`` logical operator but not both of them together + i.e. only one of the ``AND`` or ``OR`` operator can be used + throughout the filter string and both the operators cannot + be used together. No other logical operators are supported. + At most three filter fields are allowed in the filter string + and if provided more than that then ``INVALID_ARGUMENT`` + error is returned by the API. + + Here are a few examples: + + - ``lifecycle.enum_values.values.id: preview-id`` - The + filter string specifies that the id of the allowed value + associated with the lifecycle attribute of the Version is + *preview-id*. + - ``lifecycle.enum_values.values.display_name: \"Preview Display Name\"`` + - The filter string specifies that the display name of + the allowed value associated with the lifecycle attribute + of the Version is ``Preview Display Name``. + - ``lifecycle.enum_values.values.id: preview-id AND create_time < \"2021-08-15T14:50:00Z\" AND create_time > \"2021-08-10T12:00:00Z\"`` + - The id of the allowed value associated with the + lifecycle attribute of the Version is *preview-id* and it + was created before *2021-08-15 14:50:00 UTC* and after + *2021-08-10 12:00:00 UTC*. + - ``compliance.enum_values.values.id: gdpr-id OR compliance.enum_values.values.id: pci-dss-id`` + + - The id of the allowed value associated with the + compliance attribute is *gdpr-id* or *pci-dss-id*. + page_size (int): + Optional. The maximum number of versions to + return. The service may return fewer than this + value. 
If unspecified, at most 50 versions will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListVersions`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters (except page_size) + provided to ``ListVersions`` must match the call that + provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListVersionsResponse(proto.Message): + r"""The [ListVersions][google.cloud.apihub.v1.ApiHub.ListVersions] + method's response. + + Attributes: + versions (MutableSequence[google.cloud.apihub_v1.types.Version]): + The versions corresponding to an API. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + versions: MutableSequence[common_fields.Version] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common_fields.Version, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateSpecRequest(proto.Message): + r"""The [CreateSpec][google.cloud.apihub.v1.ApiHub.CreateSpec] method's + request. + + Attributes: + parent (str): + Required. The parent resource for Spec. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + spec_id (str): + Optional. The ID to use for the spec, which will become the + final component of the spec's resource name. This field is + optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another spec in the API resource. 
+ - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid characters + are /[a-z][A-Z][0-9]-_/. + spec (google.cloud.apihub_v1.types.Spec): + Required. The spec to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + spec_id: str = proto.Field( + proto.STRING, + number=2, + ) + spec: common_fields.Spec = proto.Field( + proto.MESSAGE, + number=3, + message=common_fields.Spec, + ) + + +class GetSpecRequest(proto.Message): + r"""The [GetSpec][google.cloud.apihub.v1.ApiHub.GetSpec] method's + request. + + Attributes: + name (str): + Required. The name of the spec to retrieve. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateSpecRequest(proto.Message): + r"""The [UpdateSpec][google.cloud.apihub.v1.ApiHub.UpdateSpec] method's + request. + + Attributes: + spec (google.cloud.apihub_v1.types.Spec): + Required. The spec to update. + + The spec's ``name`` field is used to identify the spec to + update. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + spec: common_fields.Spec = proto.Field( + proto.MESSAGE, + number=1, + message=common_fields.Spec, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteSpecRequest(proto.Message): + r"""The [DeleteSpec][google.cloud.apihub.v1.ApiHub.DeleteSpec] method's + request. + + Attributes: + name (str): + Required. The name of the spec to delete. 
Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSpecsRequest(proto.Message): + r"""The [ListSpecs][ListSpecs] method's request. + + Attributes: + parent (str): + Required. The parent, which owns this collection of specs. + Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + filter (str): + Optional. An expression that filters the list of Specs. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string. The comparison operator must be one of: ``<``, + ``>``, ``:`` or ``=``. Filters are not case sensitive. + + The following fields in the ``Spec`` are eligible for + filtering: + + - ``display_name`` - The display name of the Spec. Allowed + comparison operators: ``=``. + - ``create_time`` - The time at which the Spec was created. + The value should be in the + (RFC3339)[https://tools.ietf.org/html/rfc3339] format. + Allowed comparison operators: ``>`` and ``<``. + - ``spec_type.enum_values.values.id`` - The allowed value + id of the spec_type attribute associated with the Spec. + Allowed comparison operators: ``:``. + - ``spec_type.enum_values.values.display_name`` - The + allowed value display name of the spec_type attribute + associated with the Spec. Allowed comparison operators: + ``:``. + - ``lint_response.json_values.values`` - The json value of + the lint_response attribute associated with the Spec. + Allowed comparison operators: ``:``. + - ``mime_type`` - The MIME type of the Spec. Allowed + comparison operators: ``=``. + + Expressions are combined with either ``AND`` logic operator + or ``OR`` logical operator but not both of them together + i.e. only one of the ``AND`` or ``OR`` operator can be used + throughout the filter string and both the operators cannot + be used together. No other logical operators are supported. 
+ At most three filter fields are allowed in the filter string + and if provided more than that then ``INVALID_ARGUMENT`` + error is returned by the API. + + Here are a few examples: + + - ``spec_type.enum_values.values.id: rest-id`` - The filter + string specifies that the id of the allowed value + associated with the spec_type attribute is *rest-id*. + - ``spec_type.enum_values.values.display_name: \"Rest Display Name\"`` + - The filter string specifies that the display name of + the allowed value associated with the spec_type attribute + is ``Rest Display Name``. + - ``spec_type.enum_values.values.id: grpc-id AND create_time < \"2021-08-15T14:50:00Z\" AND create_time > \"2021-08-10T12:00:00Z\"`` + - The id of the allowed value associated with the + spec_type attribute is *grpc-id* and the spec was created + before *2021-08-15 14:50:00 UTC* and after *2021-08-10 + 12:00:00 UTC*. + - ``spec_type.enum_values.values.id: rest-id OR spec_type.enum_values.values.id: grpc-id`` + + - The id of the allowed value associated with the spec_type + attribute is *rest-id* or *grpc-id*. + page_size (int): + Optional. The maximum number of specs to + return. The service may return fewer than this + value. If unspecified, at most 50 specs will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListSpecs`` call. Provide this to retrieve the subsequent + page. + + When paginating, all other parameters provided to + ``ListSpecs`` must match the call that provided the page + token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSpecsResponse(proto.Message): + r"""The [ListSpecs][google.cloud.apihub.v1.ApiHub.ListSpecs] method's + response. 
+ + Attributes: + specs (MutableSequence[google.cloud.apihub_v1.types.Spec]): + The specs corresponding to an API. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + specs: MutableSequence[common_fields.Spec] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common_fields.Spec, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetSpecContentsRequest(proto.Message): + r"""The [GetSpecContents][google.cloud.apihub.v1.ApiHub.GetSpecContents] + method's request. + + Attributes: + name (str): + Required. The name of the spec whose contents need to be + retrieved. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetApiOperationRequest(proto.Message): + r"""The [GetApiOperation][google.cloud.apihub.v1.ApiHub.GetApiOperation] + method's request. + + Attributes: + name (str): + Required. The name of the operation to retrieve. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListApiOperationsRequest(proto.Message): + r"""The + [ListApiOperations][google.cloud.apihub.v1.ApiHub.ListApiOperations] + method's request. + + Attributes: + parent (str): + Required. The parent which owns this collection of + operations i.e., the API version. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + filter (str): + Optional. An expression that filters the list of + ApiOperations. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string or a boolean. The comparison operator must be one of: + ``<``, ``>`` or ``=``. 
Filters are not case sensitive. + + The following fields in the ``ApiOperation`` are eligible + for filtering: + + - ``name`` - The ApiOperation resource name. Allowed + comparison operators: ``=``. + - ``details.http_operation.path.path`` - The http + operation's complete path relative to server endpoint. + Allowed comparison operators: ``=``. + - ``details.http_operation.method`` - The http operation + method type. Allowed comparison operators: ``=``. + - ``details.deprecated`` - Indicates if the ApiOperation is + deprecated. Allowed values are True / False indicating + the deprecation status of the ApiOperation. Allowed + comparison operators: ``=``. + - ``create_time`` - The time at which the ApiOperation was + created. The value should be in the + (RFC3339)[https://tools.ietf.org/html/rfc3339] format. + Allowed comparison operators: ``>`` and ``<``. + + Expressions are combined with either ``AND`` logic operator + or ``OR`` logical operator but not both of them together + i.e. only one of the ``AND`` or ``OR`` operator can be used + throughout the filter string and both the operators cannot + be used together. No other logical operators are supported. + At most three filter fields are allowed in the filter string + and if provided more than that then ``INVALID_ARGUMENT`` + error is returned by the API. + + Here are a few examples: + + - ``details.deprecated = True`` - The ApiOperation is + deprecated. + - ``details.http_operation.method = GET AND create_time < \"2021-08-15T14:50:00Z\" AND create_time > \"2021-08-10T12:00:00Z\"`` + - The method of the http operation of the ApiOperation is + *GET* and the spec was created before *2021-08-15 + 14:50:00 UTC* and after *2021-08-10 12:00:00 UTC*. + - ``details.http_operation.method = GET OR details.http_operation.method = POST``. + - The http operation of the method of ApiOperation is + *GET* or *POST*. + page_size (int): + Optional. The maximum number of operations to + return. 
The service may return fewer than this + value. If unspecified, at most 50 operations + will be returned. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListApiOperations`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters (except page_size) + provided to ``ListApiOperations`` must match the call that + provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListApiOperationsResponse(proto.Message): + r"""The + [ListApiOperations][google.cloud.apihub.v1.ApiHub.ListApiOperations] + method's response. + + Attributes: + api_operations (MutableSequence[google.cloud.apihub_v1.types.ApiOperation]): + The operations corresponding to an API version. Only + following field will be populated in the response: name, + spec, details.deprecated, details.http_operation.path.path, + details.http_operation.method and + details.documentation.external_uri. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + api_operations: MutableSequence[common_fields.ApiOperation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common_fields.ApiOperation, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetDefinitionRequest(proto.Message): + r"""The [GetDefinition][google.cloud.apihub.v1.ApiHub.GetDefinition] + method's request. + + Attributes: + name (str): + Required. The name of the definition to retrieve. 
Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/definitions/{definition}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDeploymentRequest(proto.Message): + r"""The + [CreateDeployment][google.cloud.apihub.v1.ApiHub.CreateDeployment] + method's request. + + Attributes: + parent (str): + Required. The parent resource for the deployment resource. + Format: ``projects/{project}/locations/{location}`` + deployment_id (str): + Optional. The ID to use for the deployment resource, which + will become the final component of the deployment's resource + name. This field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another deployment resource in the API hub. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid characters + are /[a-z][A-Z][0-9]-_/. + deployment (google.cloud.apihub_v1.types.Deployment): + Required. The deployment resource to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deployment_id: str = proto.Field( + proto.STRING, + number=2, + ) + deployment: common_fields.Deployment = proto.Field( + proto.MESSAGE, + number=3, + message=common_fields.Deployment, + ) + + +class GetDeploymentRequest(proto.Message): + r"""The [GetDeployment][google.cloud.apihub.v1.ApiHub.GetDeployment] + method's request. + + Attributes: + name (str): + Required. The name of the deployment resource to retrieve. + Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDeploymentRequest(proto.Message): + r"""The + [UpdateDeployment][google.cloud.apihub.v1.ApiHub.UpdateDeployment] + method's request. + + Attributes: + deployment (google.cloud.apihub_v1.types.Deployment): + Required. The deployment resource to update. 
+ + The deployment resource's ``name`` field is used to identify + the deployment resource to update. Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + deployment: common_fields.Deployment = proto.Field( + proto.MESSAGE, + number=1, + message=common_fields.Deployment, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteDeploymentRequest(proto.Message): + r"""The + [DeleteDeployment][google.cloud.apihub.v1.ApiHub.DeleteDeployment] + method's request. + + Attributes: + name (str): + Required. The name of the deployment resource to delete. + Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDeploymentsRequest(proto.Message): + r"""The [ListDeployments][google.cloud.apihub.v1.ApiHub.ListDeployments] + method's request. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + deployment resources. Format: + ``projects/{project}/locations/{location}`` + filter (str): + Optional. An expression that filters the list of + Deployments. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string. The comparison operator must be one of: ``<``, ``>`` + or ``=``. Filters are not case sensitive. + + The following fields in the ``Deployments`` are eligible for + filtering: + + - ``display_name`` - The display name of the Deployment. + Allowed comparison operators: ``=``. + - ``create_time`` - The time at which the Deployment was + created. The value should be in the + (RFC3339)[https://tools.ietf.org/html/rfc3339] format. + Allowed comparison operators: ``>`` and ``<``. + - ``resource_uri`` - A URI to the deployment resource. 
+ Allowed comparison operators: ``=``. + - ``api_versions`` - The API versions linked to this + deployment. Allowed comparison operators: ``:``. + - ``deployment_type.enum_values.values.id`` - The allowed + value id of the deployment_type attribute associated with + the Deployment. Allowed comparison operators: ``:``. + - ``deployment_type.enum_values.values.display_name`` - The + allowed value display name of the deployment_type + attribute associated with the Deployment. Allowed + comparison operators: ``:``. + - ``slo.string_values.values`` -The allowed string value of + the slo attribute associated with the deployment. Allowed + comparison operators: ``:``. + - ``environment.enum_values.values.id`` - The allowed value + id of the environment attribute associated with the + deployment. Allowed comparison operators: ``:``. + - ``environment.enum_values.values.display_name`` - The + allowed value display name of the environment attribute + associated with the deployment. Allowed comparison + operators: ``:``. + + Expressions are combined with either ``AND`` logic operator + or ``OR`` logical operator but not both of them together + i.e. only one of the ``AND`` or ``OR`` operator can be used + throughout the filter string and both the operators cannot + be used together. No other logical operators are supported. + At most three filter fields are allowed in the filter string + and if provided more than that then ``INVALID_ARGUMENT`` + error is returned by the API. + + Here are a few examples: + + - ``environment.enum_values.values.id: staging-id`` - The + allowed value id of the environment attribute associated + with the Deployment is *staging-id*. + - ``environment.enum_values.values.display_name: \"Staging Deployment\"`` + - The allowed value display name of the environment + attribute associated with the Deployment is + ``Staging Deployment``. 
+ - ``environment.enum_values.values.id: production-id AND create_time < \"2021-08-15T14:50:00Z\" AND create_time > \"2021-08-10T12:00:00Z\"`` + - The allowed value id of the environment attribute + associated with the Deployment is *production-id* and + Deployment was created before *2021-08-15 14:50:00 UTC* + and after *2021-08-10 12:00:00 UTC*. + - ``environment.enum_values.values.id: production-id OR slo.string_values.values: \"99.99%\"`` + + - The allowed value id of the environment attribute + Deployment is *production-id* or string value of the slo + attribute is *99.99%*. + page_size (int): + Optional. The maximum number of deployment + resources to return. The service may return + fewer than this value. If unspecified, at most + 50 deployments will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListDeployments`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters (except page_size) + provided to ``ListDeployments`` must match the call that + provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListDeploymentsResponse(proto.Message): + r"""The [ListDeployments][google.cloud.apihub.v1.ApiHub.ListDeployments] + method's response. + + Attributes: + deployments (MutableSequence[google.cloud.apihub_v1.types.Deployment]): + The deployment resources present in the API + hub. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + deployments: MutableSequence[common_fields.Deployment] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common_fields.Deployment, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateAttributeRequest(proto.Message): + r"""The [CreateAttribute][google.cloud.apihub.v1.ApiHub.CreateAttribute] + method's request. + + Attributes: + parent (str): + Required. The parent resource for Attribute. Format: + ``projects/{project}/locations/{location}`` + attribute_id (str): + Optional. The ID to use for the attribute, which will become + the final component of the attribute's resource name. This + field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another attribute resource in the API hub. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid characters + are /[a-z][A-Z][0-9]-_/. + attribute (google.cloud.apihub_v1.types.Attribute): + Required. The attribute to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + attribute_id: str = proto.Field( + proto.STRING, + number=2, + ) + attribute: common_fields.Attribute = proto.Field( + proto.MESSAGE, + number=3, + message=common_fields.Attribute, + ) + + +class GetAttributeRequest(proto.Message): + r"""The [GetAttribute][google.cloud.apihub.v1.ApiHub.GetAttribute] + method's request. + + Attributes: + name (str): + Required. The name of the attribute to retrieve. Format: + ``projects/{project}/locations/{location}/attributes/{attribute}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateAttributeRequest(proto.Message): + r"""The [UpdateAttribute][google.cloud.apihub.v1.ApiHub.UpdateAttribute] + method's request. + + Attributes: + attribute (google.cloud.apihub_v1.types.Attribute): + Required. The attribute to update. 
+ + The attribute's ``name`` field is used to identify the + attribute to update. Format: + ``projects/{project}/locations/{location}/attributes/{attribute}`` + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + attribute: common_fields.Attribute = proto.Field( + proto.MESSAGE, + number=1, + message=common_fields.Attribute, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteAttributeRequest(proto.Message): + r"""The [DeleteAttribute][google.cloud.apihub.v1.ApiHub.DeleteAttribute] + method's request. + + Attributes: + name (str): + Required. The name of the attribute to delete. Format: + ``projects/{project}/locations/{location}/attributes/{attribute}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListAttributesRequest(proto.Message): + r"""The [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] + method's request. + + Attributes: + parent (str): + Required. The parent resource for Attribute. Format: + ``projects/{project}/locations/{location}`` + filter (str): + Optional. An expression that filters the list of Attributes. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string or a boolean. The comparison operator must be one of: + ``<``, ``>`` or ``=``. Filters are not case sensitive. + + The following fields in the ``Attribute`` are eligible for + filtering: + + - ``display_name`` - The display name of the Attribute. + Allowed comparison operators: ``=``. + - ``definition_type`` - The definition type of the + attribute. Allowed comparison operators: ``=``. + - ``scope`` - The scope of the attribute. Allowed + comparison operators: ``=``. + - ``data_type`` - The type of the data of the attribute. + Allowed comparison operators: ``=``. 
+ - ``mandatory`` - Denotes whether the attribute is + mandatory or not. Allowed comparison operators: ``=``. + - ``create_time`` - The time at which the Attribute was + created. The value should be in the + (RFC3339)[https://tools.ietf.org/html/rfc3339] format. + Allowed comparison operators: ``>`` and ``<``. + + Expressions are combined with either ``AND`` logic operator + or ``OR`` logical operator but not both of them together + i.e. only one of the ``AND`` or ``OR`` operator can be used + throughout the filter string and both the operators cannot + be used together. No other logical operators are supported. + At most three filter fields are allowed in the filter string + and if provided more than that then ``INVALID_ARGUMENT`` + error is returned by the API. + + Here are a few examples: + + - ``display_name = production`` - - The display name of the + attribute is *production*. + - ``(display_name = production) AND (create_time < \"2021-08-15T14:50:00Z\") AND (create_time > \"2021-08-10T12:00:00Z\")`` + - The display name of the attribute is *production* and + the attribute was created before *2021-08-15 14:50:00 + UTC* and after *2021-08-10 12:00:00 UTC*. + - ``display_name = production OR scope = api`` - The + attribute where the display name is *production* or the + scope is *api*. + page_size (int): + Optional. The maximum number of attribute + resources to return. The service may return + fewer than this value. If unspecified, at most + 50 attributes will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListAttributes`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAttributes`` must match the call that provided the + page token. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListAttributesResponse(proto.Message): + r"""The [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] + method's response. + + Attributes: + attributes (MutableSequence[google.cloud.apihub_v1.types.Attribute]): + The list of all attributes. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + attributes: MutableSequence[common_fields.Attribute] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common_fields.Attribute, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SearchResourcesRequest(proto.Message): + r"""The [SearchResources][google.cloud.apihub.v1.ApiHub.SearchResources] + method's request. + + Attributes: + location (str): + Required. The resource name of the location which will be of + the type ``projects/{project_id}/locations/{location_id}``. + This field is used to identify the instance of API-Hub in + which resources should be searched. + query (str): + Required. The free text search query. This + query can contain keywords which could be + related to any detail of the API-Hub resources + such display names, descriptions, attributes + etc. + filter (str): + Optional. An expression that filters the list of search + results. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be ``=``. Filters are not case sensitive. 
+ + The following field names are eligible for filtering: \* + ``resource_type`` - The type of resource in the search + results. Must be one of the following: ``Api``, + ``ApiOperation``, ``Deployment``, ``Definition``, ``Spec`` + or ``Version``. This field can only be specified once in the + filter. + + Here is an example: + + - ``resource_type = Api`` - The resource_type is *Api*. + page_size (int): + Optional. The maximum number of search results to return. + The service may return fewer than this value. If unspecified + at most 10 search results will be returned. If value is + negative then ``INVALID_ARGUMENT`` error is returned. The + maximum value is 25; values above 25 will be coerced to 25. + While paginating, you can specify a new page size parameter + for each page of search results to be listed. + page_token (str): + Optional. A page token, received from a previous + [SearchResources][SearchResources] call. Specify this + parameter to retrieve the next page of transactions. + + When paginating, you must specify the ``page_token`` + parameter and all the other parameters except + [page_size][google.cloud.apihub.v1.SearchResourcesRequest.page_size] + should be specified with the same value which was used in + the previous call. If the other fields are set with a + different value than the previous call then + ``INVALID_ARGUMENT`` error is returned. + """ + + location: str = proto.Field( + proto.STRING, + number=1, + ) + query: str = proto.Field( + proto.STRING, + number=2, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + ) + page_token: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ApiHubResource(proto.Message): + r"""ApiHubResource is one of the resources such as Api, + Operation, Deployment, Definition, Spec and Version resources + stored in API-Hub. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + api (google.cloud.apihub_v1.types.Api): + This represents Api resource in search results. Only name, + display_name, description and owner fields are populated in + search results. + + This field is a member of `oneof`_ ``resource``. + operation (google.cloud.apihub_v1.types.ApiOperation): + This represents ApiOperation resource in + search results. Only name, and description + fields are populated in search results. + + This field is a member of `oneof`_ ``resource``. + deployment (google.cloud.apihub_v1.types.Deployment): + This represents Deployment resource in search results. Only + name, display_name and description fields are populated in + search results. + + This field is a member of `oneof`_ ``resource``. + spec (google.cloud.apihub_v1.types.Spec): + This represents Spec resource in search results. Only name, + display_name and description fields are populated in search + results. + + This field is a member of `oneof`_ ``resource``. + definition (google.cloud.apihub_v1.types.Definition): + This represents Definition resource in search + results. Only name field is populated in search + results. + + This field is a member of `oneof`_ ``resource``. + version (google.cloud.apihub_v1.types.Version): + This represents Version resource in search results. Only + name, display_name and description fields are populated in + search results. + + This field is a member of `oneof`_ ``resource``. 
+ """ + + api: common_fields.Api = proto.Field( + proto.MESSAGE, + number=1, + oneof="resource", + message=common_fields.Api, + ) + operation: common_fields.ApiOperation = proto.Field( + proto.MESSAGE, + number=2, + oneof="resource", + message=common_fields.ApiOperation, + ) + deployment: common_fields.Deployment = proto.Field( + proto.MESSAGE, + number=3, + oneof="resource", + message=common_fields.Deployment, + ) + spec: common_fields.Spec = proto.Field( + proto.MESSAGE, + number=4, + oneof="resource", + message=common_fields.Spec, + ) + definition: common_fields.Definition = proto.Field( + proto.MESSAGE, + number=5, + oneof="resource", + message=common_fields.Definition, + ) + version: common_fields.Version = proto.Field( + proto.MESSAGE, + number=6, + oneof="resource", + message=common_fields.Version, + ) + + +class SearchResult(proto.Message): + r"""Represents the search results. + + Attributes: + resource (google.cloud.apihub_v1.types.ApiHubResource): + This represents the ApiHubResource. + Note: Only selected fields of the resources are + populated in response. + """ + + resource: "ApiHubResource" = proto.Field( + proto.MESSAGE, + number=1, + message="ApiHubResource", + ) + + +class SearchResourcesResponse(proto.Message): + r"""Response for the + [SearchResources][google.cloud.apihub.v1.ApiHub.SearchResources] + method. + + Attributes: + search_results (MutableSequence[google.cloud.apihub_v1.types.SearchResult]): + List of search results according to the + filter and search query specified. The order of + search results represents the ranking. + next_page_token (str): + Pass this token in the + [SearchResourcesRequest][google.cloud.apihub.v1.SearchResourcesRequest] + to continue to list results. If all results have been + returned, this field is an empty string or not present in + the response. 
+ """ + + @property + def raw_page(self): + return self + + search_results: MutableSequence["SearchResult"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SearchResult", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDependencyRequest(proto.Message): + r"""The + [CreateDependency][google.cloud.apihub.v1.ApiHubDependencies.CreateDependency] + method's request. + + Attributes: + parent (str): + Required. The parent resource for the dependency resource. + Format: ``projects/{project}/locations/{location}`` + dependency_id (str): + Optional. The ID to use for the dependency resource, which + will become the final component of the dependency's resource + name. This field is optional. + + - If provided, the same will be used. The service will + throw an error if duplicate id is provided by the client. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid characters + are ``[a-z][A-Z][0-9]-_``. + dependency (google.cloud.apihub_v1.types.Dependency): + Required. The dependency resource to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + dependency_id: str = proto.Field( + proto.STRING, + number=2, + ) + dependency: common_fields.Dependency = proto.Field( + proto.MESSAGE, + number=3, + message=common_fields.Dependency, + ) + + +class GetDependencyRequest(proto.Message): + r"""The [GetDependency][.ApiHubDependencies.GetDependency] method's + request. + + Attributes: + name (str): + Required. The name of the dependency resource to retrieve. + Format: + ``projects/{project}/locations/{location}/dependencies/{dependency}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDependencyRequest(proto.Message): + r"""The + [UpdateDependency][google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency] + method's request. + + Attributes: + dependency (google.cloud.apihub_v1.types.Dependency): + Required. 
The dependency resource to update. + + The dependency's ``name`` field is used to identify the + dependency to update. Format: + ``projects/{project}/locations/{location}/dependencies/{dependency}`` + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + dependency: common_fields.Dependency = proto.Field( + proto.MESSAGE, + number=1, + message=common_fields.Dependency, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteDependencyRequest(proto.Message): + r"""The + [DeleteDependency][google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency] + method's request. + + Attributes: + name (str): + Required. The name of the dependency resource to delete. + Format: + ``projects/{project}/locations/{location}/dependencies/{dependency}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDependenciesRequest(proto.Message): + r"""The + [ListDependencies][google.cloud.apihub.v1.ApiHubDependencies.ListDependencies] + method's request. + + Attributes: + parent (str): + Required. The parent which owns this collection of + dependency resources. Format: + ``projects/{project}/locations/{location}`` + filter (str): + Optional. An expression that filters the list of + Dependencies. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string. Allowed comparison operator is ``=``. Filters are + not case sensitive. + + The following fields in the ``Dependency`` are eligible for + filtering: + + - ``consumer.operation_resource_name`` - The operation + resource name for the consumer entity involved in a + dependency. Allowed comparison operators: ``=``. + - ``consumer.external_api_resource_name`` - The external + api resource name for the consumer entity involved in a + dependency. Allowed comparison operators: ``=``. 
+ - ``supplier.operation_resource_name`` - The operation + resource name for the supplier entity involved in a + dependency. Allowed comparison operators: ``=``. + - ``supplier.external_api_resource_name`` - The external + api resource name for the supplier entity involved in a + dependency. Allowed comparison operators: ``=``. + + Expressions are combined with either ``AND`` logic operator + or ``OR`` logical operator but not both of them together + i.e. only one of the ``AND`` or ``OR`` operator can be used + throughout the filter string and both the operators cannot + be used together. No other logical operators are supported. + At most three filter fields are allowed in the filter string + and if provided more than that then ``INVALID_ARGUMENT`` + error is returned by the API. + + For example, + ``consumer.operation_resource_name = \"projects/p1/locations/global/apis/a1/versions/v1/operations/o1\" OR supplier.operation_resource_name = \"projects/p1/locations/global/apis/a1/versions/v1/operations/o1\"`` + - The dependencies with either consumer or supplier + operation resource name as + *projects/p1/locations/global/apis/a1/versions/v1/operations/o1*. + page_size (int): + Optional. The maximum number of dependency + resources to return. The service may return + fewer than this value. If unspecified, at most + 50 dependencies will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListDependencies`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListDependencies`` must match the call that provided the + page token. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListDependenciesResponse(proto.Message): + r"""The + [ListDependencies][google.cloud.apihub.v1.ApiHubDependencies.ListDependencies] + method's response. + + Attributes: + dependencies (MutableSequence[google.cloud.apihub_v1.types.Dependency]): + The dependency resources present in the API + hub. Only following field will be populated in + the response: name. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + dependencies: MutableSequence[common_fields.Dependency] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common_fields.Dependency, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateExternalApiRequest(proto.Message): + r"""The + [CreateExternalApi][google.cloud.apihub.v1.ApiHub.CreateExternalApi] + method's request. + + Attributes: + parent (str): + Required. The parent resource for the External API resource. + Format: ``projects/{project}/locations/{location}`` + external_api_id (str): + Optional. The ID to use for the External API resource, which + will become the final component of the External API's + resource name. This field is optional. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another External API resource in the API hub. + - If not provided, a system generated id will be used. + + This value should be 4-500 characters, and valid characters + are /[a-z][A-Z][0-9]-_/. + external_api (google.cloud.apihub_v1.types.ExternalApi): + Required. The External API resource to + create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + external_api_id: str = proto.Field( + proto.STRING, + number=2, + ) + external_api: common_fields.ExternalApi = proto.Field( + proto.MESSAGE, + number=3, + message=common_fields.ExternalApi, + ) + + +class GetExternalApiRequest(proto.Message): + r"""The [GetExternalApi][google.cloud.apihub.v1.ApiHub.GetExternalApi] + method's request. + + Attributes: + name (str): + Required. The name of the External API resource to retrieve. + Format: + ``projects/{project}/locations/{location}/externalApis/{externalApi}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateExternalApiRequest(proto.Message): + r"""The + [UpdateExternalApi][google.cloud.apihub.v1.ApiHub.UpdateExternalApi] + method's request. + + Attributes: + external_api (google.cloud.apihub_v1.types.ExternalApi): + Required. The External API resource to update. + + The External API resource's ``name`` field is used to + identify the External API resource to update. Format: + ``projects/{project}/locations/{location}/externalApis/{externalApi}`` + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + external_api: common_fields.ExternalApi = proto.Field( + proto.MESSAGE, + number=1, + message=common_fields.ExternalApi, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteExternalApiRequest(proto.Message): + r"""The + [DeleteExternalApi][google.cloud.apihub.v1.ApiHub.DeleteExternalApi] + method's request. + + Attributes: + name (str): + Required. The name of the External API resource to delete. 
+ Format: + ``projects/{project}/locations/{location}/externalApis/{externalApi}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListExternalApisRequest(proto.Message): + r"""The + [ListExternalApis][google.cloud.apihub.v1.ApiHub.ListExternalApis] + method's request. + + Attributes: + parent (str): + Required. The parent, which owns this collection of External + API resources. Format: + ``projects/{project}/locations/{location}`` + page_size (int): + Optional. The maximum number of External API + resources to return. The service may return + fewer than this value. If unspecified, at most + 50 ExternalApis will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListExternalApis`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters (except page_size) + provided to ``ListExternalApis`` must match the call that + provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListExternalApisResponse(proto.Message): + r"""The + [ListExternalApis][google.cloud.apihub.v1.ApiHub.ListExternalApis] + method's response. + + Attributes: + external_apis (MutableSequence[google.cloud.apihub_v1.types.ExternalApi]): + The External API resources present in the API hub. Only + following fields will be populated in the response: name, + display_name, documentation.external_uri. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + external_apis: MutableSequence[common_fields.ExternalApi] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common_fields.ExternalApi, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/types/common_fields.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/common_fields.py new file mode 100644 index 000000000000..942da69052c0 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/common_fields.py @@ -0,0 +1,2099 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.apihub.v1", + manifest={ + "LintState", + "Linter", + "Severity", + "Api", + "Version", + "Spec", + "Deployment", + "ApiOperation", + "Definition", + "Attribute", + "SpecContents", + "SpecDetails", + "OpenApiSpecDetails", + "OperationDetails", + "HttpOperation", + "Path", + "Schema", + "Owner", + "Documentation", + "AttributeValues", + "Dependency", + "DependencyEntityReference", + "DependencyErrorDetail", + "LintResponse", + "Issue", + "Range", + "Point", + "OperationMetadata", + "ApiHubInstance", + "ExternalApi", + }, +) + + +class LintState(proto.Enum): + r"""Lint state represents success or failure for linting. + + Values: + LINT_STATE_UNSPECIFIED (0): + Lint state unspecified. + LINT_STATE_SUCCESS (1): + Linting was completed successfully. + LINT_STATE_ERROR (2): + Linting encountered errors. + """ + LINT_STATE_UNSPECIFIED = 0 + LINT_STATE_SUCCESS = 1 + LINT_STATE_ERROR = 2 + + +class Linter(proto.Enum): + r"""Enumeration of linter types. + + Values: + LINTER_UNSPECIFIED (0): + Linter type unspecified. + SPECTRAL (1): + Linter type spectral. + OTHER (2): + Linter type other. + """ + LINTER_UNSPECIFIED = 0 + SPECTRAL = 1 + OTHER = 2 + + +class Severity(proto.Enum): + r"""Severity of the issue. + + Values: + SEVERITY_UNSPECIFIED (0): + Severity unspecified. + SEVERITY_ERROR (1): + Severity error. + SEVERITY_WARNING (2): + Severity warning. + SEVERITY_INFO (3): + Severity info. + SEVERITY_HINT (4): + Severity hint. + """ + SEVERITY_UNSPECIFIED = 0 + SEVERITY_ERROR = 1 + SEVERITY_WARNING = 2 + SEVERITY_INFO = 3 + SEVERITY_HINT = 4 + + +class Api(proto.Message): + r"""An API resource in the API Hub. + + Attributes: + name (str): + Identifier. The name of the API resource in the API Hub. 
+ + Format: + ``projects/{project}/locations/{location}/apis/{api}`` + display_name (str): + Required. The display name of the API + resource. + description (str): + Optional. The description of the API + resource. + documentation (google.cloud.apihub_v1.types.Documentation): + Optional. The documentation for the API + resource. + owner (google.cloud.apihub_v1.types.Owner): + Optional. Owner details for the API resource. + versions (MutableSequence[str]): + Output only. The list of versions present in an API + resource. Note: An API resource can be associated with more + than 1 version. Format is + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the API + resource was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the API + resource was last updated. + target_user (google.cloud.apihub_v1.types.AttributeValues): + Optional. The target users for the API. This maps to the + following system defined attribute: + ``projects/{project}/locations/{location}/attributes/system-target-user`` + attribute. The number of values for this attribute will be + based on the cardinality of the attribute. The same can be + retrieved via GetAttribute API. All values should be from + the list of allowed values defined for the attribute. + team (google.cloud.apihub_v1.types.AttributeValues): + Optional. The team owning the API. This maps to the + following system defined attribute: + ``projects/{project}/locations/{location}/attributes/system-team`` + attribute. The number of values for this attribute will be + based on the cardinality of the attribute. The same can be + retrieved via GetAttribute API. All values should be from + the list of allowed values defined for the attribute. + business_unit (google.cloud.apihub_v1.types.AttributeValues): + Optional. The business unit owning the API. 
This maps to the + following system defined attribute: + ``projects/{project}/locations/{location}/attributes/system-business-unit`` + attribute. The number of values for this attribute will be + based on the cardinality of the attribute. The same can be + retrieved via GetAttribute API. All values should be from + the list of allowed values defined for the attribute. + maturity_level (google.cloud.apihub_v1.types.AttributeValues): + Optional. The maturity level of the API. This maps to the + following system defined attribute: + ``projects/{project}/locations/{location}/attributes/system-maturity-level`` + attribute. The number of values for this attribute will be + based on the cardinality of the attribute. The same can be + retrieved via GetAttribute API. All values should be from + the list of allowed values defined for the attribute. + attributes (MutableMapping[str, google.cloud.apihub_v1.types.AttributeValues]): + Optional. The list of user defined attributes associated + with the API resource. The key is the attribute name. It + will be of the format: + ``projects/{project}/locations/{location}/attributes/{attribute}``. + The value is the attribute values associated with the + resource. + api_style (google.cloud.apihub_v1.types.AttributeValues): + Optional. The style of the API. This maps to the following + system defined attribute: + ``projects/{project}/locations/{location}/attributes/system-api-style`` + attribute. The number of values for this attribute will be + based on the cardinality of the attribute. The same can be + retrieved via GetAttribute API. All values should be from + the list of allowed values defined for the attribute. + selected_version (str): + Optional. The selected version for an API resource. This can + be used when special handling is needed on client side for + particular version of the API. 
Format is + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + documentation: "Documentation" = proto.Field( + proto.MESSAGE, + number=4, + message="Documentation", + ) + owner: "Owner" = proto.Field( + proto.MESSAGE, + number=5, + message="Owner", + ) + versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + target_user: "AttributeValues" = proto.Field( + proto.MESSAGE, + number=9, + message="AttributeValues", + ) + team: "AttributeValues" = proto.Field( + proto.MESSAGE, + number=10, + message="AttributeValues", + ) + business_unit: "AttributeValues" = proto.Field( + proto.MESSAGE, + number=11, + message="AttributeValues", + ) + maturity_level: "AttributeValues" = proto.Field( + proto.MESSAGE, + number=12, + message="AttributeValues", + ) + attributes: MutableMapping[str, "AttributeValues"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=13, + message="AttributeValues", + ) + api_style: "AttributeValues" = proto.Field( + proto.MESSAGE, + number=14, + message="AttributeValues", + ) + selected_version: str = proto.Field( + proto.STRING, + number=15, + ) + + +class Version(proto.Message): + r"""Represents a version of the API resource in API hub. This is + also referred to as the API version. + + Attributes: + name (str): + Identifier. The name of the version. + + Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}`` + display_name (str): + Required. The display name of the version. + description (str): + Optional. 
The description of the version. + documentation (google.cloud.apihub_v1.types.Documentation): + Optional. The documentation of the version. + specs (MutableSequence[str]): + Output only. The specs associated with this version. Note + that an API version can be associated with multiple specs. + Format is + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + api_operations (MutableSequence[str]): + Output only. The operations contained in the API version. + These operations will be added to the version when a new + spec is added or when an existing spec is updated. Format is + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`` + definitions (MutableSequence[str]): + Output only. The definitions contained in the API version. + These definitions will be added to the version when a new + spec is added or when an existing spec is updated. Format is + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/definitions/{definition}`` + deployments (MutableSequence[str]): + Optional. The deployments linked to this API version. Note: + A particular API version could be deployed to multiple + deployments (for dev deployment, UAT deployment, etc) Format + is + ``projects/{project}/locations/{location}/deployments/{deployment}`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the version + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the version + was last updated. + lifecycle (google.cloud.apihub_v1.types.AttributeValues): + Optional. The lifecycle of the API version. This maps to the + following system defined attribute: + ``projects/{project}/locations/{location}/attributes/system-lifecycle`` + attribute. The number of values for this attribute will be + based on the cardinality of the attribute. The same can be + retrieved via GetAttribute API. 
All values should be from + the list of allowed values defined for the attribute. + compliance (google.cloud.apihub_v1.types.AttributeValues): + Optional. The compliance associated with the API version. + This maps to the following system defined attribute: + ``projects/{project}/locations/{location}/attributes/system-compliance`` + attribute. The number of values for this attribute will be + based on the cardinality of the attribute. The same can be + retrieved via GetAttribute API. All values should be from + the list of allowed values defined for the attribute. + accreditation (google.cloud.apihub_v1.types.AttributeValues): + Optional. The accreditations associated with the API + version. This maps to the following system defined + attribute: + ``projects/{project}/locations/{location}/attributes/system-accreditation`` + attribute. The number of values for this attribute will be + based on the cardinality of the attribute. The same can be + retrieved via GetAttribute API. All values should be from + the list of allowed values defined for the attribute. + attributes (MutableMapping[str, google.cloud.apihub_v1.types.AttributeValues]): + Optional. The list of user defined attributes associated + with the Version resource. The key is the attribute name. It + will be of the format: + ``projects/{project}/locations/{location}/attributes/{attribute}``. + The value is the attribute values associated with the + resource. + selected_deployment (str): + Optional. The selected deployment for a Version resource. + This can be used when special handling is needed on client + side for a particular deployment linked to the version. 
+ Format is + ``projects/{project}/locations/{location}/deployments/{deployment}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + documentation: "Documentation" = proto.Field( + proto.MESSAGE, + number=4, + message="Documentation", + ) + specs: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + api_operations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + definitions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + deployments: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + lifecycle: "AttributeValues" = proto.Field( + proto.MESSAGE, + number=11, + message="AttributeValues", + ) + compliance: "AttributeValues" = proto.Field( + proto.MESSAGE, + number=12, + message="AttributeValues", + ) + accreditation: "AttributeValues" = proto.Field( + proto.MESSAGE, + number=13, + message="AttributeValues", + ) + attributes: MutableMapping[str, "AttributeValues"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=14, + message="AttributeValues", + ) + selected_deployment: str = proto.Field( + proto.STRING, + number=16, + ) + + +class Spec(proto.Message): + r"""Represents a spec associated with an API version in the API + Hub. Note that specs of various types can be uploaded, however + parsing of details is supported for OpenAPI spec currently. + + Attributes: + name (str): + Identifier. The name of the spec. + + Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + display_name (str): + Required. 
The display name of the spec. + This can contain the file name of the spec. + spec_type (google.cloud.apihub_v1.types.AttributeValues): + Required. The type of spec. The value should be one of the + allowed values defined for + ``projects/{project}/locations/{location}/attributes/system-spec-type`` + attribute. The number of values for this attribute will be + based on the cardinality of the attribute. The same can be + retrieved via GetAttribute API. + + Note, this field is mandatory if content is provided. + contents (google.cloud.apihub_v1.types.SpecContents): + Optional. Input only. The contents of the + uploaded spec. + details (google.cloud.apihub_v1.types.SpecDetails): + Output only. Details parsed from the spec. + source_uri (str): + Optional. The URI of the spec source in case + file is uploaded from an external version + control system. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the spec was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the spec was + last updated. + lint_response (google.cloud.apihub_v1.types.LintResponse): + Optional. The lint response for the spec. + attributes (MutableMapping[str, google.cloud.apihub_v1.types.AttributeValues]): + Optional. The list of user defined attributes associated + with the spec. The key is the attribute name. It will be of + the format: + ``projects/{project}/locations/{location}/attributes/{attribute}``. + The value is the attribute values associated with the + resource. + documentation (google.cloud.apihub_v1.types.Documentation): + Optional. The documentation of the spec. For OpenAPI spec, + this will be populated from ``externalDocs`` in OpenAPI + spec. + parsing_mode (google.cloud.apihub_v1.types.Spec.ParsingMode): + Optional. Input only. Enum specifying the + parsing mode for OpenAPI Specification (OAS) + parsing. 
+ """ + + class ParsingMode(proto.Enum): + r"""Specifies the parsing mode for API specifications during creation + and update. + + - ``RELAXED``: Parsing errors in the specification content do not + fail the API call. + - ``STRICT``: Parsing errors in the specification content result in + failure of the API call. If not specified, defaults to + ``RELAXED``. + + Values: + PARSING_MODE_UNSPECIFIED (0): + Defaults to ``RELAXED``. + RELAXED (1): + Parsing of the Spec on create and update is + relaxed, meaning that parsing errors the spec + contents will not fail the API call. + STRICT (2): + Parsing of the Spec on create and update is + strict, meaning that parsing errors in the spec + contents will fail the API call. + """ + PARSING_MODE_UNSPECIFIED = 0 + RELAXED = 1 + STRICT = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + spec_type: "AttributeValues" = proto.Field( + proto.MESSAGE, + number=3, + message="AttributeValues", + ) + contents: "SpecContents" = proto.Field( + proto.MESSAGE, + number=4, + message="SpecContents", + ) + details: "SpecDetails" = proto.Field( + proto.MESSAGE, + number=5, + message="SpecDetails", + ) + source_uri: str = proto.Field( + proto.STRING, + number=6, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + lint_response: "LintResponse" = proto.Field( + proto.MESSAGE, + number=9, + message="LintResponse", + ) + attributes: MutableMapping[str, "AttributeValues"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=10, + message="AttributeValues", + ) + documentation: "Documentation" = proto.Field( + proto.MESSAGE, + number=11, + message="Documentation", + ) + parsing_mode: ParsingMode = proto.Field( + proto.ENUM, + number=12, + enum=ParsingMode, + ) + + 
+class Deployment(proto.Message): + r"""Details of the deployment where APIs are hosted. + A deployment could represent an Apigee proxy, API gateway, other + Google Cloud services or non-Google Cloud services as well. A + deployment entity is a root level entity in the API hub and + exists independent of any API. + + Attributes: + name (str): + Identifier. The name of the deployment. + + Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + display_name (str): + Required. The display name of the deployment. + description (str): + Optional. The description of the deployment. + documentation (google.cloud.apihub_v1.types.Documentation): + Optional. The documentation of the + deployment. + deployment_type (google.cloud.apihub_v1.types.AttributeValues): + Required. The type of deployment. This maps to the following + system defined attribute: + ``projects/{project}/locations/{location}/attributes/system-deployment-type`` + attribute. The number of values for this attribute will be + based on the cardinality of the attribute. The same can be + retrieved via GetAttribute API. All values should be from + the list of allowed values defined for the attribute. + resource_uri (str): + Required. A URI to the runtime resource. This URI can be + used to manage the resource. For example, if the runtime + resource is of type APIGEE_PROXY, then this field will + contain the URI to the management UI of the proxy. + endpoints (MutableSequence[str]): + Required. The endpoints at which this + deployment resource is listening for API + requests. This could be a list of complete URIs, + hostnames or an IP addresses. + api_versions (MutableSequence[str]): + Output only. The API versions linked to this + deployment. Note: A particular deployment could + be linked to multiple different API versions (of + same or different APIs). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the deployment + was created. 
+ update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the deployment + was last updated. + slo (google.cloud.apihub_v1.types.AttributeValues): + Optional. The SLO for this deployment. This maps to the + following system defined attribute: + ``projects/{project}/locations/{location}/attributes/system-slo`` + attribute. The number of values for this attribute will be + based on the cardinality of the attribute. The same can be + retrieved via GetAttribute API. All values should be from + the list of allowed values defined for the attribute. + environment (google.cloud.apihub_v1.types.AttributeValues): + Optional. The environment mapping to this deployment. This + maps to the following system defined attribute: + ``projects/{project}/locations/{location}/attributes/system-environment`` + attribute. The number of values for this attribute will be + based on the cardinality of the attribute. The same can be + retrieved via GetAttribute API. All values should be from + the list of allowed values defined for the attribute. + attributes (MutableMapping[str, google.cloud.apihub_v1.types.AttributeValues]): + Optional. The list of user defined attributes associated + with the deployment resource. The key is the attribute name. + It will be of the format: + ``projects/{project}/locations/{location}/attributes/{attribute}``. + The value is the attribute values associated with the + resource. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + documentation: "Documentation" = proto.Field( + proto.MESSAGE, + number=4, + message="Documentation", + ) + deployment_type: "AttributeValues" = proto.Field( + proto.MESSAGE, + number=5, + message="AttributeValues", + ) + resource_uri: str = proto.Field( + proto.STRING, + number=6, + ) + endpoints: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + api_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + slo: "AttributeValues" = proto.Field( + proto.MESSAGE, + number=11, + message="AttributeValues", + ) + environment: "AttributeValues" = proto.Field( + proto.MESSAGE, + number=12, + message="AttributeValues", + ) + attributes: MutableMapping[str, "AttributeValues"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=13, + message="AttributeValues", + ) + + +class ApiOperation(proto.Message): + r"""Represents an operation contained in an API version in the + API Hub. An operation is added/updated/deleted in an API version + when a new spec is added or an existing spec is updated/deleted + in a version. Currently, an operation will be created only + corresponding to OpenAPI spec as parsing is supported for + OpenAPI spec. + + Attributes: + name (str): + Identifier. The name of the operation. + + Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`` + spec (str): + Output only. The name of the spec from where the operation + was parsed. 
Format is + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + details (google.cloud.apihub_v1.types.OperationDetails): + Output only. Operation details. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the operation + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the operation + was last updated. + attributes (MutableMapping[str, google.cloud.apihub_v1.types.AttributeValues]): + Optional. The list of user defined attributes associated + with the API operation resource. The key is the attribute + name. It will be of the format: + ``projects/{project}/locations/{location}/attributes/{attribute}``. + The value is the attribute values associated with the + resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + spec: str = proto.Field( + proto.STRING, + number=2, + ) + details: "OperationDetails" = proto.Field( + proto.MESSAGE, + number=3, + message="OperationDetails", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + attributes: MutableMapping[str, "AttributeValues"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=6, + message="AttributeValues", + ) + + +class Definition(proto.Message): + r"""Represents a definition for example schema, request, response + definitions contained in an API version. A definition is + added/updated/deleted in an API version when a new spec is added or + an existing spec is updated/deleted in a version. Currently, + definition will be created only corresponding to OpenAPI spec as + parsing is supported for OpenAPI spec. Also, within OpenAPI spec, + only ``schema`` object is supported. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + schema (google.cloud.apihub_v1.types.Schema): + Output only. The value of a schema + definition. + + This field is a member of `oneof`_ ``value``. + name (str): + Identifier. The name of the definition. + + Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/definitions/{definition}`` + spec (str): + Output only. The name of the spec from where the definition + was parsed. Format is + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + type_ (google.cloud.apihub_v1.types.Definition.Type): + Output only. The type of the definition. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the definition + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the definition + was last updated. + attributes (MutableMapping[str, google.cloud.apihub_v1.types.AttributeValues]): + Optional. The list of user defined attributes associated + with the definition resource. The key is the attribute name. + It will be of the format: + ``projects/{project}/locations/{location}/attributes/{attribute}``. + The value is the attribute values associated with the + resource. + """ + + class Type(proto.Enum): + r"""Enumeration of definition types. + + Values: + TYPE_UNSPECIFIED (0): + Definition type unspecified. + SCHEMA (1): + Definition type schema. 
+ """ + TYPE_UNSPECIFIED = 0 + SCHEMA = 1 + + schema: "Schema" = proto.Field( + proto.MESSAGE, + number=4, + oneof="value", + message="Schema", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + spec: str = proto.Field( + proto.STRING, + number=2, + ) + type_: Type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + attributes: MutableMapping[str, "AttributeValues"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=7, + message="AttributeValues", + ) + + +class Attribute(proto.Message): + r"""An attribute in the API Hub. + An attribute is a name value pair which can be attached to + different resources in the API hub based on the scope of the + attribute. Attributes can either be pre-defined by the API Hub + or created by users. + + Attributes: + name (str): + Identifier. The name of the attribute in the API Hub. + + Format: + ``projects/{project}/locations/{location}/attributes/{attribute}`` + display_name (str): + Required. The display name of the attribute. + description (str): + Optional. The description of the attribute. + definition_type (google.cloud.apihub_v1.types.Attribute.DefinitionType): + Output only. The definition type of the + attribute. + scope (google.cloud.apihub_v1.types.Attribute.Scope): + Required. The scope of the attribute. It + represents the resource in the API Hub to which + the attribute can be linked. + data_type (google.cloud.apihub_v1.types.Attribute.DataType): + Required. The type of the data of the + attribute. + allowed_values (MutableSequence[google.cloud.apihub_v1.types.Attribute.AllowedValue]): + Optional. The list of allowed values when the attribute + value is of type enum. This is required when the data_type + of the attribute is ENUM. 
The maximum number of allowed + values of an attribute will be 1000. + cardinality (int): + Optional. The maximum number of values that + the attribute can have when associated with an + API Hub resource. Cardinality 1 would represent + a single-valued attribute. It must not be less + than 1 or greater than 20. If not specified, the + cardinality would be set to 1 by default and + represent a single-valued attribute. + mandatory (bool): + Output only. When mandatory is true, the + attribute is mandatory for the resource + specified in the scope. Only System defined + attributes can be mandatory. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the attribute + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the attribute + was last updated. + """ + + class DefinitionType(proto.Enum): + r"""Enumeration of attribute definition types. + + Values: + DEFINITION_TYPE_UNSPECIFIED (0): + Attribute definition type unspecified. + SYSTEM_DEFINED (1): + The attribute is predefined by the API Hub. + Note that only the list of allowed values can be + updated in this case via UpdateAttribute method. + USER_DEFINED (2): + The attribute is defined by the user. + """ + DEFINITION_TYPE_UNSPECIFIED = 0 + SYSTEM_DEFINED = 1 + USER_DEFINED = 2 + + class Scope(proto.Enum): + r"""Enumeration for the scope of the attribute representing the + resource in the API Hub to which the attribute can be linked. + + Values: + SCOPE_UNSPECIFIED (0): + Scope Unspecified. + API (1): + Attribute can be linked to an API. + VERSION (2): + Attribute can be linked to an API version. + SPEC (3): + Attribute can be linked to a Spec. + API_OPERATION (4): + Attribute can be linked to an API Operation. + DEPLOYMENT (5): + Attribute can be linked to a Deployment. + DEPENDENCY (6): + Attribute can be linked to a Dependency. + DEFINITION (7): + Attribute can be linked to a definition. 
+ EXTERNAL_API (8): + Attribute can be linked to a ExternalAPI. + PLUGIN (9): + Attribute can be linked to a Plugin. + """ + SCOPE_UNSPECIFIED = 0 + API = 1 + VERSION = 2 + SPEC = 3 + API_OPERATION = 4 + DEPLOYMENT = 5 + DEPENDENCY = 6 + DEFINITION = 7 + EXTERNAL_API = 8 + PLUGIN = 9 + + class DataType(proto.Enum): + r"""Enumeration of attribute's data type. + + Values: + DATA_TYPE_UNSPECIFIED (0): + Attribute data type unspecified. + ENUM (1): + Attribute's value is of type enum. + JSON (2): + Attribute's value is of type json. + STRING (3): + Attribute's value is of type string. + """ + DATA_TYPE_UNSPECIFIED = 0 + ENUM = 1 + JSON = 2 + STRING = 3 + + class AllowedValue(proto.Message): + r"""The value that can be assigned to the attribute when the data + type is enum. + + Attributes: + id (str): + Required. The ID of the allowed value. + + - If provided, the same will be used. The service will + throw an error if the specified id is already used by + another allowed value in the same attribute resource. + - If not provided, a system generated id derived from the + display name will be used. In this case, the service will + handle conflict resolution by adding a system generated + suffix in case of duplicates. + + This value should be 4-63 characters, and valid characters + are /[a-z][0-9]-/. + display_name (str): + Required. The display name of the allowed + value. + description (str): + Optional. The detailed description of the + allowed value. + immutable (bool): + Optional. When set to true, the allowed value + cannot be updated or deleted by the user. It can + only be true for System defined attributes. 
+ """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + immutable: bool = proto.Field( + proto.BOOL, + number=4, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + definition_type: DefinitionType = proto.Field( + proto.ENUM, + number=4, + enum=DefinitionType, + ) + scope: Scope = proto.Field( + proto.ENUM, + number=5, + enum=Scope, + ) + data_type: DataType = proto.Field( + proto.ENUM, + number=6, + enum=DataType, + ) + allowed_values: MutableSequence[AllowedValue] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=AllowedValue, + ) + cardinality: int = proto.Field( + proto.INT32, + number=8, + ) + mandatory: bool = proto.Field( + proto.BOOL, + number=9, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + + +class SpecContents(proto.Message): + r"""The spec contents. + + Attributes: + contents (bytes): + Required. The contents of the spec. + mime_type (str): + Required. The mime type of the content for + example application/json, application/yaml, + application/wsdl etc. + """ + + contents: bytes = proto.Field( + proto.BYTES, + number=1, + ) + mime_type: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SpecDetails(proto.Message): + r"""SpecDetails contains the details parsed from supported + spec types. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + open_api_spec_details (google.cloud.apihub_v1.types.OpenApiSpecDetails): + Output only. 
Additional details apart from + ``OperationDetails`` parsed from an OpenAPI spec. The + OperationDetails parsed from the spec can be obtained by + using + [ListAPIOperations][google.cloud.apihub.v1.ApiHub.ListApiOperations] + method. + + This field is a member of `oneof`_ ``details``. + description (str): + Output only. The description of the spec. + """ + + open_api_spec_details: "OpenApiSpecDetails" = proto.Field( + proto.MESSAGE, + number=2, + oneof="details", + message="OpenApiSpecDetails", + ) + description: str = proto.Field( + proto.STRING, + number=1, + ) + + +class OpenApiSpecDetails(proto.Message): + r"""OpenApiSpecDetails contains the details parsed from an OpenAPI spec + in addition to the fields mentioned in + [SpecDetails][google.cloud.apihub.v1.SpecDetails]. + + Attributes: + format_ (google.cloud.apihub_v1.types.OpenApiSpecDetails.Format): + Output only. The format of the spec. + version (str): + Output only. The version in the spec. This maps to + ``info.version`` in OpenAPI spec. + owner (google.cloud.apihub_v1.types.Owner): + Output only. Owner details for the spec. This maps to + ``info.contact`` in OpenAPI spec. + """ + + class Format(proto.Enum): + r"""Enumeration of spec formats. + + Values: + FORMAT_UNSPECIFIED (0): + SpecFile type unspecified. + OPEN_API_SPEC_2_0 (1): + OpenAPI Spec v2.0. + OPEN_API_SPEC_3_0 (2): + OpenAPI Spec v3.0. + OPEN_API_SPEC_3_1 (3): + OpenAPI Spec v3.1. + """ + FORMAT_UNSPECIFIED = 0 + OPEN_API_SPEC_2_0 = 1 + OPEN_API_SPEC_3_0 = 2 + OPEN_API_SPEC_3_1 = 3 + + format_: Format = proto.Field( + proto.ENUM, + number=1, + enum=Format, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + owner: "Owner" = proto.Field( + proto.MESSAGE, + number=3, + message="Owner", + ) + + +class OperationDetails(proto.Message): + r"""The operation details parsed from the spec. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + http_operation (google.cloud.apihub_v1.types.HttpOperation): + The HTTP Operation. + + This field is a member of `oneof`_ ``operation``. + description (str): + Output only. Description of the operation behavior. For + OpenAPI spec, this will map to ``operation.description`` in + the spec, in case description is empty, + ``operation.summary`` will be used. + documentation (google.cloud.apihub_v1.types.Documentation): + Output only. Additional external documentation for this + operation. For OpenAPI spec, this will map to + ``operation.documentation`` in the spec. + deprecated (bool): + Output only. For OpenAPI spec, this will be set if + ``operation.deprecated``\ is marked as ``true`` in the spec. + """ + + http_operation: "HttpOperation" = proto.Field( + proto.MESSAGE, + number=4, + oneof="operation", + message="HttpOperation", + ) + description: str = proto.Field( + proto.STRING, + number=1, + ) + documentation: "Documentation" = proto.Field( + proto.MESSAGE, + number=2, + message="Documentation", + ) + deprecated: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class HttpOperation(proto.Message): + r"""The HTTP Operation. + + Attributes: + path (google.cloud.apihub_v1.types.Path): + Output only. The path details for the + Operation. + method (google.cloud.apihub_v1.types.HttpOperation.Method): + Output only. Operation method + """ + + class Method(proto.Enum): + r"""Enumeration of Method types. + + Values: + METHOD_UNSPECIFIED (0): + Method unspecified. + GET (1): + Get Operation type. + PUT (2): + Put Operation type. + POST (3): + Post Operation type. + DELETE (4): + Delete Operation type. + OPTIONS (5): + Options Operation type. + HEAD (6): + Head Operation type. + PATCH (7): + Patch Operation type. + TRACE (8): + Trace Operation type. 
+ """ + METHOD_UNSPECIFIED = 0 + GET = 1 + PUT = 2 + POST = 3 + DELETE = 4 + OPTIONS = 5 + HEAD = 6 + PATCH = 7 + TRACE = 8 + + path: "Path" = proto.Field( + proto.MESSAGE, + number=1, + message="Path", + ) + method: Method = proto.Field( + proto.ENUM, + number=2, + enum=Method, + ) + + +class Path(proto.Message): + r"""The path details derived from the spec. + + Attributes: + path (str): + Output only. Complete path relative to server + endpoint. + description (str): + Output only. A short description for the path + applicable to all operations. + """ + + path: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Schema(proto.Message): + r"""The schema details derived from the spec. Currently, this entity is + supported for OpenAPI spec only. For OpenAPI spec, this maps to the + schema defined in the ``definitions`` section for OpenAPI 2.0 + version and in ``components.schemas`` section for OpenAPI 3.0 and + 3.1 version. + + Attributes: + display_name (str): + Output only. The display name of the schema. + This will map to the name of the schema in the + spec. + raw_value (bytes): + Output only. The raw value of the schema + definition corresponding to the schema name in + the spec. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + raw_value: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class Owner(proto.Message): + r"""Owner details. + + Attributes: + display_name (str): + Optional. The name of the owner. + email (str): + Required. The email of the owner. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + email: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Documentation(proto.Message): + r"""Documentation details. + + Attributes: + external_uri (str): + Optional. The uri of the externally hosted + documentation. 
+ """ + + external_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AttributeValues(proto.Message): + r"""The attribute values associated with resource. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enum_values (google.cloud.apihub_v1.types.AttributeValues.EnumAttributeValues): + The attribute values associated with a + resource in case attribute data type is enum. + + This field is a member of `oneof`_ ``Value``. + string_values (google.cloud.apihub_v1.types.AttributeValues.StringAttributeValues): + The attribute values associated with a + resource in case attribute data type is string. + + This field is a member of `oneof`_ ``Value``. + json_values (google.cloud.apihub_v1.types.AttributeValues.StringAttributeValues): + The attribute values associated with a + resource in case attribute data type is JSON. + + This field is a member of `oneof`_ ``Value``. + attribute (str): + Output only. The name of the attribute. + Format: + projects/{project}/locations/{location}/attributes/{attribute} + """ + + class EnumAttributeValues(proto.Message): + r"""The attribute values of data type enum. + + Attributes: + values (MutableSequence[google.cloud.apihub_v1.types.Attribute.AllowedValue]): + Required. The attribute values in case + attribute data type is enum. + """ + + values: MutableSequence["Attribute.AllowedValue"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Attribute.AllowedValue", + ) + + class StringAttributeValues(proto.Message): + r"""The attribute values of data type string or JSON. + + Attributes: + values (MutableSequence[str]): + Required. The attribute values in case + attribute data type is string or JSON. 
+ """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + enum_values: EnumAttributeValues = proto.Field( + proto.MESSAGE, + number=2, + oneof="Value", + message=EnumAttributeValues, + ) + string_values: StringAttributeValues = proto.Field( + proto.MESSAGE, + number=3, + oneof="Value", + message=StringAttributeValues, + ) + json_values: StringAttributeValues = proto.Field( + proto.MESSAGE, + number=4, + oneof="Value", + message=StringAttributeValues, + ) + attribute: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Dependency(proto.Message): + r"""A dependency resource defined in the API hub describes a dependency + directed from a consumer to a supplier entity. A dependency can be + defined between two [Operations][google.cloud.apihub.v1.Operation] + or between an [Operation][google.cloud.apihub.v1.Operation] and + [External API][google.cloud.apihub.v1.ExternalApi]. + + Attributes: + name (str): + Identifier. The name of the dependency in the API Hub. + + Format: + ``projects/{project}/locations/{location}/dependencies/{dependency}`` + consumer (google.cloud.apihub_v1.types.DependencyEntityReference): + Required. Immutable. The entity acting as the + consumer in the dependency. + supplier (google.cloud.apihub_v1.types.DependencyEntityReference): + Required. Immutable. The entity acting as the + supplier in the dependency. + state (google.cloud.apihub_v1.types.Dependency.State): + Output only. State of the dependency. + description (str): + Optional. Human readable description + corresponding of the dependency. + discovery_mode (google.cloud.apihub_v1.types.Dependency.DiscoveryMode): + Output only. Discovery mode of the + dependency. + error_detail (google.cloud.apihub_v1.types.DependencyErrorDetail): + Output only. Error details of a dependency if + the system has detected it internally. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the dependency + was created. 
+ update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the dependency + was last updated. + attributes (MutableMapping[str, google.cloud.apihub_v1.types.AttributeValues]): + Optional. The list of user defined attributes associated + with the dependency resource. The key is the attribute name. + It will be of the format: + ``projects/{project}/locations/{location}/attributes/{attribute}``. + The value is the attribute values associated with the + resource. + """ + + class State(proto.Enum): + r"""Possible states for a dependency. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + PROPOSED (1): + Dependency will be in a proposed state when + it is newly identified by the API hub on its + own. + VALIDATED (2): + Dependency will be in a validated state when + it is validated by the admin or manually created + in the API hub. + """ + STATE_UNSPECIFIED = 0 + PROPOSED = 1 + VALIDATED = 2 + + class DiscoveryMode(proto.Enum): + r"""Possible modes of discovering the dependency. + + Values: + DISCOVERY_MODE_UNSPECIFIED (0): + Default value. This value is unused. + MANUAL (1): + Manual mode of discovery when the dependency + is defined by the user. 
+ """ + DISCOVERY_MODE_UNSPECIFIED = 0 + MANUAL = 1 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + consumer: "DependencyEntityReference" = proto.Field( + proto.MESSAGE, + number=2, + message="DependencyEntityReference", + ) + supplier: "DependencyEntityReference" = proto.Field( + proto.MESSAGE, + number=3, + message="DependencyEntityReference", + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + discovery_mode: DiscoveryMode = proto.Field( + proto.ENUM, + number=6, + enum=DiscoveryMode, + ) + error_detail: "DependencyErrorDetail" = proto.Field( + proto.MESSAGE, + number=7, + message="DependencyErrorDetail", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + attributes: MutableMapping[str, "AttributeValues"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=10, + message="AttributeValues", + ) + + +class DependencyEntityReference(proto.Message): + r"""Reference to an entity participating in a dependency. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + operation_resource_name (str): + The resource name of an operation in the API Hub. + + Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`` + + This field is a member of `oneof`_ ``identifier``. + external_api_resource_name (str): + The resource name of an external API in the API Hub. 
+ + Format: + ``projects/{project}/locations/{location}/externalApis/{external_api}`` + + This field is a member of `oneof`_ ``identifier``. + display_name (str): + Output only. Display name of the entity. + """ + + operation_resource_name: str = proto.Field( + proto.STRING, + number=2, + oneof="identifier", + ) + external_api_resource_name: str = proto.Field( + proto.STRING, + number=3, + oneof="identifier", + ) + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DependencyErrorDetail(proto.Message): + r"""Details describing error condition of a dependency. + + Attributes: + error (google.cloud.apihub_v1.types.DependencyErrorDetail.Error): + Optional. Error in the dependency. + error_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Timestamp at which the error was + found. + """ + + class Error(proto.Enum): + r"""Possible values representing an error in the dependency. + + Values: + ERROR_UNSPECIFIED (0): + Default value used for no error in the + dependency. + SUPPLIER_NOT_FOUND (1): + Supplier entity has been deleted. + SUPPLIER_RECREATED (2): + Supplier entity has been recreated. + """ + ERROR_UNSPECIFIED = 0 + SUPPLIER_NOT_FOUND = 1 + SUPPLIER_RECREATED = 2 + + error: Error = proto.Field( + proto.ENUM, + number=1, + enum=Error, + ) + error_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class LintResponse(proto.Message): + r"""LintResponse contains the response from the linter. + + Attributes: + issues (MutableSequence[google.cloud.apihub_v1.types.Issue]): + Optional. Array of issues found in the + analyzed document. + summary (MutableSequence[google.cloud.apihub_v1.types.LintResponse.SummaryEntry]): + Optional. Summary of all issue types and + counts for each severity level. + state (google.cloud.apihub_v1.types.LintState): + Required. Lint state represents success or + failure for linting. + source (str): + Required. Name of the linting application. 
+ linter (google.cloud.apihub_v1.types.Linter): + Required. Name of the linter used. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Required. Timestamp when the linting response + was generated. + """ + + class SummaryEntry(proto.Message): + r"""Count of issues with a given severity. + + Attributes: + severity (google.cloud.apihub_v1.types.Severity): + Required. Severity of the issue. + count (int): + Required. Count of issues with the given + severity. + """ + + severity: "Severity" = proto.Field( + proto.ENUM, + number=1, + enum="Severity", + ) + count: int = proto.Field( + proto.INT32, + number=2, + ) + + issues: MutableSequence["Issue"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Issue", + ) + summary: MutableSequence[SummaryEntry] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=SummaryEntry, + ) + state: "LintState" = proto.Field( + proto.ENUM, + number=3, + enum="LintState", + ) + source: str = proto.Field( + proto.STRING, + number=4, + ) + linter: "Linter" = proto.Field( + proto.ENUM, + number=5, + enum="Linter", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + +class Issue(proto.Message): + r"""Issue contains the details of a single issue found by the + linter. + + Attributes: + code (str): + Required. Rule code unique to each rule + defined in linter. + path (MutableSequence[str]): + Required. An array of strings indicating the + location in the analyzed document where the rule + was triggered. + message (str): + Required. Human-readable message describing + the issue found by the linter. + severity (google.cloud.apihub_v1.types.Severity): + Required. Severity level of the rule + violation. + range_ (google.cloud.apihub_v1.types.Range): + Required. Object describing where in the file + the issue was found. 
+ """ + + code: str = proto.Field( + proto.STRING, + number=1, + ) + path: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + message: str = proto.Field( + proto.STRING, + number=3, + ) + severity: "Severity" = proto.Field( + proto.ENUM, + number=4, + enum="Severity", + ) + range_: "Range" = proto.Field( + proto.MESSAGE, + number=5, + message="Range", + ) + + +class Range(proto.Message): + r"""Object describing where in the file the issue was found. + + Attributes: + start (google.cloud.apihub_v1.types.Point): + Required. Start of the issue. + end (google.cloud.apihub_v1.types.Point): + Required. End of the issue. + """ + + start: "Point" = proto.Field( + proto.MESSAGE, + number=1, + message="Point", + ) + end: "Point" = proto.Field( + proto.MESSAGE, + number=2, + message="Point", + ) + + +class Point(proto.Message): + r"""Point within the file (line and character). + + Attributes: + line (int): + Required. Line number (zero-indexed). + character (int): + Required. Character position within the line + (zero-indexed). + """ + + line: int = proto.Field( + proto.INT32, + number=1, + ) + character: int = proto.Field( + proto.INT32, + number=2, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. 
Operations that have been + cancelled successfully have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ApiHubInstance(proto.Message): + r"""An ApiHubInstance represents the instance resources of the + API Hub. Currently, only one ApiHub instance is allowed for each + project. + + Attributes: + name (str): + Identifier. Format: + ``projects/{project}/locations/{location}/apiHubInstances/{apiHubInstance}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation timestamp. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update timestamp. + state (google.cloud.apihub_v1.types.ApiHubInstance.State): + Output only. The current state of the ApiHub + instance. + state_message (str): + Output only. Extra information about ApiHub instance state. + Currently the message would be populated when state is + ``FAILED``. + config (google.cloud.apihub_v1.types.ApiHubInstance.Config): + Required. Config of the ApiHub instance. + labels (MutableMapping[str, str]): + Optional. Instance labels to represent + user-provided metadata. Refer to cloud + documentation on labels for more details. + https://cloud.google.com/compute/docs/labeling-resources + description (str): + Optional. 
Description of the ApiHub instance. + """ + + class State(proto.Enum): + r"""State of the ApiHub Instance. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is omitted. + INACTIVE (1): + The ApiHub instance has not been initialized + or has been deleted. + CREATING (2): + The ApiHub instance is being created. + ACTIVE (3): + The ApiHub instance has been created and is + ready for use. + UPDATING (4): + The ApiHub instance is being updated. + DELETING (5): + The ApiHub instance is being deleted. + FAILED (6): + The ApiHub instance encountered an error + during a state change. + """ + STATE_UNSPECIFIED = 0 + INACTIVE = 1 + CREATING = 2 + ACTIVE = 3 + UPDATING = 4 + DELETING = 5 + FAILED = 6 + + class Config(proto.Message): + r"""Available configurations to provision an ApiHub Instance. + + Attributes: + cmek_key_name (str): + Required. The Customer Managed Encryption Key (CMEK) used + for data encryption. The CMEK name should follow the format + of + ``projects/([^/]+)/locations/([^/]+)/keyRings/([^/]+)/cryptoKeys/([^/]+)``, + where the location must match the instance location. 
+ """ + + cmek_key_name: str = proto.Field( + proto.STRING, + number=1, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + state_message: str = proto.Field( + proto.STRING, + number=5, + ) + config: Config = proto.Field( + proto.MESSAGE, + number=6, + message=Config, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + description: str = proto.Field( + proto.STRING, + number=8, + ) + + +class ExternalApi(proto.Message): + r"""An external API represents an API being provided by external + sources. This can be used to model third-party APIs and can be + used to define dependencies. + + Attributes: + name (str): + Identifier. Format: + ``projects/{project}/locations/{location}/externalApi/{externalApi}``. + display_name (str): + Required. Display name of the external API. + Max length is 63 characters (Unicode Code + Points). + description (str): + Optional. Description of the external API. + Max length is 2000 characters (Unicode Code + Points). + endpoints (MutableSequence[str]): + Optional. List of endpoints on which this API + is accessible. + paths (MutableSequence[str]): + Optional. List of paths served by this API. + documentation (google.cloud.apihub_v1.types.Documentation): + Optional. Documentation of the external API. + attributes (MutableMapping[str, google.cloud.apihub_v1.types.AttributeValues]): + Optional. The list of user defined attributes associated + with the Version resource. The key is the attribute name. It + will be of the format: + ``projects/{project}/locations/{location}/attributes/{attribute}``. 
+ The value is the attribute values associated with the + resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation timestamp. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update timestamp. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + endpoints: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + documentation: "Documentation" = proto.Field( + proto.MESSAGE, + number=6, + message="Documentation", + ) + attributes: MutableMapping[str, "AttributeValues"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=7, + message="AttributeValues", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/types/host_project_registration_service.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/host_project_registration_service.py new file mode 100644 index 000000000000..646a072171c9 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/host_project_registration_service.py @@ -0,0 +1,226 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.apihub.v1", + manifest={ + "CreateHostProjectRegistrationRequest", + "GetHostProjectRegistrationRequest", + "ListHostProjectRegistrationsRequest", + "ListHostProjectRegistrationsResponse", + "HostProjectRegistration", + }, +) + + +class CreateHostProjectRegistrationRequest(proto.Message): + r"""The + [CreateHostProjectRegistration][google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration] + method's request. + + Attributes: + parent (str): + Required. The parent resource for the host project. Format: + ``projects/{project}/locations/{location}`` + host_project_registration_id (str): + Required. The ID to use for the Host Project Registration, + which will become the final component of the host project + registration's resource name. The ID must be the same as the + Google cloud project specified in the + host_project_registration.gcp_project field. + host_project_registration (google.cloud.apihub_v1.types.HostProjectRegistration): + Required. The host project registration to + register. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + host_project_registration_id: str = proto.Field( + proto.STRING, + number=2, + ) + host_project_registration: "HostProjectRegistration" = proto.Field( + proto.MESSAGE, + number=3, + message="HostProjectRegistration", + ) + + +class GetHostProjectRegistrationRequest(proto.Message): + r"""The + [GetHostProjectRegistration][google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration] + method's request. + + Attributes: + name (str): + Required. Host project registration resource name. + projects/{project}/locations/{location}/hostProjectRegistrations/{host_project_registration_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListHostProjectRegistrationsRequest(proto.Message): + r"""The + [ListHostProjectRegistrations][google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations] + method's request. + + Attributes: + parent (str): + Required. The parent, which owns this collection of host + projects. Format: ``projects/*/locations/*`` + page_size (int): + Optional. The maximum number of host project + registrations to return. The service may return + fewer than this value. If unspecified, at most + 50 host project registrations will be returned. + The maximum value is 1000; values above 1000 + will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListHostProjectRegistrations`` call. Provide this to + retrieve the subsequent page. + + When paginating, all other parameters (except page_size) + provided to ``ListHostProjectRegistrations`` must match the + call that provided the page token. + filter (str): + Optional. An expression that filters the list of + HostProjectRegistrations. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string. 
All standard operators as documented at + https://google.aip.dev/160 are supported. + + The following fields in the ``HostProjectRegistration`` are + eligible for filtering: + + - ``name`` - The name of the HostProjectRegistration. + - ``create_time`` - The time at which the + HostProjectRegistration was created. The value should be + in the (RFC3339)[https://tools.ietf.org/html/rfc3339] + format. + - ``gcp_project`` - The Google cloud project associated + with the HostProjectRegistration. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListHostProjectRegistrationsResponse(proto.Message): + r"""The + [ListHostProjectRegistrations][google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations] + method's response. + + Attributes: + host_project_registrations (MutableSequence[google.cloud.apihub_v1.types.HostProjectRegistration]): + The list of host project registrations. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + host_project_registrations: MutableSequence[ + "HostProjectRegistration" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="HostProjectRegistration", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class HostProjectRegistration(proto.Message): + r"""Host project registration refers to the registration of a + Google cloud project with Api Hub as a host project. This is the + project where Api Hub is provisioned. 
It acts as the consumer + project for the Api Hub instance provisioned. Multiple runtime + projects can be attached to the host project and these + attachments define the scope of Api Hub. + + Attributes: + name (str): + Identifier. The name of the host project registration. + Format: + "projects/{project}/locations/{location}/hostProjectRegistrations/{host_project_registration}". + gcp_project (str): + Required. Immutable. Google cloud project + name in the format: "projects/abc" or + "projects/123". As input, project name with + either project id or number are accepted. As + output, this field will contain project number. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the host + project registration was created. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + gcp_project: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/types/linting_service.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/linting_service.py new file mode 100644 index 000000000000..df9451ffd032 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/linting_service.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.apihub_v1.types import common_fields + +__protobuf__ = proto.module( + package="google.cloud.apihub.v1", + manifest={ + "GetStyleGuideRequest", + "UpdateStyleGuideRequest", + "GetStyleGuideContentsRequest", + "LintSpecRequest", + "StyleGuideContents", + "StyleGuide", + }, +) + + +class GetStyleGuideRequest(proto.Message): + r"""The [GetStyleGuide][ApiHub.GetStyleGuide] method's request. + + Attributes: + name (str): + Required. The name of the spec to retrieve. Format: + ``projects/{project}/locations/{location}/plugins/{plugin}/styleGuide``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateStyleGuideRequest(proto.Message): + r"""The [UpdateStyleGuide][ApiHub.UpdateStyleGuide] method's request. + + Attributes: + style_guide (google.cloud.apihub_v1.types.StyleGuide): + Required. The Style guide resource to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. + """ + + style_guide: "StyleGuide" = proto.Field( + proto.MESSAGE, + number=1, + message="StyleGuide", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetStyleGuideContentsRequest(proto.Message): + r"""The [GetStyleGuideContents][ApiHub.GetStyleGuideContents] method's + request. + + Attributes: + name (str): + Required. The name of the StyleGuide whose contents need to + be retrieved. There is exactly one style guide resource per + project per location. The expected format is + ``projects/{project}/locations/{location}/plugins/{plugin}/styleGuide``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LintSpecRequest(proto.Message): + r"""The [LintSpec][ApiHub.LintSpec] method's request. + + Attributes: + name (str): + Required. The name of the spec to be linted. Format: + ``projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class StyleGuideContents(proto.Message): + r"""The style guide contents. + + Attributes: + contents (bytes): + Required. The contents of the style guide. + mime_type (str): + Required. The mime type of the content. + """ + + contents: bytes = proto.Field( + proto.BYTES, + number=1, + ) + mime_type: str = proto.Field( + proto.STRING, + number=2, + ) + + +class StyleGuide(proto.Message): + r"""Represents a singleton style guide resource to be used for + linting Open API specs. + + Attributes: + name (str): + Identifier. The name of the style guide. + + Format: + ``projects/{project}/locations/{location}/plugins/{plugin}/styleGuide`` + linter (google.cloud.apihub_v1.types.Linter): + Required. Target linter for the style guide. + contents (google.cloud.apihub_v1.types.StyleGuideContents): + Required. Input only. The contents of the + uploaded style guide. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + linter: common_fields.Linter = proto.Field( + proto.ENUM, + number=2, + enum=common_fields.Linter, + ) + contents: "StyleGuideContents" = proto.Field( + proto.MESSAGE, + number=3, + message="StyleGuideContents", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/types/plugin_service.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/plugin_service.py new file mode 100644 index 000000000000..0db996f84929 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/plugin_service.py @@ -0,0 +1,153 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.apihub_v1.types import common_fields + +__protobuf__ = proto.module( + package="google.cloud.apihub.v1", + manifest={ + "Plugin", + "GetPluginRequest", + "EnablePluginRequest", + "DisablePluginRequest", + }, +) + + +class Plugin(proto.Message): + r"""A plugin resource in the API Hub. + + Attributes: + name (str): + Identifier. The name of the plugin. Format: + ``projects/{project}/locations/{location}/plugins/{plugin}`` + display_name (str): + Required. The display name of the plugin. Max + length is 50 characters (Unicode code points). 
+ type_ (google.cloud.apihub_v1.types.AttributeValues): + Required. The type of the API. This maps to the following + system defined attribute: + ``projects/{project}/locations/{location}/attributes/system-plugin-type`` + attribute. The number of allowed values for this attribute + will be based on the cardinality of the attribute. The same + can be retrieved via GetAttribute API. All values should be + from the list of allowed values defined for the attribute. + description (str): + Optional. The plugin description. Max length + is 2000 characters (Unicode code points). + state (google.cloud.apihub_v1.types.Plugin.State): + Output only. Represents the state of the + plugin. + """ + + class State(proto.Enum): + r"""Possible states a plugin can have. Note that this enum may + receive new values in the future. Consumers are advised to + always code against the enum values expecting new states can be + added later on. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is omitted. + ENABLED (1): + The plugin is enabled. + DISABLED (2): + The plugin is disabled. + """ + STATE_UNSPECIFIED = 0 + ENABLED = 1 + DISABLED = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + type_: common_fields.AttributeValues = proto.Field( + proto.MESSAGE, + number=3, + message=common_fields.AttributeValues, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + + +class GetPluginRequest(proto.Message): + r"""The [GetPlugin][google.cloud.apihub.v1.ApiHubPlugin.GetPlugin] + method's request. + + Attributes: + name (str): + Required. The name of the plugin to retrieve. Format: + ``projects/{project}/locations/{location}/plugins/{plugin}``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class EnablePluginRequest(proto.Message): + r"""The [EnablePlugin][google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin] + method's request. + + Attributes: + name (str): + Required. The name of the plugin to enable. Format: + ``projects/{project}/locations/{location}/plugins/{plugin}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DisablePluginRequest(proto.Message): + r"""The + [DisablePlugin][google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin] + method's request. + + Attributes: + name (str): + Required. The name of the plugin to disable. Format: + ``projects/{project}/locations/{location}/plugins/{plugin}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/types/provisioning_service.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/provisioning_service.py new file mode 100644 index 000000000000..3c3230a25896 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/provisioning_service.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.apihub_v1.types import common_fields + +__protobuf__ = proto.module( + package="google.cloud.apihub.v1", + manifest={ + "CreateApiHubInstanceRequest", + "GetApiHubInstanceRequest", + "LookupApiHubInstanceRequest", + "LookupApiHubInstanceResponse", + }, +) + + +class CreateApiHubInstanceRequest(proto.Message): + r"""The + [CreateApiHubInstance][google.cloud.apihub.v1.Provisioning.CreateApiHubInstance] + method's request. + + Attributes: + parent (str): + Required. The parent resource for the Api Hub instance + resource. Format: + ``projects/{project}/locations/{location}`` + api_hub_instance_id (str): + Optional. Identifier to assign to the Api Hub instance. Must + be unique within scope of the parent resource. If the field + is not provided, system generated id will be used. + + This value should be 4-40 characters, and valid characters + are ``/[a-z][A-Z][0-9]-_/``. + api_hub_instance (google.cloud.apihub_v1.types.ApiHubInstance): + Required. The ApiHub instance. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + api_hub_instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + api_hub_instance: common_fields.ApiHubInstance = proto.Field( + proto.MESSAGE, + number=3, + message=common_fields.ApiHubInstance, + ) + + +class GetApiHubInstanceRequest(proto.Message): + r"""The + [GetApiHubInstance][google.cloud.apihub.v1.Provisioning.GetApiHubInstance] + method's request. + + Attributes: + name (str): + Required. The name of the Api Hub instance to retrieve. + Format: + ``projects/{project}/locations/{location}/apiHubInstances/{apiHubInstance}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LookupApiHubInstanceRequest(proto.Message): + r"""The + [LookupApiHubInstance][google.cloud.apihub.v1.Provisioning.LookupApiHubInstance] + method's request. 
+ + Attributes: + parent (str): + Required. There will always be only one Api Hub instance for + a GCP project across all locations. The parent resource for + the Api Hub instance resource. Format: + ``projects/{project}/locations/{location}`` + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LookupApiHubInstanceResponse(proto.Message): + r"""The + [LookupApiHubInstance][google.cloud.apihub.v1.Provisioning.LookupApiHubInstance] + method's response. + + Attributes: + api_hub_instance (google.cloud.apihub_v1.types.ApiHubInstance): + API Hub instance for a project if it exists, + empty otherwise. + """ + + api_hub_instance: common_fields.ApiHubInstance = proto.Field( + proto.MESSAGE, + number=1, + message=common_fields.ApiHubInstance, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/types/runtime_project_attachment_service.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/runtime_project_attachment_service.py new file mode 100644 index 000000000000..2be200cd47a7 --- /dev/null +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/types/runtime_project_attachment_service.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.apihub.v1", + manifest={ + "CreateRuntimeProjectAttachmentRequest", + "GetRuntimeProjectAttachmentRequest", + "ListRuntimeProjectAttachmentsRequest", + "ListRuntimeProjectAttachmentsResponse", + "DeleteRuntimeProjectAttachmentRequest", + "LookupRuntimeProjectAttachmentRequest", + "LookupRuntimeProjectAttachmentResponse", + "RuntimeProjectAttachment", + }, +) + + +class CreateRuntimeProjectAttachmentRequest(proto.Message): + r"""The + [CreateRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment] + method's request. + + Attributes: + parent (str): + Required. The parent resource for the Runtime Project + Attachment. Format: + ``projects/{project}/locations/{location}`` + runtime_project_attachment_id (str): + Required. The ID to use for the Runtime Project Attachment, + which will become the final component of the Runtime Project + Attachment's name. The ID must be the same as the project ID + of the Google cloud project specified in the + runtime_project_attachment.runtime_project field. + runtime_project_attachment (google.cloud.apihub_v1.types.RuntimeProjectAttachment): + Required. The Runtime Project Attachment to + create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + runtime_project_attachment_id: str = proto.Field( + proto.STRING, + number=2, + ) + runtime_project_attachment: "RuntimeProjectAttachment" = proto.Field( + proto.MESSAGE, + number=3, + message="RuntimeProjectAttachment", + ) + + +class GetRuntimeProjectAttachmentRequest(proto.Message): + r"""The + [GetRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment] + method's request. + + Attributes: + name (str): + Required. 
The name of the API resource to retrieve. Format: + ``projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListRuntimeProjectAttachmentsRequest(proto.Message): + r"""The + [ListRuntimeProjectAttachments][google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments] + method's request. + + Attributes: + parent (str): + Required. The parent, which owns this collection of runtime + project attachments. Format: + ``projects/{project}/locations/{location}`` + page_size (int): + Optional. The maximum number of runtime + project attachments to return. The service may + return fewer than this value. If unspecified, at + most 50 runtime project attachments will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListRuntimeProjectAttachments`` call. Provide this to + retrieve the subsequent page. + + When paginating, all other parameters (except page_size) + provided to ``ListRuntimeProjectAttachments`` must match the + call that provided the page token. + filter (str): + Optional. An expression that filters the list of + RuntimeProjectAttachments. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string. All standard operators as documented at + https://google.aip.dev/160 are supported. + + The following fields in the ``RuntimeProjectAttachment`` are + eligible for filtering: + + - ``name`` - The name of the RuntimeProjectAttachment. + - ``create_time`` - The time at which the + RuntimeProjectAttachment was created. The value should be + in the (RFC3339)[https://tools.ietf.org/html/rfc3339] + format. + - ``runtime_project`` - The Google cloud project associated + with the RuntimeProjectAttachment. + order_by (str): + Optional. 
Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListRuntimeProjectAttachmentsResponse(proto.Message): + r"""The + [ListRuntimeProjectAttachments][google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments] + method's response. + + Attributes: + runtime_project_attachments (MutableSequence[google.cloud.apihub_v1.types.RuntimeProjectAttachment]): + List of runtime project attachments. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + runtime_project_attachments: MutableSequence[ + "RuntimeProjectAttachment" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="RuntimeProjectAttachment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteRuntimeProjectAttachmentRequest(proto.Message): + r"""The + [DeleteRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment] + method's request. + + Attributes: + name (str): + Required. The name of the Runtime Project Attachment to + delete. Format: + ``projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LookupRuntimeProjectAttachmentRequest(proto.Message): + r"""The + [LookupRuntimeProjectAttachment][google.cloud.apihub.v1.RuntimeProjectAttachmentService.LookupRuntimeProjectAttachment] + method's request. + + Attributes: + name (str): + Required. 
Runtime project ID to look up runtime project + attachment for. Lookup happens across all regions. Expected + format: ``projects/{project}/locations/{location}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LookupRuntimeProjectAttachmentResponse(proto.Message): + r"""The + [ListRuntimeProjectAttachments][google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments] + method's response. + + Attributes: + runtime_project_attachment (google.cloud.apihub_v1.types.RuntimeProjectAttachment): + Runtime project attachment for a project if + exists, empty otherwise. + """ + + runtime_project_attachment: "RuntimeProjectAttachment" = proto.Field( + proto.MESSAGE, + number=1, + message="RuntimeProjectAttachment", + ) + + +class RuntimeProjectAttachment(proto.Message): + r"""Runtime project attachment represents an attachment from the + runtime project to the host project. Api Hub looks for + deployments in the attached runtime projects and creates + corresponding resources in Api Hub for the discovered + deployments. + + Attributes: + name (str): + Identifier. The resource name of a runtime project + attachment. Format: + "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}". + runtime_project (str): + Required. Immutable. Google cloud project + name in the format: "projects/abc" or + "projects/123". As input, project name with + either project id or number are accepted. As + output, this field will contain project number. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + runtime_project: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-apihub/mypy.ini b/packages/google-cloud-apihub/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-apihub/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-apihub/noxfile.py b/packages/google-cloud-apihub/noxfile.py new file mode 100644 index 000000000000..67b7265f7586 --- /dev/null +++ b/packages/google-cloud-apihub/noxfile.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. 
+ + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_sync.py new file mode 100644 index 000000000000..c6c7658d8f74 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateApi +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_CreateApi_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_create_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + api = apihub_v1.Api() + api.display_name = "display_name_value" + + request = apihub_v1.CreateApiRequest( + parent="parent_value", + api=api, + ) + + # Make the request + response = client.create_api(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_CreateApi_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_attribute_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_attribute_sync.py new file mode 100644 index 000000000000..3de153d06286 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_attribute_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAttribute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_CreateAttribute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_create_attribute(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + attribute = apihub_v1.Attribute() + attribute.display_name = "display_name_value" + attribute.scope = "PLUGIN" + attribute.data_type = "STRING" + + request = apihub_v1.CreateAttributeRequest( + parent="parent_value", + attribute=attribute, + ) + + # Make the request + response = client.create_attribute(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_CreateAttribute_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_sync.py new file mode 100644 index 000000000000..918fd3c76350 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_CreateDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_create_deployment(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + deployment = apihub_v1.Deployment() + deployment.display_name = "display_name_value" + deployment.deployment_type.enum_values.values.id = "id_value" + deployment.deployment_type.enum_values.values.display_name = "display_name_value" + deployment.resource_uri = "resource_uri_value" + deployment.endpoints = ['endpoints_value1', 'endpoints_value2'] + + request = apihub_v1.CreateDeploymentRequest( + parent="parent_value", + deployment=deployment, + ) + + # Make the request + response = client.create_deployment(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_CreateDeployment_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_sync.py new file mode 100644 index 000000000000..2333601e804b --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateExternalApi +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_CreateExternalApi_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_create_external_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + external_api = apihub_v1.ExternalApi() + external_api.display_name = "display_name_value" + + request = apihub_v1.CreateExternalApiRequest( + parent="parent_value", + external_api=external_api, + ) + + # Make the request + response = client.create_external_api(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_CreateExternalApi_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_spec_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_spec_sync.py new file mode 100644 index 000000000000..bc438ea63fdc --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_spec_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_CreateSpec_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_create_spec(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + spec = apihub_v1.Spec() + spec.display_name = "display_name_value" + spec.spec_type.enum_values.values.id = "id_value" + spec.spec_type.enum_values.values.display_name = "display_name_value" + + request = apihub_v1.CreateSpecRequest( + parent="parent_value", + spec=spec, + ) + + # Make the request + response = client.create_spec(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_CreateSpec_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_version_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_version_sync.py new file mode 100644 index 000000000000..564a899030bc --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_version_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_CreateVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_create_version(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + version = apihub_v1.Version() + version.display_name = "display_name_value" + + request = apihub_v1.CreateVersionRequest( + parent="parent_value", + version=version, + ) + + # Make the request + response = client.create_version(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_CreateVersion_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_sync.py new file mode 100644 index 000000000000..8e96a3fba984 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteApi +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_DeleteApi_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_delete_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteApiRequest( + name="name_value", + ) + + # Make the request + client.delete_api(request=request) + + +# [END apihub_v1_generated_ApiHub_DeleteApi_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_sync.py new file mode 100644 index 000000000000..eeb0e3280124 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAttribute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_DeleteAttribute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_delete_attribute(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteAttributeRequest( + name="name_value", + ) + + # Make the request + client.delete_attribute(request=request) + + +# [END apihub_v1_generated_ApiHub_DeleteAttribute_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_sync.py new file mode 100644 index 000000000000..8c75739bb0f8 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_DeleteDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_delete_deployment(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteDeploymentRequest( + name="name_value", + ) + + # Make the request + client.delete_deployment(request=request) + + +# [END apihub_v1_generated_ApiHub_DeleteDeployment_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_sync.py new file mode 100644 index 000000000000..06f8a4f35859 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteExternalApi +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_DeleteExternalApi_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_delete_external_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteExternalApiRequest( + name="name_value", + ) + + # Make the request + client.delete_external_api(request=request) + + +# [END apihub_v1_generated_ApiHub_DeleteExternalApi_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_sync.py new file mode 100644 index 000000000000..6ca3e4353843 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_DeleteSpec_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_delete_spec(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteSpecRequest( + name="name_value", + ) + + # Make the request + client.delete_spec(request=request) + + +# [END apihub_v1_generated_ApiHub_DeleteSpec_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_sync.py new file mode 100644 index 000000000000..17a9f9b9c22c --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_DeleteVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_delete_version(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteVersionRequest( + name="name_value", + ) + + # Make the request + client.delete_version(request=request) + + +# [END apihub_v1_generated_ApiHub_DeleteVersion_sync] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py similarity index 67% rename from packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py rename to packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py index c83488e8f680..8a47e5fc94b9 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py +++ 
b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetResource +# Snippet for CreateDependency # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-gke-connect-gateway +# python3 -m pip install google-cloud-apihub -# [START connectgateway_v1beta1_generated_GatewayService_GetResource_sync] +# [START apihub_v1_generated_ApiHubDependencies_CreateDependency_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,27 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 +from google.cloud import apihub_v1 -def sample_get_resource(): +def sample_create_dependency(): # Create a client - client = gateway_v1beta1.GatewayServiceClient() + client = apihub_v1.ApiHubDependenciesClient() # Initialize request argument(s) - request = httpbody_pb2.HttpBody( + dependency = apihub_v1.Dependency() + dependency.consumer.operation_resource_name = "operation_resource_name_value" + dependency.supplier.operation_resource_name = "operation_resource_name_value" + + request = apihub_v1.CreateDependencyRequest( + parent="parent_value", + dependency=dependency, ) # Make the request - response = client.get_resource(request=request) + response = client.create_dependency(request=request) # Handle the response print(response) -# [END connectgateway_v1beta1_generated_GatewayService_GetResource_sync] +# [END 
apihub_v1_generated_ApiHubDependencies_CreateDependency_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py new file mode 100644 index 000000000000..c5e69c45d19b --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDependency +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHubDependencies_DeleteDependency_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_delete_dependency(): + # Create a client + client = apihub_v1.ApiHubDependenciesClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteDependencyRequest( + name="name_value", + ) + + # Make the request + client.delete_dependency(request=request) + + +# [END apihub_v1_generated_ApiHubDependencies_DeleteDependency_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_line_item_service_get_line_item_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py similarity index 77% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_line_item_service_get_line_item_sync.py rename to packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py index 62f40009bc85..66d969fbf5a4 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_line_item_service_get_line_item_sync.py +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetLineItem +# Snippet for GetDependency # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
# To install the latest published package dependency, execute the following: -# python3 -m pip install google-ads-admanager +# python3 -m pip install google-cloud-apihub -# [START admanager_v1_generated_LineItemService_GetLineItem_sync] +# [START apihub_v1_generated_ApiHubDependencies_GetDependency_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.ads import admanager_v1 +from google.cloud import apihub_v1 -def sample_get_line_item(): +def sample_get_dependency(): # Create a client - client = admanager_v1.LineItemServiceClient() + client = apihub_v1.ApiHubDependenciesClient() # Initialize request argument(s) - request = admanager_v1.GetLineItemRequest( + request = apihub_v1.GetDependencyRequest( name="name_value", ) # Make the request - response = client.get_line_item(request=request) + response = client.get_dependency(request=request) # Handle the response print(response) -# [END admanager_v1_generated_LineItemService_GetLineItem_sync] +# [END apihub_v1_generated_ApiHubDependencies_GetDependency_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py new file mode 100644 index 000000000000..cf4387222d94 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDependencies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHubDependencies_ListDependencies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_list_dependencies(): + # Create a client + client = apihub_v1.ApiHubDependenciesClient() + + # Initialize request argument(s) + request = apihub_v1.ListDependenciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dependencies(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END apihub_v1_generated_ApiHubDependencies_ListDependencies_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py new file mode 100644 index 000000000000..d998a9526f5d --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDependency +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHubDependencies_UpdateDependency_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_update_dependency(): + # Create a client + client = apihub_v1.ApiHubDependenciesClient() + + # Initialize request argument(s) + dependency = apihub_v1.Dependency() + dependency.consumer.operation_resource_name = "operation_resource_name_value" + dependency.supplier.operation_resource_name = "operation_resource_name_value" + + request = apihub_v1.UpdateDependencyRequest( + dependency=dependency, + ) + + # Make the request + response = client.update_dependency(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHubDependencies_UpdateDependency_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_contact_service_get_contact_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_sync.py similarity index 77% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_contact_service_get_contact_sync.py rename to packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_sync.py index d5f793f8c3e8..34e74eacfce6 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_contact_service_get_contact_sync.py +++ 
b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetContact +# Snippet for GetApiOperation # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-ads-admanager +# python3 -m pip install google-cloud-apihub -# [START admanager_v1_generated_ContactService_GetContact_sync] +# [START apihub_v1_generated_ApiHub_GetApiOperation_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.ads import admanager_v1 +from google.cloud import apihub_v1 -def sample_get_contact(): +def sample_get_api_operation(): # Create a client - client = admanager_v1.ContactServiceClient() + client = apihub_v1.ApiHubClient() # Initialize request argument(s) - request = admanager_v1.GetContactRequest( + request = apihub_v1.GetApiOperationRequest( name="name_value", ) # Make the request - response = client.get_contact(request=request) + response = client.get_api_operation(request=request) # Handle the response print(response) -# [END admanager_v1_generated_ContactService_GetContact_sync] +# [END apihub_v1_generated_ApiHub_GetApiOperation_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_sync.py new file mode 100644 index 000000000000..9d3b08bfa858 --- /dev/null +++ 
b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetApi +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_GetApi_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetApiRequest( + name="name_value", + ) + + # Make the request + response = client.get_api(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_GetApi_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_sync.py new file mode 100644 index 000000000000..e7b9e97fec49 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAttribute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_GetAttribute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_attribute(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetAttributeRequest( + name="name_value", + ) + + # Make the request + response = client.get_attribute(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_GetAttribute_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_sync.py new file mode 100644 index 000000000000..7fb09118118e --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDefinition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_GetDefinition_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_definition(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetDefinitionRequest( + name="name_value", + ) + + # Make the request + response = client.get_definition(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_GetDefinition_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_sync.py new file mode 100644 index 000000000000..cef43a8bd895 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_GetDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_deployment(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetDeploymentRequest( + name="name_value", + ) + + # Make the request + response = client.get_deployment(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_GetDeployment_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_sync.py new file mode 100644 index 000000000000..a6cf44044286 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetExternalApi +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_GetExternalApi_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_external_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetExternalApiRequest( + name="name_value", + ) + + # Make the request + response = client.get_external_api(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_GetExternalApi_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_sync.py new file mode 100644 index 000000000000..9a53a77e5bbf --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSpecContents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_GetSpecContents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_spec_contents(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetSpecContentsRequest( + name="name_value", + ) + + # Make the request + response = client.get_spec_contents(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_GetSpecContents_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_sync.py new file mode 100644 index 000000000000..8ac4d78e6c0f --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_GetSpec_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_spec(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetSpecRequest( + name="name_value", + ) + + # Make the request + response = client.get_spec(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_GetSpec_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_sync.py new file mode 100644 index 000000000000..3cc8355e269b --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_GetVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_version(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.GetVersionRequest( + name="name_value", + ) + + # Make the request + response = client.get_version(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_GetVersion_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_contact_service_list_contacts_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_sync.py similarity index 77% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_contact_service_list_contacts_sync.py rename to packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_sync.py index 6f17b86732e5..c4bcf8f4beb2 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_contact_service_list_contacts_sync.py +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListContacts +# Snippet for ListApiOperations # NOTE: This snippet has been automatically generated for illustrative purposes only. 
# It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-ads-admanager +# python3 -m pip install google-cloud-apihub -# [START admanager_v1_generated_ContactService_ListContacts_sync] +# [START apihub_v1_generated_ApiHub_ListApiOperations_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.ads import admanager_v1 +from google.cloud import apihub_v1 -def sample_list_contacts(): +def sample_list_api_operations(): # Create a client - client = admanager_v1.ContactServiceClient() + client = apihub_v1.ApiHubClient() # Initialize request argument(s) - request = admanager_v1.ListContactsRequest( + request = apihub_v1.ListApiOperationsRequest( parent="parent_value", ) # Make the request - page_result = client.list_contacts(request=request) + page_result = client.list_api_operations(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_ContactService_ListContacts_sync] +# [END apihub_v1_generated_ApiHub_ListApiOperations_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_list_teams_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_sync.py similarity index 78% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_list_teams_sync.py rename to packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_sync.py index 947a825e3a3d..626f918ae453 100644 --- 
a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_list_teams_sync.py +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListTeams +# Snippet for ListApis # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-ads-admanager +# python3 -m pip install google-cloud-apihub -# [START admanager_v1_generated_TeamService_ListTeams_sync] +# [START apihub_v1_generated_ApiHub_ListApis_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.ads import admanager_v1 +from google.cloud import apihub_v1 -def sample_list_teams(): +def sample_list_apis(): # Create a client - client = admanager_v1.TeamServiceClient() + client = apihub_v1.ApiHubClient() # Initialize request argument(s) - request = admanager_v1.ListTeamsRequest( + request = apihub_v1.ListApisRequest( parent="parent_value", ) # Make the request - page_result = client.list_teams(request=request) + page_result = client.list_apis(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_TeamService_ListTeams_sync] +# [END apihub_v1_generated_ApiHub_ListApis_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_label_service_list_labels_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_sync.py similarity index 78% 
rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_label_service_list_labels_sync.py rename to packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_sync.py index bc207268f8da..3d1a97f4d9b5 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_label_service_list_labels_sync.py +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListLabels +# Snippet for ListAttributes # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-ads-admanager +# python3 -m pip install google-cloud-apihub -# [START admanager_v1_generated_LabelService_ListLabels_sync] +# [START apihub_v1_generated_ApiHub_ListAttributes_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.ads import admanager_v1 +from google.cloud import apihub_v1 -def sample_list_labels(): +def sample_list_attributes(): # Create a client - client = admanager_v1.LabelServiceClient() + client = apihub_v1.ApiHubClient() # Initialize request argument(s) - request = admanager_v1.ListLabelsRequest( + request = apihub_v1.ListAttributesRequest( parent="parent_value", ) # Make the request - page_result = client.list_labels(request=request) + page_result = client.list_attributes(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_LabelService_ListLabels_sync] +# [END apihub_v1_generated_ApiHub_ListAttributes_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_sync.py new file mode 100644 index 000000000000..098a35bdfcd3 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListDeployments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_ListDeployments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_list_deployments(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.ListDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END apihub_v1_generated_ApiHub_ListDeployments_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_sync.py new file mode 100644 index 000000000000..e631489c7f7d --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListExternalApis +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_ListExternalApis_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_list_external_apis(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.ListExternalApisRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_external_apis(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END apihub_v1_generated_ApiHub_ListExternalApis_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_sync.py new file mode 100644 index 000000000000..2e758cc882af --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSpecs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_ListSpecs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_list_specs(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.ListSpecsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_specs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END apihub_v1_generated_ApiHub_ListSpecs_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_sync.py new file mode 100644 index 000000000000..acaca1f72972 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_ListVersions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_list_versions(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.ListVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END apihub_v1_generated_ApiHub_ListVersions_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py new file mode 100644 index 000000000000..b6e59d25f274 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DisablePlugin +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHubPlugin_DisablePlugin_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_disable_plugin(): + # Create a client + client = apihub_v1.ApiHubPluginClient() + + # Initialize request argument(s) + request = apihub_v1.DisablePluginRequest( + name="name_value", + ) + + # Make the request + response = client.disable_plugin(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHubPlugin_DisablePlugin_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py new file mode 100644 index 000000000000..5fb146b41ed0 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnablePlugin +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHubPlugin_EnablePlugin_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_enable_plugin(): + # Create a client + client = apihub_v1.ApiHubPluginClient() + + # Initialize request argument(s) + request = apihub_v1.EnablePluginRequest( + name="name_value", + ) + + # Make the request + response = client.enable_plugin(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHubPlugin_EnablePlugin_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_sync.py new file mode 100644 index 000000000000..22a1089c2bdd --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPlugin +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHubPlugin_GetPlugin_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_plugin(): + # Create a client + client = apihub_v1.ApiHubPluginClient() + + # Initialize request argument(s) + request = apihub_v1.GetPluginRequest( + name="name_value", + ) + + # Make the request + response = client.get_plugin(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHubPlugin_GetPlugin_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_sync.py new file mode 100644 index 000000000000..ad33c1d83adb --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchResources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_SearchResources_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_search_resources(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + request = apihub_v1.SearchResourcesRequest( + location="location_value", + query="query_value", + ) + + # Make the request + page_result = client.search_resources(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END apihub_v1_generated_ApiHub_SearchResources_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_sync.py new file mode 100644 index 000000000000..9562ae762e7a --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateApi +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_UpdateApi_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_update_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + api = apihub_v1.Api() + api.display_name = "display_name_value" + + request = apihub_v1.UpdateApiRequest( + api=api, + ) + + # Make the request + response = client.update_api(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_UpdateApi_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_sync.py new file mode 100644 index 000000000000..cea6cf1e4fcf --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAttribute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_UpdateAttribute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_update_attribute(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + attribute = apihub_v1.Attribute() + attribute.display_name = "display_name_value" + attribute.scope = "PLUGIN" + attribute.data_type = "STRING" + + request = apihub_v1.UpdateAttributeRequest( + attribute=attribute, + ) + + # Make the request + response = client.update_attribute(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_UpdateAttribute_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_sync.py new file mode 100644 index 000000000000..ab6980b3b573 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_UpdateDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_update_deployment(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + deployment = apihub_v1.Deployment() + deployment.display_name = "display_name_value" + deployment.deployment_type.enum_values.values.id = "id_value" + deployment.deployment_type.enum_values.values.display_name = "display_name_value" + deployment.resource_uri = "resource_uri_value" + deployment.endpoints = ['endpoints_value1', 'endpoints_value2'] + + request = apihub_v1.UpdateDeploymentRequest( + deployment=deployment, + ) + + # Make the request + response = client.update_deployment(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_UpdateDeployment_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_external_api_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_external_api_sync.py new file mode 100644 index 000000000000..476879e42ad2 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_external_api_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateExternalApi +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_UpdateExternalApi_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_update_external_api(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + external_api = apihub_v1.ExternalApi() + external_api.display_name = "display_name_value" + + request = apihub_v1.UpdateExternalApiRequest( + external_api=external_api, + ) + + # Make the request + response = client.update_external_api(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_UpdateExternalApi_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_sync.py new file mode 100644 index 000000000000..6f6fc9d969a4 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_UpdateSpec_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_update_spec(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + spec = apihub_v1.Spec() + spec.display_name = "display_name_value" + spec.spec_type.enum_values.values.id = "id_value" + spec.spec_type.enum_values.values.display_name = "display_name_value" + + request = apihub_v1.UpdateSpecRequest( + spec=spec, + ) + + # Make the request + response = client.update_spec(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_UpdateSpec_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_sync.py new file mode 100644 index 000000000000..6edbe9ea2c37 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_ApiHub_UpdateVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_update_version(): + # Create a client + client = apihub_v1.ApiHubClient() + + # Initialize request argument(s) + version = apihub_v1.Version() + version.display_name = "display_name_value" + + request = apihub_v1.UpdateVersionRequest( + version=version, + ) + + # Make the request + response = client.update_version(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_ApiHub_UpdateVersion_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py new file mode 100644 index 000000000000..e54632c77d9c --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateHostProjectRegistration +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_create_host_project_registration(): + # Create a client + client = apihub_v1.HostProjectRegistrationServiceClient() + + # Initialize request argument(s) + host_project_registration = apihub_v1.HostProjectRegistration() + host_project_registration.gcp_project = "gcp_project_value" + + request = apihub_v1.CreateHostProjectRegistrationRequest( + parent="parent_value", + host_project_registration_id="host_project_registration_id_value", + host_project_registration=host_project_registration, + ) + + # Make the request + response = client.create_host_project_registration(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py new file mode 100644 index 000000000000..2d844668f901 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHostProjectRegistration +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_host_project_registration(): + # Create a client + client = apihub_v1.HostProjectRegistrationServiceClient() + + # Initialize request argument(s) + request = apihub_v1.GetHostProjectRegistrationRequest( + name="name_value", + ) + + # Make the request + response = client.get_host_project_registration(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py new file mode 100644 index 000000000000..60a558b0df4b --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListHostProjectRegistrations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_list_host_project_registrations(): + # Create a client + client = apihub_v1.HostProjectRegistrationServiceClient() + + # Initialize request argument(s) + request = apihub_v1.ListHostProjectRegistrationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_host_project_registrations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_contents_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_contents_sync.py new file mode 100644 index 000000000000..bc8b56e39946 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_contents_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStyleGuideContents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_LintingService_GetStyleGuideContents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_style_guide_contents(): + # Create a client + client = apihub_v1.LintingServiceClient() + + # Initialize request argument(s) + request = apihub_v1.GetStyleGuideContentsRequest( + name="name_value", + ) + + # Make the request + response = client.get_style_guide_contents(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_LintingService_GetStyleGuideContents_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_sync.py new file mode 100644 index 000000000000..3826750cec90 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStyleGuide +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_LintingService_GetStyleGuide_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_style_guide(): + # Create a client + client = apihub_v1.LintingServiceClient() + + # Initialize request argument(s) + request = apihub_v1.GetStyleGuideRequest( + name="name_value", + ) + + # Make the request + response = client.get_style_guide(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_LintingService_GetStyleGuide_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_sync.py new file mode 100644 index 000000000000..d04912e4b505 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LintSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_LintingService_LintSpec_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_lint_spec(): + # Create a client + client = apihub_v1.LintingServiceClient() + + # Initialize request argument(s) + request = apihub_v1.LintSpecRequest( + name="name_value", + ) + + # Make the request + client.lint_spec(request=request) + + +# [END apihub_v1_generated_LintingService_LintSpec_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_sync.py new file mode 100644 index 000000000000..f413bc90c64f --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateStyleGuide +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_LintingService_UpdateStyleGuide_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_update_style_guide(): + # Create a client + client = apihub_v1.LintingServiceClient() + + # Initialize request argument(s) + style_guide = apihub_v1.StyleGuide() + style_guide.linter = "OTHER" + style_guide.contents.contents = b'contents_blob' + style_guide.contents.mime_type = "mime_type_value" + + request = apihub_v1.UpdateStyleGuideRequest( + style_guide=style_guide, + ) + + # Make the request + response = client.update_style_guide(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_LintingService_UpdateStyleGuide_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_create_api_hub_instance_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_create_api_hub_instance_sync.py new file mode 100644 index 000000000000..06b928d1ef31 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_create_api_hub_instance_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateApiHubInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_Provisioning_CreateApiHubInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_create_api_hub_instance(): + # Create a client + client = apihub_v1.ProvisioningClient() + + # Initialize request argument(s) + api_hub_instance = apihub_v1.ApiHubInstance() + api_hub_instance.config.cmek_key_name = "cmek_key_name_value" + + request = apihub_v1.CreateApiHubInstanceRequest( + parent="parent_value", + api_hub_instance=api_hub_instance, + ) + + # Make the request + operation = client.create_api_hub_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END apihub_v1_generated_Provisioning_CreateApiHubInstance_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_get_api_hub_instance_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_get_api_hub_instance_sync.py new file mode 100644 index 000000000000..28f9f8a9dc4f --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_get_api_hub_instance_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under 
the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetApiHubInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_Provisioning_GetApiHubInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_api_hub_instance(): + # Create a client + client = apihub_v1.ProvisioningClient() + + # Initialize request argument(s) + request = apihub_v1.GetApiHubInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_api_hub_instance(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_Provisioning_GetApiHubInstance_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py new file mode 100644 index 000000000000..df7e7fe3d0ec --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LookupApiHubInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_Provisioning_LookupApiHubInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_lookup_api_hub_instance(): + # Create a client + client = apihub_v1.ProvisioningClient() + + # Initialize request argument(s) + request = apihub_v1.LookupApiHubInstanceRequest( + parent="parent_value", + ) + + # Make the request + response = client.lookup_api_hub_instance(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_Provisioning_LookupApiHubInstance_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py new file mode 100644 index 000000000000..f76c027128df --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateRuntimeProjectAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_create_runtime_project_attachment(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceClient() + + # Initialize request argument(s) + runtime_project_attachment = apihub_v1.RuntimeProjectAttachment() + runtime_project_attachment.runtime_project = "runtime_project_value" + + request = apihub_v1.CreateRuntimeProjectAttachmentRequest( + parent="parent_value", + runtime_project_attachment_id="runtime_project_attachment_id_value", + runtime_project_attachment=runtime_project_attachment, + ) + + # Make the request + response = client.create_runtime_project_attachment(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py new file mode 100644 index 000000000000..81c4cef14830 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteRuntimeProjectAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_delete_runtime_project_attachment(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceClient() + + # Initialize request argument(s) + request = apihub_v1.DeleteRuntimeProjectAttachmentRequest( + name="name_value", + ) + + # Make the request + client.delete_runtime_project_attachment(request=request) + + +# [END apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py new file mode 100644 index 000000000000..b6fc7ccf55ed --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRuntimeProjectAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_get_runtime_project_attachment(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceClient() + + # Initialize request argument(s) + request = apihub_v1.GetRuntimeProjectAttachmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_runtime_project_attachment(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py new file mode 100644 index 000000000000..9e966617605e --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRuntimeProjectAttachments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_list_runtime_project_attachments(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceClient() + + # Initialize request argument(s) + request = apihub_v1.ListRuntimeProjectAttachmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_runtime_project_attachments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_sync.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_sync.py new file mode 100644 index 000000000000..1b8b199be9f7 --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for LookupRuntimeProjectAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-apihub + + +# [START apihub_v1_generated_RuntimeProjectAttachmentService_LookupRuntimeProjectAttachment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import apihub_v1 + + +def sample_lookup_runtime_project_attachment(): + # Create a client + client = apihub_v1.RuntimeProjectAttachmentServiceClient() + + # Initialize request argument(s) + request = apihub_v1.LookupRuntimeProjectAttachmentRequest( + name="name_value", + ) + + # Make the request + response = client.lookup_runtime_project_attachment(request=request) + + # Handle the response + print(response) + +# [END apihub_v1_generated_RuntimeProjectAttachmentService_LookupRuntimeProjectAttachment_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json new file mode 100644 index 000000000000..2dd2f7e3c87d --- /dev/null +++ b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json @@ -0,0 +1,4740 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.apihub.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-apihub", + "version": "0.2.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.create_dependency", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.CreateDependency", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHubDependencies", + "shortName": "ApiHubDependencies" + }, + "shortName": "CreateDependency" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.CreateDependencyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "dependency", + "type": "google.cloud.apihub_v1.types.Dependency" + }, + { + "name": "dependency_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Dependency", + "shortName": "create_dependency" + }, + "description": "Sample for CreateDependency", + "file": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHubDependencies_CreateDependency_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" + }, + "fullName": 
"google.cloud.apihub_v1.ApiHubDependenciesClient.delete_dependency", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHubDependencies", + "shortName": "ApiHubDependencies" + }, + "shortName": "DeleteDependency" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.DeleteDependencyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_dependency" + }, + "description": "Sample for DeleteDependency", + "file": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHubDependencies_DeleteDependency_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.get_dependency", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.GetDependency", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHubDependencies", + "shortName": "ApiHubDependencies" + }, + "shortName": "GetDependency" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.apihub_v1.types.GetDependencyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Dependency", + "shortName": "get_dependency" + }, + "description": "Sample for GetDependency", + "file": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHubDependencies_GetDependency_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.list_dependencies", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.ListDependencies", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHubDependencies", + "shortName": "ApiHubDependencies" + }, + "shortName": "ListDependencies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.ListDependenciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.apihub_v1.services.api_hub_dependencies.pagers.ListDependenciesPager", + "shortName": "list_dependencies" + }, + "description": "Sample for ListDependencies", + "file": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHubDependencies_ListDependencies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.update_dependency", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHubDependencies", + "shortName": "ApiHubDependencies" + }, + "shortName": "UpdateDependency" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.UpdateDependencyRequest" + }, + { + "name": "dependency", + "type": "google.cloud.apihub_v1.types.Dependency" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Dependency", + "shortName": "update_dependency" + }, + "description": "Sample for UpdateDependency", + 
"file": "apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHubDependencies_UpdateDependency_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", + "shortName": "ApiHubPluginClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.disable_plugin", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHubPlugin", + "shortName": "ApiHubPlugin" + }, + "shortName": "DisablePlugin" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.DisablePluginRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Plugin", + "shortName": "disable_plugin" + }, + "description": "Sample for DisablePlugin", + "file": "apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHubPlugin_DisablePlugin_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", + "shortName": "ApiHubPluginClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.enable_plugin", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHubPlugin", + "shortName": "ApiHubPlugin" + }, + "shortName": "EnablePlugin" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.EnablePluginRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Plugin", + "shortName": "enable_plugin" + }, + "description": "Sample for EnablePlugin", + "file": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHubPlugin_EnablePlugin_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.apihub_v1.ApiHubPluginClient", + "shortName": "ApiHubPluginClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.get_plugin", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHubPlugin.GetPlugin", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHubPlugin", + "shortName": "ApiHubPlugin" + }, + "shortName": "GetPlugin" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetPluginRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Plugin", + "shortName": "get_plugin" + }, + "description": "Sample for GetPlugin", + "file": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHubPlugin_GetPlugin_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_api", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.CreateApi", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "CreateApi" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.apihub_v1.types.CreateApiRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "api", + "type": "google.cloud.apihub_v1.types.Api" + }, + { + "name": "api_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Api", + "shortName": "create_api" + }, + "description": "Sample for CreateApi", + "file": "apihub_v1_generated_api_hub_create_api_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_CreateApi_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_create_api_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_attribute", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.CreateAttribute", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "CreateAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.CreateAttributeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "attribute", + "type": "google.cloud.apihub_v1.types.Attribute" + }, + { + "name": "attribute_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + 
}, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Attribute", + "shortName": "create_attribute" + }, + "description": "Sample for CreateAttribute", + "file": "apihub_v1_generated_api_hub_create_attribute_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_CreateAttribute_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_create_attribute_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_deployment", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.CreateDeployment", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "CreateDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.CreateDeploymentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deployment", + "type": "google.cloud.apihub_v1.types.Deployment" + }, + { + "name": "deployment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Deployment", + "shortName": "create_deployment" + }, + "description": "Sample for CreateDeployment", + "file": 
"apihub_v1_generated_api_hub_create_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_CreateDeployment_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_create_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_external_api", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.CreateExternalApi", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "CreateExternalApi" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.CreateExternalApiRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "external_api", + "type": "google.cloud.apihub_v1.types.ExternalApi" + }, + { + "name": "external_api_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.ExternalApi", + "shortName": "create_external_api" + }, + "description": "Sample for CreateExternalApi", + "file": "apihub_v1_generated_api_hub_create_external_api_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_CreateExternalApi_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 
55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_create_external_api_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_spec", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.CreateSpec", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "CreateSpec" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.CreateSpecRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "spec", + "type": "google.cloud.apihub_v1.types.Spec" + }, + { + "name": "spec_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Spec", + "shortName": "create_spec" + }, + "description": "Sample for CreateSpec", + "file": "apihub_v1_generated_api_hub_create_spec_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_CreateSpec_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"apihub_v1_generated_api_hub_create_spec_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_version", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.CreateVersion", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "CreateVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.CreateVersionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "version", + "type": "google.cloud.apihub_v1.types.Version" + }, + { + "name": "version_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Version", + "shortName": "create_version" + }, + "description": "Sample for CreateVersion", + "file": "apihub_v1_generated_api_hub_create_version_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_CreateVersion_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_create_version_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_api", + "method": { + "fullName": 
"google.cloud.apihub.v1.ApiHub.DeleteApi", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "DeleteApi" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.DeleteApiRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_api" + }, + "description": "Sample for DeleteApi", + "file": "apihub_v1_generated_api_hub_delete_api_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_DeleteApi_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_delete_api_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_attribute", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteAttribute", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "DeleteAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.DeleteAttributeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_attribute" + }, 
+ "description": "Sample for DeleteAttribute", + "file": "apihub_v1_generated_api_hub_delete_attribute_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_DeleteAttribute_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_delete_attribute_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_deployment", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteDeployment", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "DeleteDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.DeleteDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_deployment" + }, + "description": "Sample for DeleteDeployment", + "file": "apihub_v1_generated_api_hub_delete_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_DeleteDeployment_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { 
+ "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_delete_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_external_api", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteExternalApi", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "DeleteExternalApi" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.DeleteExternalApiRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_external_api" + }, + "description": "Sample for DeleteExternalApi", + "file": "apihub_v1_generated_api_hub_delete_external_api_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_DeleteExternalApi_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_delete_external_api_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_spec", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteSpec", + "service": { + "fullName": 
"google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "DeleteSpec" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.DeleteSpecRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_spec" + }, + "description": "Sample for DeleteSpec", + "file": "apihub_v1_generated_api_hub_delete_spec_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_DeleteSpec_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_delete_spec_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_version", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteVersion", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "DeleteVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.DeleteVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_version" + }, + "description": "Sample for DeleteVersion", + "file": 
"apihub_v1_generated_api_hub_delete_version_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_DeleteVersion_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_delete_version_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_api_operation", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.GetApiOperation", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "GetApiOperation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetApiOperationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.ApiOperation", + "shortName": "get_api_operation" + }, + "description": "Sample for GetApiOperation", + "file": "apihub_v1_generated_api_hub_get_api_operation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_GetApiOperation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_get_api_operation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_api", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.GetApi", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "GetApi" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetApiRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Api", + "shortName": "get_api" + }, + "description": "Sample for GetApi", + "file": "apihub_v1_generated_api_hub_get_api_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_GetApi_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_get_api_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_attribute", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.GetAttribute", + "service": { + 
"fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "GetAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetAttributeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Attribute", + "shortName": "get_attribute" + }, + "description": "Sample for GetAttribute", + "file": "apihub_v1_generated_api_hub_get_attribute_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_GetAttribute_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_get_attribute_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_definition", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.GetDefinition", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "GetDefinition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetDefinitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], 
+ "resultType": "google.cloud.apihub_v1.types.Definition", + "shortName": "get_definition" + }, + "description": "Sample for GetDefinition", + "file": "apihub_v1_generated_api_hub_get_definition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_GetDefinition_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_get_definition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_deployment", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.GetDeployment", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "GetDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Deployment", + "shortName": "get_deployment" + }, + "description": "Sample for GetDeployment", + "file": "apihub_v1_generated_api_hub_get_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_GetDeployment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" 
+ }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_get_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_external_api", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.GetExternalApi", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "GetExternalApi" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetExternalApiRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.ExternalApi", + "shortName": "get_external_api" + }, + "description": "Sample for GetExternalApi", + "file": "apihub_v1_generated_api_hub_get_external_api_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_GetExternalApi_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_get_external_api_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec_contents", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.GetSpecContents", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "GetSpecContents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetSpecContentsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.SpecContents", + "shortName": "get_spec_contents" + }, + "description": "Sample for GetSpecContents", + "file": "apihub_v1_generated_api_hub_get_spec_contents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_GetSpecContents_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_get_spec_contents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.GetSpec", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "GetSpec" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.apihub_v1.types.GetSpecRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Spec", + "shortName": "get_spec" + }, + "description": "Sample for GetSpec", + "file": "apihub_v1_generated_api_hub_get_spec_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_GetSpec_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_get_spec_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_version", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.GetVersion", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "GetVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Version", + "shortName": "get_version" + }, + "description": "Sample for GetVersion", + "file": "apihub_v1_generated_api_hub_get_version_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_GetVersion_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_get_version_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_api_operations", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.ListApiOperations", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "ListApiOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.ListApiOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApiOperationsPager", + "shortName": "list_api_operations" + }, + "description": "Sample for ListApiOperations", + "file": "apihub_v1_generated_api_hub_list_api_operations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_ListApiOperations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_list_api_operations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_apis", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.ListApis", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "ListApis" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.ListApisRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApisPager", + "shortName": "list_apis" + }, + "description": "Sample for ListApis", + "file": "apihub_v1_generated_api_hub_list_apis_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_ListApis_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_list_apis_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_attributes", + "method": { + "fullName": 
"google.cloud.apihub.v1.ApiHub.ListAttributes", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "ListAttributes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.ListAttributesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListAttributesPager", + "shortName": "list_attributes" + }, + "description": "Sample for ListAttributes", + "file": "apihub_v1_generated_api_hub_list_attributes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_ListAttributes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_list_attributes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_deployments", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.ListDeployments", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "ListDeployments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.ListDeploymentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + 
{ + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListDeploymentsPager", + "shortName": "list_deployments" + }, + "description": "Sample for ListDeployments", + "file": "apihub_v1_generated_api_hub_list_deployments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_ListDeployments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_list_deployments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_external_apis", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.ListExternalApis", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "ListExternalApis" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.ListExternalApisRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListExternalApisPager", + "shortName": "list_external_apis" + }, + "description": "Sample for ListExternalApis", + "file": "apihub_v1_generated_api_hub_list_external_apis_sync.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_ListExternalApis_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_list_external_apis_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_specs", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.ListSpecs", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "ListSpecs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.ListSpecsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListSpecsPager", + "shortName": "list_specs" + }, + "description": "Sample for ListSpecs", + "file": "apihub_v1_generated_api_hub_list_specs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_ListSpecs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, 
+ "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_list_specs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_versions", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.ListVersions", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "ListVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.ListVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListVersionsPager", + "shortName": "list_versions" + }, + "description": "Sample for ListVersions", + "file": "apihub_v1_generated_api_hub_list_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_ListVersions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_list_versions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.search_resources", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.SearchResources", + "service": 
{ + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "SearchResources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.SearchResourcesRequest" + }, + { + "name": "location", + "type": "str" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.SearchResourcesPager", + "shortName": "search_resources" + }, + "description": "Sample for SearchResources", + "file": "apihub_v1_generated_api_hub_search_resources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_SearchResources_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_search_resources_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_api", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateApi", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "UpdateApi" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.UpdateApiRequest" + }, + { + "name": "api", + "type": "google.cloud.apihub_v1.types.Api" + }, + { + "name": "update_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Api", + "shortName": "update_api" + }, + "description": "Sample for UpdateApi", + "file": "apihub_v1_generated_api_hub_update_api_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_UpdateApi_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_update_api_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_attribute", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateAttribute", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "UpdateAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.UpdateAttributeRequest" + }, + { + "name": "attribute", + "type": "google.cloud.apihub_v1.types.Attribute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Attribute", + "shortName": "update_attribute" + }, + 
"description": "Sample for UpdateAttribute", + "file": "apihub_v1_generated_api_hub_update_attribute_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_UpdateAttribute_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_update_attribute_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_deployment", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateDeployment", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "UpdateDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.UpdateDeploymentRequest" + }, + { + "name": "deployment", + "type": "google.cloud.apihub_v1.types.Deployment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Deployment", + "shortName": "update_deployment" + }, + "description": "Sample for UpdateDeployment", + "file": "apihub_v1_generated_api_hub_update_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_UpdateDeployment_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": 
"FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_update_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_external_api", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateExternalApi", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "UpdateExternalApi" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.UpdateExternalApiRequest" + }, + { + "name": "external_api", + "type": "google.cloud.apihub_v1.types.ExternalApi" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.ExternalApi", + "shortName": "update_external_api" + }, + "description": "Sample for UpdateExternalApi", + "file": "apihub_v1_generated_api_hub_update_external_api_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_UpdateExternalApi_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" 
+ }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_update_external_api_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_spec", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateSpec", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "UpdateSpec" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.UpdateSpecRequest" + }, + { + "name": "spec", + "type": "google.cloud.apihub_v1.types.Spec" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Spec", + "shortName": "update_spec" + }, + "description": "Sample for UpdateSpec", + "file": "apihub_v1_generated_api_hub_update_spec_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_UpdateSpec_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_update_spec_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" + }, + "fullName": 
"google.cloud.apihub_v1.ApiHubClient.update_version", + "method": { + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateVersion", + "service": { + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" + }, + "shortName": "UpdateVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.UpdateVersionRequest" + }, + { + "name": "version", + "type": "google.cloud.apihub_v1.types.Version" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.Version", + "shortName": "update_version" + }, + "description": "Sample for UpdateVersion", + "file": "apihub_v1_generated_api_hub_update_version_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_ApiHub_UpdateVersion_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_api_hub_update_version_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", + "shortName": "HostProjectRegistrationServiceClient" + }, + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.create_host_project_registration", + "method": { + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration", + "service": { + "fullName": 
"google.cloud.apihub.v1.HostProjectRegistrationService", + "shortName": "HostProjectRegistrationService" + }, + "shortName": "CreateHostProjectRegistration" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.CreateHostProjectRegistrationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "host_project_registration", + "type": "google.cloud.apihub_v1.types.HostProjectRegistration" + }, + { + "name": "host_project_registration_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", + "shortName": "create_host_project_registration" + }, + "description": "Sample for CreateHostProjectRegistration", + "file": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", + "shortName": "HostProjectRegistrationServiceClient" + }, + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.get_host_project_registration", + "method": { + 
"fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration", + "service": { + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", + "shortName": "HostProjectRegistrationService" + }, + "shortName": "GetHostProjectRegistration" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetHostProjectRegistrationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", + "shortName": "get_host_project_registration" + }, + "description": "Sample for GetHostProjectRegistration", + "file": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", + "shortName": "HostProjectRegistrationServiceClient" + }, + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.list_host_project_registrations", + "method": { + "fullName": 
"google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations", + "service": { + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", + "shortName": "HostProjectRegistrationService" + }, + "shortName": "ListHostProjectRegistrations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.services.host_project_registration_service.pagers.ListHostProjectRegistrationsPager", + "shortName": "list_host_project_registrations" + }, + "description": "Sample for ListHostProjectRegistrations", + "file": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" + }, + "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide_contents", + "method": { + "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuideContents", + 
"service": { + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" + }, + "shortName": "GetStyleGuideContents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetStyleGuideContentsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.StyleGuideContents", + "shortName": "get_style_guide_contents" + }, + "description": "Sample for GetStyleGuideContents", + "file": "apihub_v1_generated_linting_service_get_style_guide_contents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_LintingService_GetStyleGuideContents_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_linting_service_get_style_guide_contents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" + }, + "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide", + "method": { + "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuide", + "service": { + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" + }, + "shortName": "GetStyleGuide" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetStyleGuideRequest" + }, + { + "name": "name", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.StyleGuide", + "shortName": "get_style_guide" + }, + "description": "Sample for GetStyleGuide", + "file": "apihub_v1_generated_linting_service_get_style_guide_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_LintingService_GetStyleGuide_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_linting_service_get_style_guide_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" + }, + "fullName": "google.cloud.apihub_v1.LintingServiceClient.lint_spec", + "method": { + "fullName": "google.cloud.apihub.v1.LintingService.LintSpec", + "service": { + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" + }, + "shortName": "LintSpec" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.LintSpecRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "lint_spec" + }, + "description": "Sample for LintSpec", + "file": "apihub_v1_generated_linting_service_lint_spec_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"apihub_v1_generated_LintingService_LintSpec_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_linting_service_lint_spec_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" + }, + "fullName": "google.cloud.apihub_v1.LintingServiceClient.update_style_guide", + "method": { + "fullName": "google.cloud.apihub.v1.LintingService.UpdateStyleGuide", + "service": { + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" + }, + "shortName": "UpdateStyleGuide" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.UpdateStyleGuideRequest" + }, + { + "name": "style_guide", + "type": "google.cloud.apihub_v1.types.StyleGuide" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.StyleGuide", + "shortName": "update_style_guide" + }, + "description": "Sample for UpdateStyleGuide", + "file": "apihub_v1_generated_linting_service_update_style_guide_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_LintingService_UpdateStyleGuide_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, 
+ { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_linting_service_update_style_guide_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ProvisioningClient", + "shortName": "ProvisioningClient" + }, + "fullName": "google.cloud.apihub_v1.ProvisioningClient.create_api_hub_instance", + "method": { + "fullName": "google.cloud.apihub.v1.Provisioning.CreateApiHubInstance", + "service": { + "fullName": "google.cloud.apihub.v1.Provisioning", + "shortName": "Provisioning" + }, + "shortName": "CreateApiHubInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.CreateApiHubInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "api_hub_instance", + "type": "google.cloud.apihub_v1.types.ApiHubInstance" + }, + { + "name": "api_hub_instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_api_hub_instance" + }, + "description": "Sample for CreateApiHubInstance", + "file": "apihub_v1_generated_provisioning_create_api_hub_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_Provisioning_CreateApiHubInstance_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, 
+ "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_provisioning_create_api_hub_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ProvisioningClient", + "shortName": "ProvisioningClient" + }, + "fullName": "google.cloud.apihub_v1.ProvisioningClient.get_api_hub_instance", + "method": { + "fullName": "google.cloud.apihub.v1.Provisioning.GetApiHubInstance", + "service": { + "fullName": "google.cloud.apihub.v1.Provisioning", + "shortName": "Provisioning" + }, + "shortName": "GetApiHubInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetApiHubInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.ApiHubInstance", + "shortName": "get_api_hub_instance" + }, + "description": "Sample for GetApiHubInstance", + "file": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_Provisioning_GetApiHubInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.ProvisioningClient", + "shortName": "ProvisioningClient" + }, + "fullName": 
"google.cloud.apihub_v1.ProvisioningClient.lookup_api_hub_instance", + "method": { + "fullName": "google.cloud.apihub.v1.Provisioning.LookupApiHubInstance", + "service": { + "fullName": "google.cloud.apihub.v1.Provisioning", + "shortName": "Provisioning" + }, + "shortName": "LookupApiHubInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.LookupApiHubInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.LookupApiHubInstanceResponse", + "shortName": "lookup_api_hub_instance" + }, + "description": "Sample for LookupApiHubInstance", + "file": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_Provisioning_LookupApiHubInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", + "shortName": "RuntimeProjectAttachmentServiceClient" + }, + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.create_runtime_project_attachment", + "method": { + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment", + "service": { + "fullName": 
"google.cloud.apihub.v1.RuntimeProjectAttachmentService", + "shortName": "RuntimeProjectAttachmentService" + }, + "shortName": "CreateRuntimeProjectAttachment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.CreateRuntimeProjectAttachmentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "runtime_project_attachment", + "type": "google.cloud.apihub_v1.types.RuntimeProjectAttachment" + }, + { + "name": "runtime_project_attachment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", + "shortName": "create_runtime_project_attachment" + }, + "description": "Sample for CreateRuntimeProjectAttachment", + "file": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", + "shortName": "RuntimeProjectAttachmentServiceClient" + }, + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.delete_runtime_project_attachment", 
+ "method": { + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment", + "service": { + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", + "shortName": "RuntimeProjectAttachmentService" + }, + "shortName": "DeleteRuntimeProjectAttachment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.DeleteRuntimeProjectAttachmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_runtime_project_attachment" + }, + "description": "Sample for DeleteRuntimeProjectAttachment", + "file": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", + "shortName": "RuntimeProjectAttachmentServiceClient" + }, + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.get_runtime_project_attachment", + "method": { + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment", + "service": { + "fullName": 
"google.cloud.apihub.v1.RuntimeProjectAttachmentService", + "shortName": "RuntimeProjectAttachmentService" + }, + "shortName": "GetRuntimeProjectAttachment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.GetRuntimeProjectAttachmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", + "shortName": "get_runtime_project_attachment" + }, + "description": "Sample for GetRuntimeProjectAttachment", + "file": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", + "shortName": "RuntimeProjectAttachmentServiceClient" + }, + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.list_runtime_project_attachments", + "method": { + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments", + "service": { + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", + 
"shortName": "RuntimeProjectAttachmentService" + }, + "shortName": "ListRuntimeProjectAttachments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.services.runtime_project_attachment_service.pagers.ListRuntimeProjectAttachmentsPager", + "shortName": "list_runtime_project_attachments" + }, + "description": "Sample for ListRuntimeProjectAttachments", + "file": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", + "shortName": "RuntimeProjectAttachmentServiceClient" + }, + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.lookup_runtime_project_attachment", + "method": { + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.LookupRuntimeProjectAttachment", + "service": { + "fullName": 
"google.cloud.apihub.v1.RuntimeProjectAttachmentService", + "shortName": "RuntimeProjectAttachmentService" + }, + "shortName": "LookupRuntimeProjectAttachment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.apihub_v1.types.LookupRuntimeProjectAttachmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.apihub_v1.types.LookupRuntimeProjectAttachmentResponse", + "shortName": "lookup_runtime_project_attachment" + }, + "description": "Sample for LookupRuntimeProjectAttachment", + "file": "apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_LookupRuntimeProjectAttachment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_sync.py" + } + ] +} diff --git a/packages/google-cloud-apihub/scripts/decrypt-secrets.sh b/packages/google-cloud-apihub/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-apihub/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-apihub/scripts/fixup_apihub_v1_keywords.py b/packages/google-cloud-apihub/scripts/fixup_apihub_v1_keywords.py new file mode 100644 index 000000000000..75054a44830e --- /dev/null +++ b/packages/google-cloud-apihub/scripts/fixup_apihub_v1_keywords.py @@ -0,0 +1,233 @@ +#! 
/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class apihubCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_api': ('parent', 'api', 'api_id', ), + 'create_api_hub_instance': ('parent', 'api_hub_instance', 'api_hub_instance_id', ), + 'create_attribute': ('parent', 'attribute', 'attribute_id', ), + 'create_dependency': ('parent', 'dependency', 'dependency_id', ), + 'create_deployment': ('parent', 'deployment', 'deployment_id', ), + 'create_external_api': ('parent', 'external_api', 'external_api_id', ), + 'create_host_project_registration': ('parent', 'host_project_registration_id', 'host_project_registration', ), + 'create_runtime_project_attachment': ('parent', 'runtime_project_attachment_id', 'runtime_project_attachment', ), + 'create_spec': ('parent', 'spec', 'spec_id', ), + 'create_version': ('parent', 'version', 'version_id', ), + 
'delete_api': ('name', 'force', ), + 'delete_attribute': ('name', ), + 'delete_dependency': ('name', ), + 'delete_deployment': ('name', ), + 'delete_external_api': ('name', ), + 'delete_runtime_project_attachment': ('name', ), + 'delete_spec': ('name', ), + 'delete_version': ('name', 'force', ), + 'disable_plugin': ('name', ), + 'enable_plugin': ('name', ), + 'get_api': ('name', ), + 'get_api_hub_instance': ('name', ), + 'get_api_operation': ('name', ), + 'get_attribute': ('name', ), + 'get_definition': ('name', ), + 'get_dependency': ('name', ), + 'get_deployment': ('name', ), + 'get_external_api': ('name', ), + 'get_host_project_registration': ('name', ), + 'get_plugin': ('name', ), + 'get_runtime_project_attachment': ('name', ), + 'get_spec': ('name', ), + 'get_spec_contents': ('name', ), + 'get_style_guide': ('name', ), + 'get_style_guide_contents': ('name', ), + 'get_version': ('name', ), + 'lint_spec': ('name', ), + 'list_api_operations': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_apis': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_attributes': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_dependencies': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_deployments': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_external_apis': ('parent', 'page_size', 'page_token', ), + 'list_host_project_registrations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_runtime_project_attachments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_specs': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_versions': ('parent', 'filter', 'page_size', 'page_token', ), + 'lookup_api_hub_instance': ('parent', ), + 'lookup_runtime_project_attachment': ('name', ), + 'search_resources': ('location', 'query', 'filter', 'page_size', 'page_token', ), + 'update_api': ('api', 'update_mask', ), + 'update_attribute': ('attribute', 'update_mask', ), + 'update_dependency': 
('dependency', 'update_mask', ), + 'update_deployment': ('deployment', 'update_mask', ), + 'update_external_api': ('external_api', 'update_mask', ), + 'update_spec': ('spec', 'update_mask', ), + 'update_style_guide': ('style_guide', 'update_mask', ), + 'update_version': ('version', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=apihubCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. 
+ + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the apihub client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-apihub/setup.py b/packages/google-cloud-apihub/setup.py new file mode 100644 index 000000000000..a5e263388aa9 --- /dev/null +++ b/packages/google-cloud-apihub/setup.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-apihub" + + +description = "Google Cloud Apihub API client library" + +version = None + +with open(os.path.join(package_root, "google/cloud/apihub/gapic_version.py")) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming 
Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-apihub/testing/.gitignore b/packages/google-cloud-apihub/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-apihub/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-apihub/testing/constraints-3.10.txt b/packages/google-cloud-apihub/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-apihub/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-apihub/testing/constraints-3.11.txt b/packages/google-cloud-apihub/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-apihub/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-apihub/testing/constraints-3.12.txt b/packages/google-cloud-apihub/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-apihub/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-apihub/testing/constraints-3.7.txt b/packages/google-cloud-apihub/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/packages/google-cloud-apihub/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/packages/google-cloud-apihub/testing/constraints-3.8.txt b/packages/google-cloud-apihub/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-apihub/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-apihub/testing/constraints-3.9.txt b/packages/google-cloud-apihub/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-apihub/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. 
+# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-apihub/tests/__init__.py b/packages/google-cloud-apihub/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-apihub/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-apihub/tests/unit/__init__.py b/packages/google-cloud-apihub/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-apihub/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-apihub/tests/unit/gapic/__init__.py b/packages/google-cloud-apihub/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-apihub/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/__init__.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py new file mode 100644 index 000000000000..5f7c71ab5ffb --- /dev/null +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py @@ -0,0 +1,14699 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules 
import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.apihub_v1.services.api_hub import ApiHubClient, pagers, transports +from google.cloud.apihub_v1.types import apihub_service, common_fields + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ApiHubClient._get_default_mtls_endpoint(None) is None + assert ApiHubClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + ApiHubClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + ) + assert ( + ApiHubClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ApiHubClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ApiHubClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert ApiHubClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ApiHubClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ApiHubClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ApiHubClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ApiHubClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ApiHubClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ApiHubClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ApiHubClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ApiHubClient._read_environment_variables() == (False, "auto", "foo.com") + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ApiHubClient._get_client_cert_source(None, False) is None + assert ( + ApiHubClient._get_client_cert_source(mock_provided_cert_source, False) is None + ) + assert ( + ApiHubClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ApiHubClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ApiHubClient._get_client_cert_source(mock_provided_cert_source, "true") + is mock_provided_cert_source + ) + + +@mock.patch.object( + ApiHubClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ApiHubClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ApiHubClient._DEFAULT_UNIVERSE + default_endpoint = ApiHubClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe 
+ ) + mock_universe = "bar.com" + mock_endpoint = ApiHubClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ApiHubClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ApiHubClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ApiHubClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ApiHubClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ApiHubClient._get_api_endpoint(None, None, default_universe, "always") + == ApiHubClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ApiHubClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ApiHubClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ApiHubClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ApiHubClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ApiHubClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ApiHubClient._get_universe_domain(client_universe_domain, universe_domain_env) + == client_universe_domain + ) + assert ( + ApiHubClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ApiHubClient._get_universe_domain(None, None) == ApiHubClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ApiHubClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ApiHubClient, transports.ApiHubRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. 
+ client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ApiHubClient, "rest"), + ], +) +def test_api_hub_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ApiHubRestTransport, "rest"), + ], +) +def test_api_hub_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ApiHubClient, "rest"), + ], +) +def test_api_hub_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, 
"from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +def test_api_hub_client_get_transport_class(): + transport = ApiHubClient.get_transport_class() + available_transports = [ + transports.ApiHubRestTransport, + ] + assert transport in available_transports + + transport = ApiHubClient.get_transport_class("rest") + assert transport == transports.ApiHubRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ApiHubClient, transports.ApiHubRestTransport, "rest"), + ], +) +@mock.patch.object( + ApiHubClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ApiHubClient), +) +def test_api_hub_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ApiHubClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ApiHubClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ApiHubClient, transports.ApiHubRestTransport, "rest", "true"), + (ApiHubClient, transports.ApiHubRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ApiHubClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ApiHubClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_api_hub_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. 
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ApiHubClient]) +@mock.patch.object( + ApiHubClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubClient) +) +def test_api_hub_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [ApiHubClient]) +@mock.patch.object( + ApiHubClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ApiHubClient), +) +def test_api_hub_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ApiHubClient._DEFAULT_UNIVERSE + default_endpoint = ApiHubClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ApiHubClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ApiHubClient, transports.ApiHubRestTransport, "rest"), + ], +) +def test_api_hub_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (ApiHubClient, transports.ApiHubRestTransport, "rest", None), + ], +) +def test_api_hub_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.CreateApiRequest, + dict, + ], +) +def test_create_api_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["api"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "owner": {"display_name": "display_name_value", "email": "email_value"}, + "versions": ["versions_value1", "versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "target_user": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "team": {}, + "business_unit": {}, + "maturity_level": {}, + "attributes": {}, + "api_style": {}, + "selected_version": "selected_version_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateApiRequest.meta.fields["api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# 
pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["api"][field])): + del request_init["api"][field][i][subfield] + else: + del request_init["api"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api( + name="name_value", + display_name="display_name_value", + description="description_value", + versions=["versions_value"], + selected_version="selected_version_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Api) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.versions == ["versions_value"] + assert response.selected_version == "selected_version_value" + + +def test_create_api_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_api in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_api] = mock_rpc + + request = {} + client.create_api(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_api(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_api_rest_required_fields(request_type=apihub_service.CreateApiRequest): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("api_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_api(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("apiId",)) + & set( + ( + "parent", + "api", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
apihub_service.CreateApiRequest.pb( + apihub_service.CreateApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Api.to_json(common_fields.Api()) + + request = apihub_service.CreateApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Api() + + client.create_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateApiRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_api(request) + + +def test_create_api_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Api() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + api=common_fields.Api(name="name_value"), + api_id="api_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_api(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, + args[1], + ) + + +def test_create_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_api( + apihub_service.CreateApiRequest(), + parent="parent_value", + api=common_fields.Api(name="name_value"), + api_id="api_id_value", + ) + + +def test_create_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.GetApiRequest, + dict, + ], +) +def test_get_api_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api( + name="name_value", + display_name="display_name_value", + description="description_value", + versions=["versions_value"], + selected_version="selected_version_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Api) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.versions == ["versions_value"] + assert response.selected_version == "selected_version_value" + + +def test_get_api_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_api in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_api] = mock_rpc + + request = {} + client.get_api(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_api(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_api_rest_required_fields(request_type=apihub_service.GetApiRequest): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_api(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetApiRequest.pb(apihub_service.GetApiRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = common_fields.Api.to_json(common_fields.Api()) + + request = apihub_service.GetApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Api() + + client.get_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetApiRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_api(request) + + +def test_get_api_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Api() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_api(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, + args[1], + ) + + +def test_get_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_api( + apihub_service.GetApiRequest(), + name="name_value", + ) + + +def test_get_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.ListApisRequest, + dict, + ], +) +def test_list_apis_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApisResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_apis(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListApisPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_apis_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_apis in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_apis] = mock_rpc + + request = {} + client.list_apis(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_apis(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_apis_rest_required_fields(request_type=apihub_service.ListApisRequest): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_apis._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_apis._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApisResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = apihub_service.ListApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_apis(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_apis_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_apis._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_apis_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.ApiHubRestInterceptor, "post_list_apis" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_apis" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListApisRequest.pb(apihub_service.ListApisRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListApisResponse.to_json( + apihub_service.ListApisResponse() + ) + + request = apihub_service.ListApisRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListApisResponse() + + client.list_apis( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_apis_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListApisRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_apis(request) + + +def test_list_apis_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApisResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_apis(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, + args[1], + ) + + +def test_list_apis_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_apis( + apihub_service.ListApisRequest(), + parent="parent_value", + ) + + +def test_list_apis_rest_pager(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + apihub_service.ListApisResponse( + apis=[ + common_fields.Api(), + common_fields.Api(), + common_fields.Api(), + ], + next_page_token="abc", + ), + apihub_service.ListApisResponse( + apis=[], + next_page_token="def", + ), + apihub_service.ListApisResponse( + apis=[ + common_fields.Api(), + ], + next_page_token="ghi", + ), + apihub_service.ListApisResponse( + apis=[ + common_fields.Api(), + common_fields.Api(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(apihub_service.ListApisResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_apis(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, common_fields.Api) for i in results) + + pages = list(client.list_apis(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.UpdateApiRequest, + dict, + ], +) +def test_update_api_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} + request_init["api"] = { + "name": "projects/sample1/locations/sample2/apis/sample3", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": 
"external_uri_value"}, + "owner": {"display_name": "display_name_value", "email": "email_value"}, + "versions": ["versions_value1", "versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "target_user": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "team": {}, + "business_unit": {}, + "maturity_level": {}, + "attributes": {}, + "api_style": {}, + "selected_version": "selected_version_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateApiRequest.meta.fields["api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["api"][field])): + del request_init["api"][field][i][subfield] + else: + del 
request_init["api"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api( + name="name_value", + display_name="display_name_value", + description="description_value", + versions=["versions_value"], + selected_version="selected_version_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_api(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common_fields.Api) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.versions == ["versions_value"] + assert response.selected_version == "selected_version_value" + + +def test_update_api_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_api in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # 
operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_api] = mock_rpc + + request = {} + client.update_api(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_api(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_api_rest_required_fields(request_type=apihub_service.UpdateApiRequest): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_api(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "api", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateApiRequest.pb( + apihub_service.UpdateApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Api.to_json(common_fields.Api()) + + request = apihub_service.UpdateApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Api() + + client.update_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateApiRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_api(request) + + +def test_update_api_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + + # get arguments that satisfy an http rule for this method + sample_request = { + "api": {"name": "projects/sample1/locations/sample2/apis/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + api=common_fields.Api(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_api(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{api.name=projects/*/locations/*/apis/*}" % client.transport._host, + args[1], + ) + + +def test_update_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_api( + apihub_service.UpdateApiRequest(), + api=common_fields.Api(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.DeleteApiRequest, + dict, + ], +) +def test_delete_api_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_api(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_api_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_api in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_api] = mock_rpc + + request = {} + client.delete_api(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_api(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_api_rest_required_fields(request_type=apihub_service.DeleteApiRequest): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).delete_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_api(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_api" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteApiRequest.pb( + apihub_service.DeleteApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_api( + request, + 
metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteApiRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_api(request) + + +def test_delete_api_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_api(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, + args[1], + ) + + +def test_delete_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_api( + apihub_service.DeleteApiRequest(), + name="name_value", + ) + + +def test_delete_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.CreateVersionRequest, + dict, + ], +) +def test_create_version_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request_init["version"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "specs": ["specs_value1", "specs_value2"], + "api_operations": ["api_operations_value1", "api_operations_value2"], + "definitions": ["definitions_value1", "definitions_value2"], + "deployments": ["deployments_value1", "deployments_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lifecycle": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "compliance": {}, + "accreditation": {}, + "attributes": {}, + 
"selected_deployment": "selected_deployment_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateVersionRequest.meta.fields["version"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["version"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + 
"subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["version"][field])): + del request_init["version"][field][i][subfield] + else: + del request_init["version"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version( + name="name_value", + display_name="display_name_value", + description="description_value", + specs=["specs_value"], + api_operations=["api_operations_value"], + definitions=["definitions_value"], + deployments=["deployments_value"], + selected_deployment="selected_deployment_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_version(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Version) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.specs == ["specs_value"] + assert response.api_operations == ["api_operations_value"] + assert response.definitions == ["definitions_value"] + assert response.deployments == ["deployments_value"] + assert response.selected_deployment == "selected_deployment_value" + + +def test_create_version_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_version in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_version] = mock_rpc + + request = {} + client.create_version(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_version(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_version_rest_required_fields( + request_type=apihub_service.CreateVersionRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_version._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("version_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_version._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("versionId",)) + & set( + ( + "parent", + "version", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_version" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_version" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = apihub_service.CreateVersionRequest.pb( + apihub_service.CreateVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Version.to_json( + common_fields.Version() + ) + + request = apihub_service.CreateVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Version() + + client.create_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateVersionRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_version(request) + + +def test_create_version_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Version() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + version=common_fields.Version(name="name_value"), + version_id="version_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" + % client.transport._host, + args[1], + ) + + +def test_create_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_version( + apihub_service.CreateVersionRequest(), + parent="parent_value", + version=common_fields.Version(name="name_value"), + version_id="version_id_value", + ) + + +def test_create_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.GetVersionRequest, + dict, + ], +) +def test_get_version_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version( + name="name_value", + display_name="display_name_value", + description="description_value", + specs=["specs_value"], + api_operations=["api_operations_value"], + definitions=["definitions_value"], + deployments=["deployments_value"], + selected_deployment="selected_deployment_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_version(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Version) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.specs == ["specs_value"] + assert response.api_operations == ["api_operations_value"] + assert response.definitions == ["definitions_value"] + assert response.deployments == ["deployments_value"] + assert response.selected_deployment == "selected_deployment_value" + + +def test_get_version_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_version in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_version] = mock_rpc + + request = {} + client.get_version(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_version(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_version_rest_required_fields( + request_type=apihub_service.GetVersionRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_version" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_version" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetVersionRequest.pb( + apihub_service.GetVersionRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Version.to_json( + common_fields.Version() + ) + + request = apihub_service.GetVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Version() + + client.get_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetVersionRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_version(request) + + +def test_get_version_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Version() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" + % client.transport._host, + args[1], + ) + + +def test_get_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_version( + apihub_service.GetVersionRequest(), + name="name_value", + ) + + +def test_get_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.ListVersionsRequest, + dict, + ], +) +def test_list_versions_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListVersionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_versions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListVersionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_versions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_versions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_versions] = mock_rpc + + request = {} + client.list_versions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_versions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_versions_rest_required_fields( + request_type=apihub_service.ListVersionsRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListVersionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = apihub_service.ListVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_versions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_versions_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_versions_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_versions" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_versions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListVersionsRequest.pb( + apihub_service.ListVersionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListVersionsResponse.to_json( + apihub_service.ListVersionsResponse() + ) + + request = apihub_service.ListVersionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListVersionsResponse() + + client.list_versions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_versions_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListVersionsRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_versions(request) + + +def test_list_versions_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListVersionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_versions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" + % client.transport._host, + args[1], + ) + + +def test_list_versions_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_versions( + apihub_service.ListVersionsRequest(), + parent="parent_value", + ) + + +def test_list_versions_rest_pager(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + apihub_service.ListVersionsResponse( + versions=[ + common_fields.Version(), + common_fields.Version(), + common_fields.Version(), + ], + next_page_token="abc", + ), + apihub_service.ListVersionsResponse( + versions=[], + next_page_token="def", + ), + apihub_service.ListVersionsResponse( + versions=[ + common_fields.Version(), + ], + next_page_token="ghi", + ), + apihub_service.ListVersionsResponse( + versions=[ + common_fields.Version(), + common_fields.Version(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListVersionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + 
req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + + pager = client.list_versions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, common_fields.Version) for i in results) + + pages = list(client.list_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.UpdateVersionRequest, + dict, + ], +) +def test_update_version_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "version": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + } + request_init["version"] = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "specs": ["specs_value1", "specs_value2"], + "api_operations": ["api_operations_value1", "api_operations_value2"], + "definitions": ["definitions_value1", "definitions_value2"], + "deployments": ["deployments_value1", "deployments_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lifecycle": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "compliance": {}, + "accreditation": {}, + "attributes": {}, + "selected_deployment": "selected_deployment_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateVersionRequest.meta.fields["version"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["version"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # 
Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["version"][field])): + del request_init["version"][field][i][subfield] + else: + del request_init["version"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version( + name="name_value", + display_name="display_name_value", + description="description_value", + specs=["specs_value"], + api_operations=["api_operations_value"], + definitions=["definitions_value"], + deployments=["deployments_value"], + selected_deployment="selected_deployment_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_version(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Version) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.specs == ["specs_value"] + assert response.api_operations == ["api_operations_value"] + assert response.definitions == ["definitions_value"] + assert response.deployments == ["deployments_value"] + assert response.selected_deployment == "selected_deployment_value" + + +def test_update_version_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_version in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_version] = mock_rpc + + request = {} + client.update_version(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_version(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_version_rest_required_fields( + request_type=apihub_service.UpdateVersionRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_version._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_version._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "version", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_version" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_version" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = apihub_service.UpdateVersionRequest.pb( + apihub_service.UpdateVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Version.to_json( + common_fields.Version() + ) + + request = apihub_service.UpdateVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Version() + + client.update_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateVersionRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "version": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_version(request) + + +def test_update_version_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + + # get arguments that satisfy an http rule for this method + sample_request = { + "version": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + version=common_fields.Version(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{version.name=projects/*/locations/*/apis/*/versions/*}" + % client.transport._host, + args[1], + ) + + +def test_update_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_version( + apihub_service.UpdateVersionRequest(), + version=common_fields.Version(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.DeleteVersionRequest, + dict, + ], +) +def test_delete_version_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_version(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_version_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_version in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_version] = mock_rpc + + request = {} + client.delete_version(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_version(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_version_rest_required_fields( + request_type=apihub_service.DeleteVersionRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).delete_version._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_version" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteVersionRequest.pb( + apihub_service.DeleteVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + 
client.delete_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteVersionRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_version(request) + + +def test_delete_version_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_version( + apihub_service.DeleteVersionRequest(), + name="name_value", + ) + + +def test_delete_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.CreateSpecRequest, + dict, + ], +) +def test_create_spec_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request_init["spec"] = { + "name": "name_value", + "display_name": "display_name_value", + "spec_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, + "details": { + "open_api_spec_details": { + "format_": 1, + "version": "version_value", + "owner": {"display_name": "display_name_value", "email": "email_value"}, + }, + "description": "description_value", + }, + "source_uri": "source_uri_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lint_response": { + "issues": [ + { + "code": 
"code_value", + "path": ["path_value1", "path_value2"], + "message": "message_value", + "severity": 1, + "range_": {"start": {"line": 424, "character": 941}, "end": {}}, + } + ], + "summary": [{"severity": 1, "count": 553}], + "state": 1, + "source": "source_value", + "linter": 1, + "create_time": {}, + }, + "attributes": {}, + "documentation": {"external_uri": "external_uri_value"}, + "parsing_mode": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateSpecRequest.meta.fields["spec"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["spec"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["spec"][field])): + del request_init["spec"][field][i][subfield] + else: + del 
request_init["spec"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec( + name="name_value", + display_name="display_name_value", + source_uri="source_uri_value", + parsing_mode=common_fields.Spec.ParsingMode.RELAXED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_spec(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common_fields.Spec) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_uri == "source_uri_value" + assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + + +def test_create_spec_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_spec in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_spec] = mock_rpc + + request = {} + client.create_spec(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_spec(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_spec_rest_required_fields( + request_type=apihub_service.CreateSpecRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_spec._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("spec_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_spec(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_spec._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("specId",)) + & set( + ( + "parent", + "spec", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_spec" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_spec" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateSpecRequest.pb( + apihub_service.CreateSpecRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) + + request = apihub_service.CreateSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Spec() + + client.create_spec( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateSpecRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_spec(request) + + +def test_create_spec_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + spec=common_fields.Spec(name="name_value"), + spec_id="spec_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_spec(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" + % client.transport._host, + args[1], + ) + + +def test_create_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_spec( + apihub_service.CreateSpecRequest(), + parent="parent_value", + spec=common_fields.Spec(name="name_value"), + spec_id="spec_id_value", + ) + + +def test_create_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.GetSpecRequest, + dict, + ], +) +def test_get_spec_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Spec( + name="name_value", + display_name="display_name_value", + source_uri="source_uri_value", + parsing_mode=common_fields.Spec.ParsingMode.RELAXED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common_fields.Spec) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_uri == "source_uri_value" + assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + + +def test_get_spec_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_spec in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_spec] = mock_rpc + + request = {} + client.get_spec(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_spec(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_spec_rest_required_fields(request_type=apihub_service.GetSpecRequest): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_spec(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_spec._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_spec" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_spec" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetSpecRequest.pb(apihub_service.GetSpecRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) + + request = apihub_service.GetSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Spec() + + client.get_spec( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetSpecRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_spec(request) + + +def test_get_spec_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Spec() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_spec(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" + % client.transport._host, + args[1], + ) + + +def test_get_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_spec( + apihub_service.GetSpecRequest(), + name="name_value", + ) + + +def test_get_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.GetSpecContentsRequest, + dict, + ], +) +def test_get_spec_contents_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.SpecContents( + contents=b"contents_blob", + mime_type="mime_type_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.SpecContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec_contents(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.SpecContents) + assert response.contents == b"contents_blob" + assert response.mime_type == "mime_type_value" + + +def test_get_spec_contents_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_spec_contents in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_spec_contents + ] = mock_rpc + + request = {} + client.get_spec_contents(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_spec_contents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_spec_contents_rest_required_fields( + request_type=apihub_service.GetSpecContentsRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.SpecContents() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.SpecContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_spec_contents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_spec_contents_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_spec_contents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_spec_contents_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_spec_contents" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_spec_contents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetSpecContentsRequest.pb( 
+ apihub_service.GetSpecContentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.SpecContents.to_json( + common_fields.SpecContents() + ) + + request = apihub_service.GetSpecContentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.SpecContents() + + client.get_spec_contents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_spec_contents_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetSpecContentsRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_spec_contents(request) + + +def test_get_spec_contents_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.SpecContents() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.SpecContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_spec_contents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}:contents" + % client.transport._host, + args[1], + ) + + +def test_get_spec_contents_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_spec_contents( + apihub_service.GetSpecContentsRequest(), + name="name_value", + ) + + +def test_get_spec_contents_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.ListSpecsRequest, + dict, + ], +) +def test_list_specs_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListSpecsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListSpecsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_specs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSpecsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_specs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_specs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_specs] = mock_rpc + + request = {} + client.list_specs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_specs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_specs_rest_required_fields(request_type=apihub_service.ListSpecsRequest): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_specs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_specs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListSpecsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = apihub_service.ListSpecsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_specs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_specs_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_specs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_specs_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.ApiHubRestInterceptor, "post_list_specs" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_specs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListSpecsRequest.pb( + apihub_service.ListSpecsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListSpecsResponse.to_json( + apihub_service.ListSpecsResponse() + ) + + request = apihub_service.ListSpecsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListSpecsResponse() + + client.list_specs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_specs_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListSpecsRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_specs(request) + + +def test_list_specs_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListSpecsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListSpecsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_specs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" + % client.transport._host, + args[1], + ) + + +def test_list_specs_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_specs( + apihub_service.ListSpecsRequest(), + parent="parent_value", + ) + + +def test_list_specs_rest_pager(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + apihub_service.ListSpecsResponse( + specs=[ + common_fields.Spec(), + common_fields.Spec(), + common_fields.Spec(), + ], + next_page_token="abc", + ), + apihub_service.ListSpecsResponse( + specs=[], + next_page_token="def", + ), + apihub_service.ListSpecsResponse( + specs=[ + common_fields.Spec(), + ], + next_page_token="ghi", + ), + apihub_service.ListSpecsResponse( + specs=[ + common_fields.Spec(), + common_fields.Spec(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(apihub_service.ListSpecsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + 
"parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + pager = client.list_specs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, common_fields.Spec) for i in results) + + pages = list(client.list_specs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.UpdateSpecRequest, + dict, + ], +) +def test_update_spec_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "spec": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + } + request_init["spec"] = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5", + "display_name": "display_name_value", + "spec_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, + "details": { + "open_api_spec_details": { + "format_": 1, + "version": "version_value", + "owner": {"display_name": "display_name_value", "email": "email_value"}, + }, + "description": "description_value", + }, + "source_uri": "source_uri_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lint_response": { + "issues": [ + { + "code": "code_value", + "path": ["path_value1", "path_value2"], + "message": "message_value", + "severity": 1, + "range_": {"start": {"line": 424, "character": 941}, "end": {}}, + } + ], + "summary": [{"severity": 1, "count": 
553}], + "state": 1, + "source": "source_value", + "linter": 1, + "create_time": {}, + }, + "attributes": {}, + "documentation": {"external_uri": "external_uri_value"}, + "parsing_mode": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateSpecRequest.meta.fields["spec"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["spec"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in 
result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["spec"][field])): + del request_init["spec"][field][i][subfield] + else: + del request_init["spec"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec( + name="name_value", + display_name="display_name_value", + source_uri="source_uri_value", + parsing_mode=common_fields.Spec.ParsingMode.RELAXED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_spec(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Spec) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_uri == "source_uri_value" + assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + + +def test_update_spec_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_spec in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_spec] = mock_rpc + + request = {} + client.update_spec(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_spec(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_spec_rest_required_fields( + request_type=apihub_service.UpdateSpecRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_spec._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_spec(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_spec._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "spec", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_spec" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_spec" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateSpecRequest.pb( + apihub_service.UpdateSpecRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) + + request = apihub_service.UpdateSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Spec() + + client.update_spec( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateSpecRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "spec": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_spec(request) + + +def test_update_spec_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Spec() + + # get arguments that satisfy an http rule for this method + sample_request = { + "spec": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + } + + # get truthy value for each flattened field + mock_args = dict( + spec=common_fields.Spec(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_spec(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{spec.name=projects/*/locations/*/apis/*/versions/*/specs/*}" + % client.transport._host, + args[1], + ) + + +def test_update_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_spec( + apihub_service.UpdateSpecRequest(), + spec=common_fields.Spec(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.DeleteSpecRequest, + dict, + ], +) +def test_delete_spec_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_spec(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_spec_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_spec in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_spec] = mock_rpc + + request = {} + client.delete_spec(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_spec(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_spec_rest_required_fields( + request_type=apihub_service.DeleteSpecRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).delete_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_spec(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_spec._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_spec" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteSpecRequest.pb( + apihub_service.DeleteSpecRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_spec( + request, + 
metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteSpecRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_spec(request) + + +def test_delete_spec_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_spec(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_spec( + apihub_service.DeleteSpecRequest(), + name="name_value", + ) + + +def test_delete_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.GetApiOperationRequest, + dict, + ], +) +def test_get_api_operation_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ApiOperation( + name="name_value", + spec="spec_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiOperation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common_fields.ApiOperation) + assert response.name == "name_value" + assert response.spec == "spec_value" + + +def test_get_api_operation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_api_operation in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_api_operation + ] = mock_rpc + + request = {} + client.get_api_operation(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_api_operation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_api_operation_rest_required_fields( + request_type=apihub_service.GetApiOperationRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_operation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_operation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiOperation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.ApiOperation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_api_operation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_api_operation_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_api_operation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_api_operation_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_api_operation" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_api_operation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetApiOperationRequest.pb( 
+ apihub_service.GetApiOperationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ApiOperation.to_json( + common_fields.ApiOperation() + ) + + request = apihub_service.GetApiOperationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ApiOperation() + + client.get_api_operation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_api_operation_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetApiOperationRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_api_operation(request) + + +def test_get_api_operation_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ApiOperation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiOperation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_api_operation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/operations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_api_operation_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_api_operation( + apihub_service.GetApiOperationRequest(), + name="name_value", + ) + + +def test_get_api_operation_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.ListApiOperationsRequest, + dict, + ], +) +def test_list_api_operations_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApiOperationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApiOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_api_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListApiOperationsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_api_operations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_api_operations in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_api_operations + ] = mock_rpc + + request = {} + client.list_api_operations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_api_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_api_operations_rest_required_fields( + request_type=apihub_service.ListApiOperationsRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_api_operations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_api_operations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApiOperationsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = apihub_service.ListApiOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_api_operations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_api_operations_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_api_operations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_api_operations_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_api_operations" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_api_operations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListApiOperationsRequest.pb( + apihub_service.ListApiOperationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListApiOperationsResponse.to_json( + apihub_service.ListApiOperationsResponse() + ) + + request = apihub_service.ListApiOperationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListApiOperationsResponse() + + client.list_api_operations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_api_operations_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListApiOperationsRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_api_operations(request) + + +def test_list_api_operations_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApiOperationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApiOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_api_operations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/operations" + % client.transport._host, + args[1], + ) + + +def test_list_api_operations_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_api_operations( + apihub_service.ListApiOperationsRequest(), + parent="parent_value", + ) + + +def test_list_api_operations_rest_pager(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + apihub_service.ListApiOperationsResponse( + api_operations=[ + common_fields.ApiOperation(), + common_fields.ApiOperation(), + common_fields.ApiOperation(), + ], + next_page_token="abc", + ), + apihub_service.ListApiOperationsResponse( + api_operations=[], + next_page_token="def", + ), + apihub_service.ListApiOperationsResponse( + api_operations=[ + common_fields.ApiOperation(), + ], + next_page_token="ghi", + ), + apihub_service.ListApiOperationsResponse( + api_operations=[ + common_fields.ApiOperation(), + common_fields.ApiOperation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListApiOperationsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + pager = client.list_api_operations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, common_fields.ApiOperation) for i in results) + + pages = list(client.list_api_operations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.GetDefinitionRequest, + dict, + ], +) +def test_get_definition_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": 
"projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Definition( + name="name_value", + spec="spec_value", + type_=common_fields.Definition.Type.SCHEMA, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Definition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_definition(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common_fields.Definition) + assert response.name == "name_value" + assert response.spec == "spec_value" + assert response.type_ == common_fields.Definition.Type.SCHEMA + + +def test_get_definition_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_definition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_definition] = mock_rpc + + request = {} + client.get_definition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_definition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_definition_rest_required_fields( + request_type=apihub_service.GetDefinitionRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_definition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_definition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Definition() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Definition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_definition(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_definition_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_definition._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_definition_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, 
"post_get_definition" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_definition" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetDefinitionRequest.pb( + apihub_service.GetDefinitionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Definition.to_json( + common_fields.Definition() + ) + + request = apihub_service.GetDefinitionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Definition() + + client.get_definition( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_definition_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetDefinitionRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_definition(request) + + +def test_get_definition_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Definition() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Definition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_definition(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/definitions/*}" + % client.transport._host, + args[1], + ) + + +def test_get_definition_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_definition( + apihub_service.GetDefinitionRequest(), + name="name_value", + ) + + +def test_get_definition_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.CreateDeploymentRequest, + dict, + ], +) +def test_create_deployment_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["deployment"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "deployment_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "resource_uri": "resource_uri_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "api_versions": ["api_versions_value1", "api_versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "slo": {}, + "environment": {}, + "attributes": {}, + } + # The version of a generated dependency at test 
runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateDeploymentRequest.meta.fields["deployment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request 
which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment"][field])): + del request_init["deployment"][field][i][subfield] + else: + del request_init["deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment( + name="name_value", + display_name="display_name_value", + description="description_value", + resource_uri="resource_uri_value", + endpoints=["endpoints_value"], + api_versions=["api_versions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Deployment) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.resource_uri == "resource_uri_value" + assert response.endpoints == ["endpoints_value"] + assert response.api_versions == ["api_versions_value"] + + +def test_create_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_deployment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_deployment + ] = mock_rpc + + request = {} + client.create_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_deployment_rest_required_fields( + request_type=apihub_service.CreateDeploymentRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("deployment_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("deploymentId",)) + & set( + ( + "parent", + "deployment", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_deployment" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateDeploymentRequest.pb( + apihub_service.CreateDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Deployment.to_json( + common_fields.Deployment() + ) + + request = apihub_service.CreateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Deployment() + + client.create_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateDeploymentRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_deployment(request) + + +def test_create_deployment_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + deployment=common_fields.Deployment(name="name_value"), + deployment_id="deployment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) + + +def test_create_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deployment( + apihub_service.CreateDeploymentRequest(), + parent="parent_value", + deployment=common_fields.Deployment(name="name_value"), + deployment_id="deployment_id_value", + ) + + +def test_create_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.GetDeploymentRequest, + dict, + ], +) +def test_get_deployment_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Deployment( + name="name_value", + display_name="display_name_value", + description="description_value", + resource_uri="resource_uri_value", + endpoints=["endpoints_value"], + api_versions=["api_versions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_deployment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common_fields.Deployment) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.resource_uri == "resource_uri_value" + assert response.endpoints == ["endpoints_value"] + assert response.api_versions == ["api_versions_value"] + + +def test_get_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_deployment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_deployment] = mock_rpc + + request = {} + client.get_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_deployment_rest_required_fields( + request_type=apihub_service.GetDeploymentRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, 
"post_get_deployment" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetDeploymentRequest.pb( + apihub_service.GetDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Deployment.to_json( + common_fields.Deployment() + ) + + request = apihub_service.GetDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Deployment() + + client.get_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetDeploymentRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_deployment(request) + + +def test_get_deployment_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_deployment( + apihub_service.GetDeploymentRequest(), + name="name_value", + ) + + +def test_get_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.ListDeploymentsRequest, + dict, + ], +) +def test_list_deployments_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDeploymentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_deployments(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeploymentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_deployments_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_deployments in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_deployments + ] = mock_rpc + + request = {} + client.list_deployments(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_deployments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_deployments_rest_required_fields( + request_type=apihub_service.ListDeploymentsRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDeploymentsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = apihub_service.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_deployments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_deployments_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_deployments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_deployments_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_deployments" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_deployments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListDeploymentsRequest.pb( + apihub_service.ListDeploymentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListDeploymentsResponse.to_json( + apihub_service.ListDeploymentsResponse() + ) + + request = apihub_service.ListDeploymentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListDeploymentsResponse() + + client.list_deployments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_deployments_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListDeploymentsRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_deployments(request) + + +def test_list_deployments_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDeploymentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_deployments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) + + +def test_list_deployments_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deployments( + apihub_service.ListDeploymentsRequest(), + parent="parent_value", + ) + + +def test_list_deployments_rest_pager(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + apihub_service.ListDeploymentsResponse( + deployments=[ + common_fields.Deployment(), + common_fields.Deployment(), + common_fields.Deployment(), + ], + next_page_token="abc", + ), + apihub_service.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + apihub_service.ListDeploymentsResponse( + deployments=[ + common_fields.Deployment(), + ], + next_page_token="ghi", + ), + apihub_service.ListDeploymentsResponse( + deployments=[ + common_fields.Deployment(), + common_fields.Deployment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListDeploymentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = 
response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_deployments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, common_fields.Deployment) for i in results) + + pages = list(client.list_deployments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.UpdateDeploymentRequest, + dict, + ], +) +def test_update_deployment_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request_init["deployment"] = { + "name": "projects/sample1/locations/sample2/deployments/sample3", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "deployment_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "resource_uri": "resource_uri_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "api_versions": ["api_versions_value1", "api_versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "slo": {}, + "environment": {}, + "attributes": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateDeploymentRequest.meta.fields["deployment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment"][field])): + del request_init["deployment"][field][i][subfield] + else: + del request_init["deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment( + name="name_value", + display_name="display_name_value", + description="description_value", + resource_uri="resource_uri_value", + endpoints=["endpoints_value"], + api_versions=["api_versions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Deployment) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.resource_uri == "resource_uri_value" + assert response.endpoints == ["endpoints_value"] + assert response.api_versions == ["api_versions_value"] + + +def test_update_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_deployment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_deployment + ] = mock_rpc + + request = {} + client.update_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_deployment_rest_required_fields( + request_type=apihub_service.UpdateDeploymentRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "deployment", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_deployment" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_deployment" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateDeploymentRequest.pb( + apihub_service.UpdateDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Deployment.to_json( + common_fields.Deployment() + ) + + request = apihub_service.UpdateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Deployment() + + client.update_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateDeploymentRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_deployment(request) + + +def test_update_deployment_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "deployment": { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + deployment=common_fields.Deployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_deployment( + apihub_service.UpdateDeploymentRequest(), + deployment=common_fields.Deployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.DeleteDeploymentRequest, + dict, + ], +) +def test_delete_deployment_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_deployment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_deployment + ] = mock_rpc + + request = {} + client.delete_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_deployment_rest_required_fields( + request_type=apihub_service.DeleteDeploymentRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_deployment" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteDeploymentRequest.pb( + apihub_service.DeleteDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteDeploymentRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deployment(request) + + +def test_delete_deployment_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_deployment( + apihub_service.DeleteDeploymentRequest(), + name="name_value", + ) + + +def test_delete_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.CreateAttributeRequest, + dict, + ], +) +def test_create_attribute_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["attribute"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "definition_type": 1, + "scope": 1, + "data_type": 1, + "allowed_values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ], + "cardinality": 1172, + "mandatory": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateAttributeRequest.meta.fields["attribute"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["attribute"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["attribute"][field])): + del request_init["attribute"][field][i][subfield] + else: + del 
request_init["attribute"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute( + name="name_value", + display_name="display_name_value", + description="description_value", + definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, + scope=common_fields.Attribute.Scope.API, + data_type=common_fields.Attribute.DataType.ENUM, + cardinality=1172, + mandatory=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_attribute(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Attribute) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert ( + response.definition_type + == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED + ) + assert response.scope == common_fields.Attribute.Scope.API + assert response.data_type == common_fields.Attribute.DataType.ENUM + assert response.cardinality == 1172 + assert response.mandatory is True + + +def test_create_attribute_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_attribute + ] = mock_rpc + + request = {} + client.create_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_attribute_rest_required_fields( + request_type=apihub_service.CreateAttributeRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_attribute._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("attribute_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_attribute(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_attribute._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("attributeId",)) + & set( + ( + "parent", + "attribute", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_attribute" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_attribute" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateAttributeRequest.pb( + apihub_service.CreateAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Attribute.to_json( + common_fields.Attribute() + ) + + request = apihub_service.CreateAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Attribute() + + client.create_attribute( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateAttributeRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_attribute(request) + + +def test_create_attribute_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + attribute=common_fields.Attribute(name="name_value"), + attribute_id="attribute_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_attribute(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, + args[1], + ) + + +def test_create_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_attribute( + apihub_service.CreateAttributeRequest(), + parent="parent_value", + attribute=common_fields.Attribute(name="name_value"), + attribute_id="attribute_id_value", + ) + + +def test_create_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.GetAttributeRequest, + dict, + ], +) +def test_get_attribute_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Attribute( + name="name_value", + display_name="display_name_value", + description="description_value", + definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, + scope=common_fields.Attribute.Scope.API, + data_type=common_fields.Attribute.DataType.ENUM, + cardinality=1172, + mandatory=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_attribute(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common_fields.Attribute) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert ( + response.definition_type + == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED + ) + assert response.scope == common_fields.Attribute.Scope.API + assert response.data_type == common_fields.Attribute.DataType.ENUM + assert response.cardinality == 1172 + assert response.mandatory is True + + +def test_get_attribute_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + 
mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_attribute] = mock_rpc + + request = {} + client.get_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_attribute_rest_required_fields( + request_type=apihub_service.GetAttributeRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_attribute(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_attribute._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_attribute" + ) 
as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_attribute" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetAttributeRequest.pb( + apihub_service.GetAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Attribute.to_json( + common_fields.Attribute() + ) + + request = apihub_service.GetAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Attribute() + + client.get_attribute( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetAttributeRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_attribute(request) + + +def test_get_attribute_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/attributes/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_attribute(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, + args[1], + ) + + +def test_get_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_attribute( + apihub_service.GetAttributeRequest(), + name="name_value", + ) + + +def test_get_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.UpdateAttributeRequest, + dict, + ], +) +def test_update_attribute_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} + } + request_init["attribute"] = { + "name": "projects/sample1/locations/sample2/attributes/sample3", + "display_name": "display_name_value", + "description": "description_value", + "definition_type": 1, + "scope": 1, + "data_type": 1, + "allowed_values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ], + "cardinality": 1172, + "mandatory": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateAttributeRequest.meta.fields["attribute"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["attribute"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["attribute"][field])): + del request_init["attribute"][field][i][subfield] + else: + del 
request_init["attribute"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute( + name="name_value", + display_name="display_name_value", + description="description_value", + definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, + scope=common_fields.Attribute.Scope.API, + data_type=common_fields.Attribute.DataType.ENUM, + cardinality=1172, + mandatory=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_attribute(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Attribute) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert ( + response.definition_type + == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED + ) + assert response.scope == common_fields.Attribute.Scope.API + assert response.data_type == common_fields.Attribute.DataType.ENUM + assert response.cardinality == 1172 + assert response.mandatory is True + + +def test_update_attribute_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_attribute + ] = mock_rpc + + request = {} + client.update_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_attribute_rest_required_fields( + request_type=apihub_service.UpdateAttributeRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_attribute._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_attribute(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_attribute._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "attribute", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_attribute" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_attribute" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateAttributeRequest.pb( + apihub_service.UpdateAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Attribute.to_json( + common_fields.Attribute() + ) + + request = apihub_service.UpdateAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Attribute() + + client.update_attribute( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateAttributeRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_attribute(request) + + +def test_update_attribute_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + + # get arguments that satisfy an http rule for this method + sample_request = { + "attribute": { + "name": "projects/sample1/locations/sample2/attributes/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + attribute=common_fields.Attribute(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_attribute(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{attribute.name=projects/*/locations/*/attributes/*}" + % client.transport._host, + args[1], + ) + + +def test_update_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_attribute( + apihub_service.UpdateAttributeRequest(), + attribute=common_fields.Attribute(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.DeleteAttributeRequest, + dict, + ], +) +def test_delete_attribute_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_attribute(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_attribute_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_attribute + ] = mock_rpc + + request = {} + client.delete_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_attribute_rest_required_fields( + request_type=apihub_service.DeleteAttributeRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_attribute(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_attribute._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_attribute" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteAttributeRequest.pb( + apihub_service.DeleteAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_attribute( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteAttributeRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_attribute(request) + + +def test_delete_attribute_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/attributes/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_attribute(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, + args[1], + ) + + +def test_delete_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_attribute( + apihub_service.DeleteAttributeRequest(), + name="name_value", + ) + + +def test_delete_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.ListAttributesRequest, + dict, + ], +) +def test_list_attributes_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListAttributesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListAttributesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_attributes(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAttributesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_attributes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_attributes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_attributes] = mock_rpc + + request = {} + client.list_attributes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_attributes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_attributes_rest_required_fields( + request_type=apihub_service.ListAttributesRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_attributes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_attributes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListAttributesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = apihub_service.ListAttributesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_attributes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_attributes_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_attributes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_attributes_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_attributes" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_attributes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListAttributesRequest.pb( + apihub_service.ListAttributesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListAttributesResponse.to_json( + apihub_service.ListAttributesResponse() + ) + + request = apihub_service.ListAttributesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListAttributesResponse() + + client.list_attributes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_attributes_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListAttributesRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_attributes(request) + + +def test_list_attributes_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListAttributesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListAttributesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_attributes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, + args[1], + ) + + +def test_list_attributes_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_attributes( + apihub_service.ListAttributesRequest(), + parent="parent_value", + ) + + +def test_list_attributes_rest_pager(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + apihub_service.ListAttributesResponse( + attributes=[ + common_fields.Attribute(), + common_fields.Attribute(), + common_fields.Attribute(), + ], + next_page_token="abc", + ), + apihub_service.ListAttributesResponse( + attributes=[], + next_page_token="def", + ), + apihub_service.ListAttributesResponse( + attributes=[ + common_fields.Attribute(), + ], + next_page_token="ghi", + ), + apihub_service.ListAttributesResponse( + attributes=[ + common_fields.Attribute(), + common_fields.Attribute(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListAttributesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_attributes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, common_fields.Attribute) for i in results) + + pages = list(client.list_attributes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert 
page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.SearchResourcesRequest, + dict, + ], +) +def test_search_resources_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.SearchResourcesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.SearchResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.search_resources(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchResourcesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_search_resources_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_resources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.search_resources + ] = mock_rpc + + request = {} + client.search_resources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.search_resources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_search_resources_rest_required_fields( + request_type=apihub_service.SearchResourcesRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["location"] = "" + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["location"] = "location_value" + jsonified_request["query"] = "query_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "location" in jsonified_request + assert jsonified_request["location"] == "location_value" + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = apihub_service.SearchResourcesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = apihub_service.SearchResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.search_resources(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_search_resources_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.search_resources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "location", + "query", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_resources_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_search_resources" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_search_resources" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.SearchResourcesRequest.pb( + apihub_service.SearchResourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.SearchResourcesResponse.to_json( + apihub_service.SearchResourcesResponse() + ) + + request = apihub_service.SearchResourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.SearchResourcesResponse() + + client.search_resources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_resources_rest_bad_request( + transport: str = "rest", request_type=apihub_service.SearchResourcesRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_resources(request) + + +def test_search_resources_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.SearchResourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"location": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + location="location_value", + query="query_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.SearchResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.search_resources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{location=projects/*/locations/*}:searchResources" + % client.transport._host, + args[1], + ) + + +def test_search_resources_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_resources( + apihub_service.SearchResourcesRequest(), + location="location_value", + query="query_value", + ) + + +def test_search_resources_rest_pager(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + apihub_service.SearchResourcesResponse( + search_results=[ + apihub_service.SearchResult(), + apihub_service.SearchResult(), + apihub_service.SearchResult(), + ], + next_page_token="abc", + ), + apihub_service.SearchResourcesResponse( + search_results=[], + next_page_token="def", + ), + apihub_service.SearchResourcesResponse( + search_results=[ + apihub_service.SearchResult(), + ], + next_page_token="ghi", + ), + apihub_service.SearchResourcesResponse( + search_results=[ + apihub_service.SearchResult(), + apihub_service.SearchResult(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + apihub_service.SearchResourcesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"location": "projects/sample1/locations/sample2"} + + pager = client.search_resources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, apihub_service.SearchResult) for i in results) + + pages = list(client.search_resources(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.CreateExternalApiRequest, + dict, + ], +) +def test_create_external_api_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["external_api"] = { + "name": 
"name_value", + "display_name": "display_name_value", + "description": "description_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "paths": ["paths_value1", "paths_value2"], + "documentation": {"external_uri": "external_uri_value"}, + "attributes": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateExternalApiRequest.meta.fields["external_api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["external_api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + 
result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["external_api"][field])): + del request_init["external_api"][field][i][subfield] + else: + del request_init["external_api"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi( + name="name_value", + display_name="display_name_value", + description="description_value", + endpoints=["endpoints_value"], + paths=["paths_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_external_api(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.ExternalApi) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.endpoints == ["endpoints_value"] + assert response.paths == ["paths_value"] + + +def test_create_external_api_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_external_api in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_external_api + ] = mock_rpc + + request = {} + client.create_external_api(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_external_api(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_external_api_rest_required_fields( + request_type=apihub_service.CreateExternalApiRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_external_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("external_api_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_external_api(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_external_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("externalApiId",)) + & set( + ( + "parent", + "externalApi", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_external_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_external_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateExternalApiRequest.pb( + apihub_service.CreateExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ExternalApi.to_json( + common_fields.ExternalApi() + ) + + request = apihub_service.CreateExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ExternalApi() + + client.create_external_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateExternalApiRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_external_api(request) + + +def test_create_external_api_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + external_api=common_fields.ExternalApi(name="name_value"), + external_api_id="external_api_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_external_api(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/externalApis" + % client.transport._host, + args[1], + ) + + +def test_create_external_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_external_api( + apihub_service.CreateExternalApiRequest(), + parent="parent_value", + external_api=common_fields.ExternalApi(name="name_value"), + external_api_id="external_api_id_value", + ) + + +def test_create_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.GetExternalApiRequest, + dict, + ], +) +def test_get_external_api_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ExternalApi( + name="name_value", + display_name="display_name_value", + description="description_value", + endpoints=["endpoints_value"], + paths=["paths_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_external_api(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common_fields.ExternalApi) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.endpoints == ["endpoints_value"] + assert response.paths == ["paths_value"] + + +def test_get_external_api_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_external_api in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_external_api + ] = mock_rpc + + request = {} + client.get_external_api(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_external_api(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_external_api_rest_required_fields( + request_type=apihub_service.GetExternalApiRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_external_api(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_external_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_external_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_external_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetExternalApiRequest.pb( + 
apihub_service.GetExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ExternalApi.to_json( + common_fields.ExternalApi() + ) + + request = apihub_service.GetExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ExternalApi() + + client.get_external_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetExternalApiRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_external_api(request) + + +def test_get_external_api_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ExternalApi() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_external_api(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/externalApis/*}" + % client.transport._host, + args[1], + ) + + +def test_get_external_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_external_api( + apihub_service.GetExternalApiRequest(), + name="name_value", + ) + + +def test_get_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.UpdateExternalApiRequest, + dict, + ], +) +def test_update_external_api_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "external_api": { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + } + request_init["external_api"] = { + "name": "projects/sample1/locations/sample2/externalApis/sample3", + "display_name": "display_name_value", + "description": "description_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "paths": ["paths_value1", "paths_value2"], + "documentation": {"external_uri": "external_uri_value"}, + "attributes": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateExternalApiRequest.meta.fields["external_api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["external_api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["external_api"][field])): + del request_init["external_api"][field][i][subfield] + else: + del 
request_init["external_api"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi( + name="name_value", + display_name="display_name_value", + description="description_value", + endpoints=["endpoints_value"], + paths=["paths_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_external_api(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common_fields.ExternalApi) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.endpoints == ["endpoints_value"] + assert response.paths == ["paths_value"] + + +def test_update_external_api_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_external_api in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + 
"foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_external_api + ] = mock_rpc + + request = {} + client.update_external_api(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_external_api(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_external_api_rest_required_fields( + request_type=apihub_service.UpdateExternalApiRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_external_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_external_api(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_external_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "externalApi", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_external_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_external_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateExternalApiRequest.pb( + apihub_service.UpdateExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ExternalApi.to_json( + common_fields.ExternalApi() + ) + + request = apihub_service.UpdateExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ExternalApi() + + client.update_external_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateExternalApiRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "external_api": { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_external_api(request) + + +def test_update_external_api_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + + # get arguments that satisfy an http rule for this method + sample_request = { + "external_api": { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + external_api=common_fields.ExternalApi(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_external_api(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{external_api.name=projects/*/locations/*/externalApis/*}" + % client.transport._host, + args[1], + ) + + +def test_update_external_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_external_api( + apihub_service.UpdateExternalApiRequest(), + external_api=common_fields.ExternalApi(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.DeleteExternalApiRequest, + dict, + ], +) +def test_delete_external_api_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_external_api(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_external_api_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_external_api in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_external_api + ] = mock_rpc + + request = {} + client.delete_external_api(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_external_api(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_external_api_rest_required_fields( + request_type=apihub_service.DeleteExternalApiRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_external_api(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_external_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_external_api" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteExternalApiRequest.pb( + apihub_service.DeleteExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_external_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteExternalApiRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_external_api(request) + + +def test_delete_external_api_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_external_api(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/externalApis/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_external_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_external_api( + apihub_service.DeleteExternalApiRequest(), + name="name_value", + ) + + +def test_delete_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.ListExternalApisRequest, + dict, + ], +) +def test_list_external_apis_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListExternalApisResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListExternalApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_external_apis(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExternalApisPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_external_apis_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_external_apis in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_external_apis + ] = mock_rpc + + request = {} + client.list_external_apis(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_external_apis(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_external_apis_rest_required_fields( + request_type=apihub_service.ListExternalApisRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_external_apis._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_external_apis._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListExternalApisResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = apihub_service.ListExternalApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_external_apis(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_external_apis_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_external_apis._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_external_apis_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_external_apis" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_external_apis" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListExternalApisRequest.pb( + apihub_service.ListExternalApisRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListExternalApisResponse.to_json( + apihub_service.ListExternalApisResponse() + ) + + request = apihub_service.ListExternalApisRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListExternalApisResponse() + + client.list_external_apis( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_external_apis_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListExternalApisRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_external_apis(request) + + +def test_list_external_apis_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListExternalApisResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListExternalApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_external_apis(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/externalApis" + % client.transport._host, + args[1], + ) + + +def test_list_external_apis_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_external_apis( + apihub_service.ListExternalApisRequest(), + parent="parent_value", + ) + + +def test_list_external_apis_rest_pager(transport: str = "rest"): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + apihub_service.ListExternalApisResponse( + external_apis=[ + common_fields.ExternalApi(), + common_fields.ExternalApi(), + common_fields.ExternalApi(), + ], + next_page_token="abc", + ), + apihub_service.ListExternalApisResponse( + external_apis=[], + next_page_token="def", + ), + apihub_service.ListExternalApisResponse( + external_apis=[ + common_fields.ExternalApi(), + ], + next_page_token="ghi", + ), + apihub_service.ListExternalApisResponse( + external_apis=[ + common_fields.ExternalApi(), + common_fields.ExternalApi(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListExternalApisResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + 
return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_external_apis(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, common_fields.ExternalApi) for i in results) + + pages = list(client.list_external_apis(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ApiHubClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ApiHubRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ApiHubClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_api_hub_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ApiHubTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_api_hub_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ApiHubTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_api", + "get_api", + "list_apis", + "update_api", + "delete_api", + "create_version", + "get_version", + "list_versions", + "update_version", + "delete_version", + "create_spec", + "get_spec", + "get_spec_contents", + "list_specs", + "update_spec", + "delete_spec", + "get_api_operation", + "list_api_operations", + "get_definition", + "create_deployment", + "get_deployment", + "list_deployments", + "update_deployment", + "delete_deployment", + "create_attribute", + "get_attribute", + "update_attribute", + "delete_attribute", + "list_attributes", + "search_resources", + "create_external_api", + "get_external_api", + "update_external_api", + "delete_external_api", + "list_external_apis", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_api_hub_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_api_hub_base_transport_with_adc(): 
+ # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubTransport() + adc.assert_called_once() + + +def test_api_hub_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ApiHubClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_api_hub_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ApiHubRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_host_no_port(transport_name): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_host_with_port(transport_name): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + 
client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ApiHubClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ApiHubClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_api._session + session2 = client2.transport.create_api._session + assert session1 != session2 + session1 = client1.transport.get_api._session + session2 = client2.transport.get_api._session + assert session1 != session2 + session1 = client1.transport.list_apis._session + session2 = client2.transport.list_apis._session + assert session1 != session2 + session1 = client1.transport.update_api._session + session2 = client2.transport.update_api._session + assert session1 != session2 + session1 = client1.transport.delete_api._session + session2 = client2.transport.delete_api._session + assert session1 != session2 + session1 = client1.transport.create_version._session + session2 = client2.transport.create_version._session + assert session1 != session2 + session1 = client1.transport.get_version._session + session2 = client2.transport.get_version._session + assert session1 != session2 + session1 = client1.transport.list_versions._session + session2 = client2.transport.list_versions._session + assert session1 != session2 + session1 = client1.transport.update_version._session + session2 = client2.transport.update_version._session + assert session1 != session2 + session1 = client1.transport.delete_version._session + session2 = 
client2.transport.delete_version._session + assert session1 != session2 + session1 = client1.transport.create_spec._session + session2 = client2.transport.create_spec._session + assert session1 != session2 + session1 = client1.transport.get_spec._session + session2 = client2.transport.get_spec._session + assert session1 != session2 + session1 = client1.transport.get_spec_contents._session + session2 = client2.transport.get_spec_contents._session + assert session1 != session2 + session1 = client1.transport.list_specs._session + session2 = client2.transport.list_specs._session + assert session1 != session2 + session1 = client1.transport.update_spec._session + session2 = client2.transport.update_spec._session + assert session1 != session2 + session1 = client1.transport.delete_spec._session + session2 = client2.transport.delete_spec._session + assert session1 != session2 + session1 = client1.transport.get_api_operation._session + session2 = client2.transport.get_api_operation._session + assert session1 != session2 + session1 = client1.transport.list_api_operations._session + session2 = client2.transport.list_api_operations._session + assert session1 != session2 + session1 = client1.transport.get_definition._session + session2 = client2.transport.get_definition._session + assert session1 != session2 + session1 = client1.transport.create_deployment._session + session2 = client2.transport.create_deployment._session + assert session1 != session2 + session1 = client1.transport.get_deployment._session + session2 = client2.transport.get_deployment._session + assert session1 != session2 + session1 = client1.transport.list_deployments._session + session2 = client2.transport.list_deployments._session + assert session1 != session2 + session1 = client1.transport.update_deployment._session + session2 = client2.transport.update_deployment._session + assert session1 != session2 + session1 = client1.transport.delete_deployment._session + session2 = 
client2.transport.delete_deployment._session + assert session1 != session2 + session1 = client1.transport.create_attribute._session + session2 = client2.transport.create_attribute._session + assert session1 != session2 + session1 = client1.transport.get_attribute._session + session2 = client2.transport.get_attribute._session + assert session1 != session2 + session1 = client1.transport.update_attribute._session + session2 = client2.transport.update_attribute._session + assert session1 != session2 + session1 = client1.transport.delete_attribute._session + session2 = client2.transport.delete_attribute._session + assert session1 != session2 + session1 = client1.transport.list_attributes._session + session2 = client2.transport.list_attributes._session + assert session1 != session2 + session1 = client1.transport.search_resources._session + session2 = client2.transport.search_resources._session + assert session1 != session2 + session1 = client1.transport.create_external_api._session + session2 = client2.transport.create_external_api._session + assert session1 != session2 + session1 = client1.transport.get_external_api._session + session2 = client2.transport.get_external_api._session + assert session1 != session2 + session1 = client1.transport.update_external_api._session + session2 = client2.transport.update_external_api._session + assert session1 != session2 + session1 = client1.transport.delete_external_api._session + session2 = client2.transport.delete_external_api._session + assert session1 != session2 + session1 = client1.transport.list_external_apis._session + session2 = client2.transport.list_external_apis._session + assert session1 != session2 + + +def test_api_path(): + project = "squid" + location = "clam" + api = "whelk" + expected = "projects/{project}/locations/{location}/apis/{api}".format( + project=project, + location=location, + api=api, + ) + actual = ApiHubClient.api_path(project, location, api) + assert expected == actual + + +def 
test_parse_api_path(): + expected = { + "project": "octopus", + "location": "oyster", + "api": "nudibranch", + } + path = ApiHubClient.api_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubClient.parse_api_path(path) + assert expected == actual + + +def test_api_operation_path(): + project = "cuttlefish" + location = "mussel" + api = "winkle" + version = "nautilus" + operation = "scallop" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}".format( + project=project, + location=location, + api=api, + version=version, + operation=operation, + ) + actual = ApiHubClient.api_operation_path(project, location, api, version, operation) + assert expected == actual + + +def test_parse_api_operation_path(): + expected = { + "project": "abalone", + "location": "squid", + "api": "clam", + "version": "whelk", + "operation": "octopus", + } + path = ApiHubClient.api_operation_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubClient.parse_api_operation_path(path) + assert expected == actual + + +def test_attribute_path(): + project = "oyster" + location = "nudibranch" + attribute = "cuttlefish" + expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( + project=project, + location=location, + attribute=attribute, + ) + actual = ApiHubClient.attribute_path(project, location, attribute) + assert expected == actual + + +def test_parse_attribute_path(): + expected = { + "project": "mussel", + "location": "winkle", + "attribute": "nautilus", + } + path = ApiHubClient.attribute_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_attribute_path(path) + assert expected == actual + + +def test_definition_path(): + project = "scallop" + location = "abalone" + api = "squid" + version = "clam" + definition = "whelk" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/definitions/{definition}".format( + project=project, + location=location, + api=api, + version=version, + definition=definition, + ) + actual = ApiHubClient.definition_path(project, location, api, version, definition) + assert expected == actual + + +def test_parse_definition_path(): + expected = { + "project": "octopus", + "location": "oyster", + "api": "nudibranch", + "version": "cuttlefish", + "definition": "mussel", + } + path = ApiHubClient.definition_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubClient.parse_definition_path(path) + assert expected == actual + + +def test_deployment_path(): + project = "winkle" + location = "nautilus" + deployment = "scallop" + expected = ( + "projects/{project}/locations/{location}/deployments/{deployment}".format( + project=project, + location=location, + deployment=deployment, + ) + ) + actual = ApiHubClient.deployment_path(project, location, deployment) + assert expected == actual + + +def test_parse_deployment_path(): + expected = { + "project": "abalone", + "location": "squid", + "deployment": "clam", + } + path = ApiHubClient.deployment_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_deployment_path(path) + assert expected == actual + + +def test_external_api_path(): + project = "whelk" + location = "octopus" + external_api = "oyster" + expected = ( + "projects/{project}/locations/{location}/externalApis/{external_api}".format( + project=project, + location=location, + external_api=external_api, + ) + ) + actual = ApiHubClient.external_api_path(project, location, external_api) + assert expected == actual + + +def test_parse_external_api_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "external_api": "mussel", + } + path = ApiHubClient.external_api_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubClient.parse_external_api_path(path) + assert expected == actual + + +def test_spec_path(): + project = "winkle" + location = "nautilus" + api = "scallop" + version = "abalone" + spec = "squid" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( + project=project, + location=location, + api=api, + version=version, + spec=spec, + ) + actual = ApiHubClient.spec_path(project, location, api, version, spec) + assert expected == actual + + +def test_parse_spec_path(): + expected = { + "project": "clam", + "location": "whelk", + "api": "octopus", + "version": "oyster", + "spec": "nudibranch", + } + path = ApiHubClient.spec_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_spec_path(path) + assert expected == actual + + +def test_version_path(): + project = "cuttlefish" + location = "mussel" + api = "winkle" + version = "nautilus" + expected = ( + "projects/{project}/locations/{location}/apis/{api}/versions/{version}".format( + project=project, + location=location, + api=api, + version=version, + ) + ) + actual = ApiHubClient.version_path(project, location, api, version) + assert expected == actual + + +def test_parse_version_path(): + expected = { + "project": "scallop", + "location": "abalone", + "api": "squid", + "version": "clam", + } + path = ApiHubClient.version_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubClient.parse_version_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ApiHubClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ApiHubClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ApiHubClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ApiHubClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ApiHubClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ApiHubClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = ApiHubClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ApiHubClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ApiHubClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ApiHubClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ApiHubTransport, "_prep_wrapped_messages" + ) as prep: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ApiHubTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ApiHubClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ApiHubClient, transports.ApiHubRestTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py new file mode 100644 index 000000000000..525149783ded --- /dev/null +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py @@ -0,0 +1,3649 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.apihub_v1.services.api_hub_dependencies import ( + ApiHubDependenciesClient, + pagers, + transports, +) +from google.cloud.apihub_v1.types import apihub_service, common_fields + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ApiHubDependenciesClient._get_default_mtls_endpoint(None) is None + assert ( + ApiHubDependenciesClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ApiHubDependenciesClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ApiHubDependenciesClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ApiHubDependenciesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ApiHubDependenciesClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert ApiHubDependenciesClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ApiHubDependenciesClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ApiHubDependenciesClient._read_environment_variables() == ( + False, + "auto", 
+ None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ApiHubDependenciesClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ApiHubDependenciesClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ApiHubDependenciesClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ApiHubDependenciesClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ApiHubDependenciesClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ApiHubDependenciesClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ApiHubDependenciesClient._get_client_cert_source(None, False) is None + assert ( + ApiHubDependenciesClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + ApiHubDependenciesClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ApiHubDependenciesClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ApiHubDependenciesClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ApiHubDependenciesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ApiHubDependenciesClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ApiHubDependenciesClient._DEFAULT_UNIVERSE + default_endpoint = ApiHubDependenciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ApiHubDependenciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ApiHubDependenciesClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ApiHubDependenciesClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ApiHubDependenciesClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ApiHubDependenciesClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ApiHubDependenciesClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == ApiHubDependenciesClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ApiHubDependenciesClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ApiHubDependenciesClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ApiHubDependenciesClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ApiHubDependenciesClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as 
excinfo: + ApiHubDependenciesClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ApiHubDependenciesClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ApiHubDependenciesClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ApiHubDependenciesClient._get_universe_domain(None, None) + == ApiHubDependenciesClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ApiHubDependenciesClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ApiHubDependenciesClient, "rest"), + ], +) +def test_api_hub_dependencies_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ApiHubDependenciesRestTransport, "rest"), + ], +) +def test_api_hub_dependencies_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ApiHubDependenciesClient, "rest"), + ], +) +def test_api_hub_dependencies_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +def test_api_hub_dependencies_client_get_transport_class(): + transport = ApiHubDependenciesClient.get_transport_class() + available_transports = [ + transports.ApiHubDependenciesRestTransport, + ] + assert transport in available_transports + + transport = ApiHubDependenciesClient.get_transport_class("rest") + assert transport == transports.ApiHubDependenciesRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, "rest"), + ], +) +@mock.patch.object( + ApiHubDependenciesClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ApiHubDependenciesClient), +) +def test_api_hub_dependencies_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ApiHubDependenciesClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ApiHubDependenciesClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ApiHubDependenciesClient, + transports.ApiHubDependenciesRestTransport, + "rest", + "true", + ), + ( + ApiHubDependenciesClient, + transports.ApiHubDependenciesRestTransport, + "rest", + "false", + ), + ], +) 
+@mock.patch.object( + ApiHubDependenciesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ApiHubDependenciesClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_api_hub_dependencies_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ApiHubDependenciesClient]) +@mock.patch.object( + ApiHubDependenciesClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ApiHubDependenciesClient), +) +def test_api_hub_dependencies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [ApiHubDependenciesClient]) +@mock.patch.object( + ApiHubDependenciesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ApiHubDependenciesClient), +) +def test_api_hub_dependencies_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ApiHubDependenciesClient._DEFAULT_UNIVERSE + default_endpoint = ApiHubDependenciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ApiHubDependenciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, "rest"), + ], +) +def test_api_hub_dependencies_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ApiHubDependenciesClient, + transports.ApiHubDependenciesRestTransport, + "rest", + None, + ), + ], +) +def test_api_hub_dependencies_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.CreateDependencyRequest, + dict, + ], +) +def test_create_dependency_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["dependency"] = { + "name": "name_value", + "consumer": { + "operation_resource_name": "operation_resource_name_value", + "external_api_resource_name": "external_api_resource_name_value", + "display_name": "display_name_value", + }, + "supplier": {}, + "state": 1, + "description": "description_value", + "discovery_mode": 1, + "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, + "create_time": {}, + "update_time": {}, + "attributes": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateDependencyRequest.meta.fields["dependency"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dependency"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dependency"][field])): + del request_init["dependency"][field][i][subfield] + else: + del request_init["dependency"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency( + name="name_value", + state=common_fields.Dependency.State.PROPOSED, + description="description_value", + discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_dependency(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Dependency) + assert response.name == "name_value" + assert response.state == common_fields.Dependency.State.PROPOSED + assert response.description == "description_value" + assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL + + +def test_create_dependency_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_dependency in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_dependency + ] = mock_rpc + + request = {} + client.create_dependency(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_dependency(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_dependency_rest_required_fields( + request_type=apihub_service.CreateDependencyRequest, +): + transport_class = transports.ApiHubDependenciesRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dependency._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("dependency_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_dependency(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_dependency._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("dependencyId",)) + & set( + ( + "parent", + "dependency", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), + ) + client = ApiHubDependenciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), 
"request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_create_dependency" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_create_dependency" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateDependencyRequest.pb( + apihub_service.CreateDependencyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Dependency.to_json( + common_fields.Dependency() + ) + + request = apihub_service.CreateDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Dependency() + + client.create_dependency( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateDependencyRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_dependency(request) + + +def test_create_dependency_rest_flattened(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + dependency=common_fields.Dependency(name="name_value"), + dependency_id="dependency_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_dependency(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dependencies" + % client.transport._host, + args[1], + ) + + +def test_create_dependency_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_dependency( + apihub_service.CreateDependencyRequest(), + parent="parent_value", + dependency=common_fields.Dependency(name="name_value"), + dependency_id="dependency_id_value", + ) + + +def test_create_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.GetDependencyRequest, + dict, + ], +) +def test_get_dependency_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Dependency( + name="name_value", + state=common_fields.Dependency.State.PROPOSED, + description="description_value", + discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_dependency(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common_fields.Dependency) + assert response.name == "name_value" + assert response.state == common_fields.Dependency.State.PROPOSED + assert response.description == "description_value" + assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL + + +def test_get_dependency_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_dependency in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_dependency] = mock_rpc + + request = {} + client.get_dependency(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_dependency(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_dependency_rest_required_fields( + request_type=apihub_service.GetDependencyRequest, +): + transport_class = transports.ApiHubDependenciesRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_dependency(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_dependency._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), + ) + client = ApiHubDependenciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_get_dependency" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_get_dependency" + ) as pre: + pre.assert_not_called() + post.assert_not_called() 
+ pb_message = apihub_service.GetDependencyRequest.pb( + apihub_service.GetDependencyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Dependency.to_json( + common_fields.Dependency() + ) + + request = apihub_service.GetDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Dependency() + + client.get_dependency( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetDependencyRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_dependency(request) + + +def test_get_dependency_rest_flattened(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Dependency() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_dependency(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dependencies/*}" + % client.transport._host, + args[1], + ) + + +def test_get_dependency_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_dependency( + apihub_service.GetDependencyRequest(), + name="name_value", + ) + + +def test_get_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.UpdateDependencyRequest, + dict, + ], +) +def test_update_dependency_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "dependency": { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + } + request_init["dependency"] = { + "name": "projects/sample1/locations/sample2/dependencies/sample3", + "consumer": { + "operation_resource_name": "operation_resource_name_value", + "external_api_resource_name": "external_api_resource_name_value", + "display_name": "display_name_value", + }, + "supplier": {}, + "state": 1, + "description": "description_value", + "discovery_mode": 1, + "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, + "create_time": {}, + "update_time": {}, + "attributes": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateDependencyRequest.meta.fields["dependency"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dependency"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dependency"][field])): + del request_init["dependency"][field][i][subfield] + else: + del 
request_init["dependency"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency( + name="name_value", + state=common_fields.Dependency.State.PROPOSED, + description="description_value", + discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_dependency(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common_fields.Dependency) + assert response.name == "name_value" + assert response.state == common_fields.Dependency.State.PROPOSED + assert response.description == "description_value" + assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL + + +def test_update_dependency_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_dependency in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + 
mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_dependency + ] = mock_rpc + + request = {} + client.update_dependency(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_dependency(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_dependency_rest_required_fields( + request_type=apihub_service.UpdateDependencyRequest, +): + transport_class = transports.ApiHubDependenciesRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dependency._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_dependency(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_dependency._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "dependency", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), + ) + client = ApiHubDependenciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), 
"request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_update_dependency" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_update_dependency" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateDependencyRequest.pb( + apihub_service.UpdateDependencyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Dependency.to_json( + common_fields.Dependency() + ) + + request = apihub_service.UpdateDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Dependency() + + client.update_dependency( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateDependencyRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "dependency": { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_dependency(request) + + +def test_update_dependency_rest_flattened(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + + # get arguments that satisfy an http rule for this method + sample_request = { + "dependency": { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + dependency=common_fields.Dependency(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_dependency(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{dependency.name=projects/*/locations/*/dependencies/*}" + % client.transport._host, + args[1], + ) + + +def test_update_dependency_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_dependency( + apihub_service.UpdateDependencyRequest(), + dependency=common_fields.Dependency(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.DeleteDependencyRequest, + dict, + ], +) +def test_delete_dependency_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_dependency(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_dependency_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_dependency in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_dependency + ] = mock_rpc + + request = {} + client.delete_dependency(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_dependency(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_dependency_rest_required_fields( + request_type=apihub_service.DeleteDependencyRequest, +): + transport_class = transports.ApiHubDependenciesRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_dependency(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_dependency._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), + ) + client = ApiHubDependenciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_delete_dependency" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteDependencyRequest.pb( + apihub_service.DeleteDependencyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_dependency( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteDependencyRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_dependency(request) + + +def test_delete_dependency_rest_flattened(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_dependency(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dependencies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_dependency_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dependency( + apihub_service.DeleteDependencyRequest(), + name="name_value", + ) + + +def test_delete_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.ListDependenciesRequest, + dict, + ], +) +def test_list_dependencies_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDependenciesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDependenciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_dependencies(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDependenciesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_dependencies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_dependencies in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_dependencies + ] = mock_rpc + + request = {} + client.list_dependencies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_dependencies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_dependencies_rest_required_fields( + request_type=apihub_service.ListDependenciesRequest, +): + transport_class = transports.ApiHubDependenciesRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dependencies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dependencies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDependenciesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = apihub_service.ListDependenciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_dependencies(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_dependencies_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_dependencies._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_dependencies_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), + ) + client = ApiHubDependenciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_list_dependencies" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_list_dependencies" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListDependenciesRequest.pb( + apihub_service.ListDependenciesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListDependenciesResponse.to_json( + apihub_service.ListDependenciesResponse() + ) + + request = apihub_service.ListDependenciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListDependenciesResponse() + + client.list_dependencies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_dependencies_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListDependenciesRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_dependencies(request) + + +def test_list_dependencies_rest_flattened(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDependenciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDependenciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_dependencies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dependencies" + % client.transport._host, + args[1], + ) + + +def test_list_dependencies_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dependencies( + apihub_service.ListDependenciesRequest(), + parent="parent_value", + ) + + +def test_list_dependencies_rest_pager(transport: str = "rest"): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + apihub_service.ListDependenciesResponse( + dependencies=[ + common_fields.Dependency(), + common_fields.Dependency(), + common_fields.Dependency(), + ], + next_page_token="abc", + ), + apihub_service.ListDependenciesResponse( + dependencies=[], + next_page_token="def", + ), + apihub_service.ListDependenciesResponse( + dependencies=[ + common_fields.Dependency(), + ], + next_page_token="ghi", + ), + apihub_service.ListDependenciesResponse( + dependencies=[ + common_fields.Dependency(), + common_fields.Dependency(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListDependenciesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_dependencies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, common_fields.Dependency) for i in results) + + pages = list(client.list_dependencies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ApiHubDependenciesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ApiHubDependenciesRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ApiHubDependenciesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_api_hub_dependencies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ApiHubDependenciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_api_hub_dependencies_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ApiHubDependenciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_dependency", + "get_dependency", + "update_dependency", + "delete_dependency", + "list_dependencies", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_api_hub_dependencies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubDependenciesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_api_hub_dependencies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubDependenciesTransport() + adc.assert_called_once() + + +def test_api_hub_dependencies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ApiHubDependenciesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_api_hub_dependencies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ApiHubDependenciesRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_dependencies_host_no_port(transport_name): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_dependencies_host_with_port(transport_name): + client = ApiHubDependenciesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_dependencies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ApiHubDependenciesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ApiHubDependenciesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_dependency._session + session2 = client2.transport.create_dependency._session + assert session1 != session2 + session1 = client1.transport.get_dependency._session + session2 = client2.transport.get_dependency._session + assert session1 != session2 + session1 = client1.transport.update_dependency._session + session2 = client2.transport.update_dependency._session + assert session1 != session2 + session1 = client1.transport.delete_dependency._session + session2 = client2.transport.delete_dependency._session + assert session1 != session2 + session1 = client1.transport.list_dependencies._session + session2 = client2.transport.list_dependencies._session + assert session1 != session2 + + +def test_attribute_path(): + project = "squid" + location = "clam" + attribute = "whelk" + expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( + project=project, + location=location, + attribute=attribute, + ) + actual = ApiHubDependenciesClient.attribute_path(project, location, attribute) + assert expected == actual + + +def test_parse_attribute_path(): + expected = { + "project": "octopus", + "location": "oyster", + "attribute": "nudibranch", + } + 
path = ApiHubDependenciesClient.attribute_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_attribute_path(path) + assert expected == actual + + +def test_dependency_path(): + project = "cuttlefish" + location = "mussel" + dependency = "winkle" + expected = ( + "projects/{project}/locations/{location}/dependencies/{dependency}".format( + project=project, + location=location, + dependency=dependency, + ) + ) + actual = ApiHubDependenciesClient.dependency_path(project, location, dependency) + assert expected == actual + + +def test_parse_dependency_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "dependency": "abalone", + } + path = ApiHubDependenciesClient.dependency_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_dependency_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ApiHubDependenciesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ApiHubDependenciesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ApiHubDependenciesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ApiHubDependenciesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ApiHubDependenciesClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ApiHubDependenciesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ApiHubDependenciesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = ApiHubDependenciesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ApiHubDependenciesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ApiHubDependenciesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ApiHubDependenciesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ApiHubDependenciesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" + ) as prep: + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ApiHubDependenciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_label_service.py 
b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py similarity index 60% rename from packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_label_service.py rename to packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py index acbbbef71582..dc92c3df1475 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_label_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py @@ -33,6 +33,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import json_format @@ -44,12 +45,11 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.ads.admanager_v1.services.label_service import ( - LabelServiceClient, - pagers, +from google.cloud.apihub_v1.services.api_hub_plugin import ( + ApiHubPluginClient, transports, ) -from google.ads.admanager_v1.types import label_service +from google.cloud.apihub_v1.types import common_fields, plugin_service def client_cert_source_callback(): @@ -85,71 +85,71 @@ def test__get_default_mtls_endpoint(): sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" - assert LabelServiceClient._get_default_mtls_endpoint(None) is None + assert ApiHubPluginClient._get_default_mtls_endpoint(None) is None assert ( - LabelServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ApiHubPluginClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint ) assert ( - LabelServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + ApiHubPluginClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint ) assert ( - 
LabelServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + ApiHubPluginClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint ) assert ( - LabelServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + ApiHubPluginClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint ) - assert LabelServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + assert ApiHubPluginClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi def test__read_environment_variables(): - assert LabelServiceClient._read_environment_variables() == (False, "auto", None) + assert ApiHubPluginClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert LabelServiceClient._read_environment_variables() == (True, "auto", None) + assert ApiHubPluginClient._read_environment_variables() == (True, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert LabelServiceClient._read_environment_variables() == (False, "auto", None) + assert ApiHubPluginClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError) as excinfo: - LabelServiceClient._read_environment_variables() + ApiHubPluginClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert LabelServiceClient._read_environment_variables() == ( + assert ApiHubPluginClient._read_environment_variables() == ( False, "never", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert LabelServiceClient._read_environment_variables() == ( + assert ApiHubPluginClient._read_environment_variables() == ( 
False, "always", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert LabelServiceClient._read_environment_variables() == (False, "auto", None) + assert ApiHubPluginClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: - LabelServiceClient._read_environment_variables() + ApiHubPluginClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert LabelServiceClient._read_environment_variables() == ( + assert ApiHubPluginClient._read_environment_variables() == ( False, "auto", "foo.com", @@ -160,13 +160,13 @@ def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() - assert LabelServiceClient._get_client_cert_source(None, False) is None + assert ApiHubPluginClient._get_client_cert_source(None, False) is None assert ( - LabelServiceClient._get_client_cert_source(mock_provided_cert_source, False) + ApiHubPluginClient._get_client_cert_source(mock_provided_cert_source, False) is None ) assert ( - LabelServiceClient._get_client_cert_source(mock_provided_cert_source, True) + ApiHubPluginClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source ) @@ -178,11 +178,11 @@ def test__get_client_cert_source(): return_value=mock_default_cert_source, ): assert ( - LabelServiceClient._get_client_cert_source(None, True) + ApiHubPluginClient._get_client_cert_source(None, True) is mock_default_cert_source ) assert ( - LabelServiceClient._get_client_cert_source( + ApiHubPluginClient._get_client_cert_source( mock_provided_cert_source, "true" ) is mock_provided_cert_source @@ -190,59 +190,59 @@ def test__get_client_cert_source(): 
@mock.patch.object( - LabelServiceClient, + ApiHubPluginClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LabelServiceClient), + modify_default_endpoint_template(ApiHubPluginClient), ) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() - default_universe = LabelServiceClient._DEFAULT_UNIVERSE - default_endpoint = LabelServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = ApiHubPluginClient._DEFAULT_UNIVERSE + default_endpoint = ApiHubPluginClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = LabelServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = ApiHubPluginClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) assert ( - LabelServiceClient._get_api_endpoint( + ApiHubPluginClient._get_api_endpoint( api_override, mock_client_cert_source, default_universe, "always" ) == api_override ) assert ( - LabelServiceClient._get_api_endpoint( + ApiHubPluginClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "auto" ) - == LabelServiceClient.DEFAULT_MTLS_ENDPOINT + == ApiHubPluginClient.DEFAULT_MTLS_ENDPOINT ) assert ( - LabelServiceClient._get_api_endpoint(None, None, default_universe, "auto") + ApiHubPluginClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint ) assert ( - LabelServiceClient._get_api_endpoint(None, None, default_universe, "always") - == LabelServiceClient.DEFAULT_MTLS_ENDPOINT + ApiHubPluginClient._get_api_endpoint(None, None, default_universe, "always") + == ApiHubPluginClient.DEFAULT_MTLS_ENDPOINT ) assert ( - LabelServiceClient._get_api_endpoint( + ApiHubPluginClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "always" ) - == LabelServiceClient.DEFAULT_MTLS_ENDPOINT + == ApiHubPluginClient.DEFAULT_MTLS_ENDPOINT ) assert ( - LabelServiceClient._get_api_endpoint(None, None, mock_universe, "never") + 
ApiHubPluginClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint ) assert ( - LabelServiceClient._get_api_endpoint(None, None, default_universe, "never") + ApiHubPluginClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint ) with pytest.raises(MutualTLSChannelError) as excinfo: - LabelServiceClient._get_api_endpoint( + ApiHubPluginClient._get_api_endpoint( None, mock_client_cert_source, mock_universe, "auto" ) assert ( @@ -256,29 +256,29 @@ def test__get_universe_domain(): universe_domain_env = "bar.com" assert ( - LabelServiceClient._get_universe_domain( + ApiHubPluginClient._get_universe_domain( client_universe_domain, universe_domain_env ) == client_universe_domain ) assert ( - LabelServiceClient._get_universe_domain(None, universe_domain_env) + ApiHubPluginClient._get_universe_domain(None, universe_domain_env) == universe_domain_env ) assert ( - LabelServiceClient._get_universe_domain(None, None) - == LabelServiceClient._DEFAULT_UNIVERSE + ApiHubPluginClient._get_universe_domain(None, None) + == ApiHubPluginClient._DEFAULT_UNIVERSE ) with pytest.raises(ValueError) as excinfo: - LabelServiceClient._get_universe_domain("", None) + ApiHubPluginClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
@pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (LabelServiceClient, transports.LabelServiceRestTransport, "rest"), + (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest"), ], ) def test__validate_universe_domain(client_class, transport_class, transport_name): @@ -357,10 +357,10 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (LabelServiceClient, "rest"), + (ApiHubPluginClient, "rest"), ], ) -def test_label_service_client_from_service_account_info(client_class, transport_name): +def test_api_hub_plugin_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" @@ -372,19 +372,19 @@ def test_label_service_client_from_service_account_info(client_class, transport_ assert isinstance(client, client_class) assert client.transport._host == ( - "admanager.googleapis.com:443" + "apihub.googleapis.com:443" if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" + else "https://apihub.googleapis.com" ) @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.LabelServiceRestTransport, "rest"), + (transports.ApiHubPluginRestTransport, "rest"), ], ) -def test_label_service_client_service_account_always_use_jwt( +def test_api_hub_plugin_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -405,10 +405,10 @@ def test_label_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (LabelServiceClient, "rest"), + (ApiHubPluginClient, "rest"), ], ) -def test_label_service_client_from_service_account_file(client_class, transport_name): +def test_api_hub_plugin_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() 
with mock.patch.object( service_account.Credentials, "from_service_account_file" @@ -427,45 +427,45 @@ def test_label_service_client_from_service_account_file(client_class, transport_ assert isinstance(client, client_class) assert client.transport._host == ( - "admanager.googleapis.com:443" + "apihub.googleapis.com:443" if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" + else "https://apihub.googleapis.com" ) -def test_label_service_client_get_transport_class(): - transport = LabelServiceClient.get_transport_class() +def test_api_hub_plugin_client_get_transport_class(): + transport = ApiHubPluginClient.get_transport_class() available_transports = [ - transports.LabelServiceRestTransport, + transports.ApiHubPluginRestTransport, ] assert transport in available_transports - transport = LabelServiceClient.get_transport_class("rest") - assert transport == transports.LabelServiceRestTransport + transport = ApiHubPluginClient.get_transport_class("rest") + assert transport == transports.ApiHubPluginRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (LabelServiceClient, transports.LabelServiceRestTransport, "rest"), + (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest"), ], ) @mock.patch.object( - LabelServiceClient, + ApiHubPluginClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LabelServiceClient), + modify_default_endpoint_template(ApiHubPluginClient), ) -def test_label_service_client_client_options( +def test_api_hub_plugin_client_client_options( client_class, transport_class, transport_name ): # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(LabelServiceClient, "get_transport_class") as gtc: + with mock.patch.object(ApiHubPluginClient, "get_transport_class") as gtc: transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(LabelServiceClient, "get_transport_class") as gtc: + with mock.patch.object(ApiHubPluginClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() @@ -588,17 +588,17 @@ def test_label_service_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (LabelServiceClient, transports.LabelServiceRestTransport, "rest", "true"), - (LabelServiceClient, transports.LabelServiceRestTransport, "rest", "false"), + (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest", "true"), + (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest", "false"), ], ) @mock.patch.object( - LabelServiceClient, + ApiHubPluginClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LabelServiceClient), + modify_default_endpoint_template(ApiHubPluginClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_label_service_client_mtls_env_auto( +def test_api_hub_plugin_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env ): # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default @@ -700,11 +700,11 @@ def test_label_service_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [LabelServiceClient]) +@pytest.mark.parametrize("client_class", [ApiHubPluginClient]) @mock.patch.object( - LabelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LabelServiceClient) + ApiHubPluginClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubPluginClient) ) -def test_label_service_client_get_mtls_endpoint_and_cert_source(client_class): +def test_api_hub_plugin_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". @@ -795,21 +795,21 @@ def test_label_service_client_get_mtls_endpoint_and_cert_source(client_class): ) -@pytest.mark.parametrize("client_class", [LabelServiceClient]) +@pytest.mark.parametrize("client_class", [ApiHubPluginClient]) @mock.patch.object( - LabelServiceClient, + ApiHubPluginClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LabelServiceClient), + modify_default_endpoint_template(ApiHubPluginClient), ) -def test_label_service_client_client_api_endpoint(client_class): +def test_api_hub_plugin_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" - default_universe = LabelServiceClient._DEFAULT_UNIVERSE - default_endpoint = LabelServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = ApiHubPluginClient._DEFAULT_UNIVERSE + default_endpoint = ApiHubPluginClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = LabelServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = ApiHubPluginClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) @@ -877,10 +877,10 @@ def test_label_service_client_client_api_endpoint(client_class): @pytest.mark.parametrize( 
"client_class,transport_class,transport_name", [ - (LabelServiceClient, transports.LabelServiceRestTransport, "rest"), + (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest"), ], ) -def test_label_service_client_client_options_scopes( +def test_api_hub_plugin_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. @@ -908,10 +908,10 @@ def test_label_service_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - (LabelServiceClient, transports.LabelServiceRestTransport, "rest", None), + (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest", None), ], ) -def test_label_service_client_client_options_credentials_file( +def test_api_hub_plugin_client_client_options_credentials_file( client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. @@ -938,48 +938,54 @@ def test_label_service_client_client_options_credentials_file( @pytest.mark.parametrize( "request_type", [ - label_service.GetLabelRequest, + plugin_service.GetPluginRequest, dict, ], ) -def test_get_label_rest(request_type): - client = LabelServiceClient( +def test_get_plugin_rest(request_type): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/labels/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = label_service.Label( + return_value = plugin_service.Plugin( name="name_value", + display_name="display_name_value", + description="description_value", + state=plugin_service.Plugin.State.ENABLED, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = label_service.Label.pb(return_value) + return_value = plugin_service.Plugin.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_label(request) + response = client.get_plugin(request) # Establish that the response is the type that we expect. - assert isinstance(response, label_service.Label) + assert isinstance(response, plugin_service.Plugin) assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == plugin_service.Plugin.State.ENABLED -def test_get_label_rest_use_cached_wrapped_rpc(): +def test_get_plugin_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = LabelServiceClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -989,30 +995,30 @@ def test_get_label_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_label in client._transport._wrapped_methods + assert client._transport.get_plugin in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_label] = mock_rpc + client._transport._wrapped_methods[client._transport.get_plugin] = mock_rpc request = {} - client.get_label(request) + client.get_plugin(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_label(request) + client.get_plugin(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_label_rest_required_fields(request_type=label_service.GetLabelRequest): - transport_class = transports.LabelServiceRestTransport +def test_get_plugin_rest_required_fields(request_type=plugin_service.GetPluginRequest): + transport_class = transports.ApiHubPluginRestTransport request_init = {} request_init["name"] = "" @@ -1026,7 +1032,7 @@ def test_get_label_rest_required_fields(request_type=label_service.GetLabelReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_label._get_unset_required_fields(jsonified_request) + ).get_plugin._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1035,21 +1041,21 @@ def test_get_label_rest_required_fields(request_type=label_service.GetLabelReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_label._get_unset_required_fields(jsonified_request) + ).get_plugin._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - client = LabelServiceClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = label_service.Label() + return_value = plugin_service.Plugin() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1070,49 +1076,51 @@ def test_get_label_rest_required_fields(request_type=label_service.GetLabelReque response_value.status_code = 200 # Convert return value to protobuf type - return_value = label_service.Label.pb(return_value) + return_value = plugin_service.Plugin.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_label(request) + response = client.get_plugin(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_label_rest_unset_required_fields(): - transport = transports.LabelServiceRestTransport( +def test_get_plugin_rest_unset_required_fields(): + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_label._get_unset_required_fields({}) + unset_fields = transport.get_plugin._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_label_rest_interceptors(null_interceptor): - transport = transports.LabelServiceRestTransport( +def test_get_plugin_rest_interceptors(null_interceptor): + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.LabelServiceRestInterceptor(), + else transports.ApiHubPluginRestInterceptor(), ) - client = LabelServiceClient(transport=transport) + client = ApiHubPluginClient(transport=transport) with mock.patch.object( type(client.transport._session), 
"request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.LabelServiceRestInterceptor, "post_get_label" + transports.ApiHubPluginRestInterceptor, "post_get_plugin" ) as post, mock.patch.object( - transports.LabelServiceRestInterceptor, "pre_get_label" + transports.ApiHubPluginRestInterceptor, "pre_get_plugin" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = label_service.GetLabelRequest.pb(label_service.GetLabelRequest()) + pb_message = plugin_service.GetPluginRequest.pb( + plugin_service.GetPluginRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -1123,17 +1131,19 @@ def test_get_label_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = label_service.Label.to_json(label_service.Label()) + req.return_value._content = plugin_service.Plugin.to_json( + plugin_service.Plugin() + ) - request = label_service.GetLabelRequest() + request = plugin_service.GetPluginRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = label_service.Label() + post.return_value = plugin_service.Plugin() - client.get_label( + client.get_plugin( request, metadata=[ ("key", "val"), @@ -1145,16 +1155,16 @@ def test_get_label_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_label_rest_bad_request( - transport: str = "rest", request_type=label_service.GetLabelRequest +def test_get_plugin_rest_bad_request( + transport: str = "rest", request_type=plugin_service.GetPluginRequest ): - client = LabelServiceClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/labels/sample2"} + request_init = {"name": 
"projects/sample1/locations/sample2/plugins/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1166,11 +1176,11 @@ def test_get_label_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_label(request) + client.get_plugin(request) -def test_get_label_rest_flattened(): - client = LabelServiceClient( +def test_get_plugin_rest_flattened(): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1178,10 +1188,10 @@ def test_get_label_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = label_service.Label() + return_value = plugin_service.Plugin() # get arguments that satisfy an http rule for this method - sample_request = {"name": "networks/sample1/labels/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} # get truthy value for each flattened field mock_args = dict( @@ -1193,24 +1203,25 @@ def test_get_label_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = label_service.Label.pb(return_value) + return_value = plugin_service.Plugin.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_label(**mock_args) + client.get_plugin(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=networks/*/labels/*}" % client.transport._host, args[1] + "%s/v1/{name=projects/*/locations/*/plugins/*}" % client.transport._host, + args[1], ) -def test_get_label_rest_flattened_error(transport: str = "rest"): - client = LabelServiceClient( +def test_get_plugin_rest_flattened_error(transport: str = "rest"): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1218,14 +1229,14 @@ def test_get_label_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_label( - label_service.GetLabelRequest(), + client.get_plugin( + plugin_service.GetPluginRequest(), name="name_value", ) -def test_get_label_rest_error(): - client = LabelServiceClient( +def test_get_plugin_rest_error(): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1233,50 +1244,54 @@ def test_get_label_rest_error(): @pytest.mark.parametrize( "request_type", [ - label_service.ListLabelsRequest, + plugin_service.EnablePluginRequest, dict, ], ) -def test_list_labels_rest(request_type): - client = LabelServiceClient( +def test_enable_plugin_rest(request_type): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = label_service.ListLabelsResponse( - next_page_token="next_page_token_value", - total_size=1086, + return_value = plugin_service.Plugin( + name="name_value", + display_name="display_name_value", + description="description_value", + state=plugin_service.Plugin.State.ENABLED, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = label_service.ListLabelsResponse.pb(return_value) + return_value = plugin_service.Plugin.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_labels(request) + response = client.enable_plugin(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLabelsPager) - assert response.next_page_token == "next_page_token_value" - assert response.total_size == 1086 + assert isinstance(response, plugin_service.Plugin) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == plugin_service.Plugin.State.ENABLED -def test_list_labels_rest_use_cached_wrapped_rpc(): +def test_enable_plugin_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = LabelServiceClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1286,33 +1301,35 @@ def test_list_labels_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_labels in client._transport._wrapped_methods + assert client._transport.enable_plugin in client._transport._wrapped_methods # 
Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_labels] = mock_rpc + client._transport._wrapped_methods[client._transport.enable_plugin] = mock_rpc request = {} - client.list_labels(request) + client.enable_plugin(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_labels(request) + client.enable_plugin(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_labels_rest_required_fields(request_type=label_service.ListLabelsRequest): - transport_class = transports.LabelServiceRestTransport +def test_enable_plugin_rest_required_fields( + request_type=plugin_service.EnablePluginRequest, +): + transport_class = transports.ApiHubPluginRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -1323,40 +1340,30 @@ def test_list_labels_rest_required_fields(request_type=label_service.ListLabelsR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_labels._get_unset_required_fields(jsonified_request) + ).enable_plugin._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_labels._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "skip", - ) - ) + ).enable_plugin._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - client = LabelServiceClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = label_service.ListLabelsResponse() + return_value = plugin_service.Plugin() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1368,70 +1375,60 @@ def test_list_labels_rest_required_fields(request_type=label_service.ListLabelsR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = label_service.ListLabelsResponse.pb(return_value) + return_value = plugin_service.Plugin.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_labels(request) + response = client.enable_plugin(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_labels_rest_unset_required_fields(): - transport = 
transports.LabelServiceRestTransport( +def test_enable_plugin_rest_unset_required_fields(): + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_labels._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "skip", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.enable_plugin._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_labels_rest_interceptors(null_interceptor): - transport = transports.LabelServiceRestTransport( +def test_enable_plugin_rest_interceptors(null_interceptor): + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.LabelServiceRestInterceptor(), + else transports.ApiHubPluginRestInterceptor(), ) - client = LabelServiceClient(transport=transport) + client = ApiHubPluginClient(transport=transport) with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.LabelServiceRestInterceptor, "post_list_labels" + transports.ApiHubPluginRestInterceptor, "post_enable_plugin" ) as post, mock.patch.object( - transports.LabelServiceRestInterceptor, "pre_list_labels" + transports.ApiHubPluginRestInterceptor, "pre_enable_plugin" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = label_service.ListLabelsRequest.pb( - label_service.ListLabelsRequest() + pb_message = plugin_service.EnablePluginRequest.pb( + plugin_service.EnablePluginRequest() ) transcode.return_value = { "method": "post", @@ -1443,19 +1440,19 @@ def test_list_labels_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request 
= PreparedRequest() - req.return_value._content = label_service.ListLabelsResponse.to_json( - label_service.ListLabelsResponse() + req.return_value._content = plugin_service.Plugin.to_json( + plugin_service.Plugin() ) - request = label_service.ListLabelsRequest() + request = plugin_service.EnablePluginRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = label_service.ListLabelsResponse() + post.return_value = plugin_service.Plugin() - client.list_labels( + client.enable_plugin( request, metadata=[ ("key", "val"), @@ -1467,16 +1464,16 @@ def test_list_labels_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_labels_rest_bad_request( - transport: str = "rest", request_type=label_service.ListLabelsRequest +def test_enable_plugin_rest_bad_request( + transport: str = "rest", request_type=plugin_service.EnablePluginRequest ): - client = LabelServiceClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1488,11 +1485,11 @@ def test_list_labels_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_labels(request) + client.enable_plugin(request) -def test_list_labels_rest_flattened(): - client = LabelServiceClient( +def test_enable_plugin_rest_flattened(): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1500,14 +1497,14 @@ def test_list_labels_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = label_service.ListLabelsResponse() + return_value = plugin_service.Plugin() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "networks/sample1"} + sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -1515,24 +1512,26 @@ def test_list_labels_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = label_service.ListLabelsResponse.pb(return_value) + return_value = plugin_service.Plugin.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_labels(**mock_args) + client.enable_plugin(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=networks/*}/labels" % client.transport._host, args[1] + "%s/v1/{name=projects/*/locations/*/plugins/*}:enable" + % client.transport._host, + args[1], ) -def test_list_labels_rest_flattened_error(transport: str = "rest"): - client = LabelServiceClient( +def test_enable_plugin_rest_flattened_error(transport: str = "rest"): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1540,102 +1539,357 @@ def test_list_labels_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_labels( - label_service.ListLabelsRequest(), - parent="parent_value", + client.enable_plugin( + plugin_service.EnablePluginRequest(), + name="name_value", ) -def test_list_labels_rest_pager(transport: str = "rest"): - client = LabelServiceClient( +def test_enable_plugin_rest_error(): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + plugin_service.DisablePluginRequest, + dict, + ], +) +def test_disable_plugin_rest(request_type): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin( + name="name_value", + display_name="display_name_value", + description="description_value", + state=plugin_service.Plugin.State.ENABLED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.disable_plugin(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, plugin_service.Plugin) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == plugin_service.Plugin.State.ENABLED + + +def test_disable_plugin_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.disable_plugin in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.disable_plugin] = mock_rpc + + request = {} + client.disable_plugin(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.disable_plugin(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_disable_plugin_rest_required_fields( + request_type=plugin_service.DisablePluginRequest, +): + transport_class = transports.ApiHubPluginRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - label_service.ListLabelsResponse( - labels=[ - label_service.Label(), - label_service.Label(), - label_service.Label(), - ], - next_page_token="abc", - ), - label_service.ListLabelsResponse( - labels=[], - next_page_token="def", - ), - label_service.ListLabelsResponse( - labels=[ - label_service.Label(), - ], - next_page_token="ghi", - ), - label_service.ListLabelsResponse( - labels=[ - label_service.Label(), - label_service.Label(), - ], - ), + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.disable_plugin(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_disable_plugin_rest_unset_required_fields(): + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.disable_plugin._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_disable_plugin_rest_interceptors(null_interceptor): + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubPluginRestInterceptor(), + ) + client = ApiHubPluginClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "post_disable_plugin" + ) as post, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "pre_disable_plugin" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = plugin_service.DisablePluginRequest.pb( + plugin_service.DisablePluginRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = plugin_service.Plugin.to_json( + plugin_service.Plugin() + ) + + request = plugin_service.DisablePluginRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = plugin_service.Plugin() + + client.disable_plugin( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_disable_plugin_rest_bad_request( + transport: str = "rest", request_type=plugin_service.DisablePluginRequest +): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_plugin(request) + + +def test_disable_plugin_rest_flattened(): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.disable_plugin(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*}:disable" + % client.transport._host, + args[1], ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple(label_service.ListLabelsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "networks/sample1"} +def test_disable_plugin_rest_flattened_error(transport: str = "rest"): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_labels(request=sample_request) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.disable_plugin( + plugin_service.DisablePluginRequest(), + name="name_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, label_service.Label) for i in results) - pages = list(client.list_labels(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_disable_plugin_rest_error(): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
- transport = transports.LabelServiceRestTransport( + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = LabelServiceClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. - transport = transports.LabelServiceRestTransport( + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = LabelServiceClient( + client = ApiHubPluginClient( client_options={"credentials_file": "credentials.json"}, transport=transport, ) # It is an error to provide an api_key and a transport instance. - transport = transports.LabelServiceRestTransport( + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = LabelServiceClient( + client = ApiHubPluginClient( client_options=options, transport=transport, ) @@ -1644,16 +1898,16 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = LabelServiceClient( + client = ApiHubPluginClient( client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. - transport = transports.LabelServiceRestTransport( + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = LabelServiceClient( + client = ApiHubPluginClient( client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -1661,17 +1915,17 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. 
- transport = transports.LabelServiceRestTransport( + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - client = LabelServiceClient(transport=transport) + client = ApiHubPluginClient(transport=transport) assert client.transport is transport @pytest.mark.parametrize( "transport_class", [ - transports.LabelServiceRestTransport, + transports.ApiHubPluginRestTransport, ], ) def test_transport_adc(transport_class): @@ -1689,37 +1943,43 @@ def test_transport_adc(transport_class): ], ) def test_transport_kind(transport_name): - transport = LabelServiceClient.get_transport_class(transport_name)( + transport = ApiHubPluginClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name -def test_label_service_base_transport_error(): +def test_api_hub_plugin_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.LabelServiceTransport( + transport = transports.ApiHubPluginTransport( credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) -def test_label_service_base_transport(): +def test_api_hub_plugin_base_transport(): # Instantiate the base transport. with mock.patch( - "google.ads.admanager_v1.services.label_service.transports.LabelServiceTransport.__init__" + "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport.__init__" ) as Transport: Transport.return_value = None - transport = transports.LabelServiceTransport( + transport = transports.ApiHubPluginTransport( credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - "get_label", - "list_labels", + "get_plugin", + "enable_plugin", + "disable_plugin", + "get_location", + "list_locations", "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -1737,56 +1997,56 @@ def test_label_service_base_transport(): getattr(transport, r)() -def test_label_service_base_transport_with_credentials_file(): +def test_api_hub_plugin_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.ads.admanager_v1.services.label_service.transports.LabelServiceTransport._prep_wrapped_messages" + "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LabelServiceTransport( + transport = transports.ApiHubPluginTransport( credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", scopes=None, - default_scopes=(), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) -def test_label_service_base_transport_with_adc(): +def test_api_hub_plugin_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.ads.admanager_v1.services.label_service.transports.LabelServiceTransport._prep_wrapped_messages" + "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LabelServiceTransport() + transport = transports.ApiHubPluginTransport() adc.assert_called_once() -def test_label_service_auth_adc(): +def test_api_hub_plugin_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LabelServiceClient() + ApiHubPluginClient() adc.assert_called_once_with( scopes=None, - default_scopes=(), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) -def test_label_service_http_transport_client_cert_source_for_mtls(): +def test_api_hub_plugin_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" ) as mock_configure_mtls_channel: - transports.LabelServiceRestTransport( + transports.ApiHubPluginRestTransport( credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) @@ -1798,18 +2058,18 @@ def test_label_service_http_transport_client_cert_source_for_mtls(): "rest", ], ) -def test_label_service_host_no_port(transport_name): - client = LabelServiceClient( +def test_api_hub_plugin_host_no_port(transport_name): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( - api_endpoint="admanager.googleapis.com" + 
api_endpoint="apihub.googleapis.com" ), transport=transport_name, ) assert client.transport._host == ( - "admanager.googleapis.com:443" + "apihub.googleapis.com:443" if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" + else "https://apihub.googleapis.com" ) @@ -1819,18 +2079,18 @@ def test_label_service_host_no_port(transport_name): "rest", ], ) -def test_label_service_host_with_port(transport_name): - client = LabelServiceClient( +def test_api_hub_plugin_host_with_port(transport_name): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( - api_endpoint="admanager.googleapis.com:8000" + api_endpoint="apihub.googleapis.com:8000" ), transport=transport_name, ) assert client.transport._host == ( - "admanager.googleapis.com:8000" + "apihub.googleapis.com:8000" if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com:8000" + else "https://apihub.googleapis.com:8000" ) @@ -1840,168 +2100,180 @@ def test_label_service_host_with_port(transport_name): "rest", ], ) -def test_label_service_client_transport_session_collision(transport_name): +def test_api_hub_plugin_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() - client1 = LabelServiceClient( + client1 = ApiHubPluginClient( credentials=creds1, transport=transport_name, ) - client2 = LabelServiceClient( + client2 = ApiHubPluginClient( credentials=creds2, transport=transport_name, ) - session1 = client1.transport.get_label._session - session2 = client2.transport.get_label._session + session1 = client1.transport.get_plugin._session + session2 = client2.transport.get_plugin._session assert session1 != session2 - session1 = client1.transport.list_labels._session - session2 = client2.transport.list_labels._session + session1 = client1.transport.enable_plugin._session + session2 = 
client2.transport.enable_plugin._session + assert session1 != session2 + session1 = client1.transport.disable_plugin._session + session2 = client2.transport.disable_plugin._session assert session1 != session2 -def test_label_path(): - network_code = "squid" - label = "clam" - expected = "networks/{network_code}/labels/{label}".format( - network_code=network_code, - label=label, +def test_attribute_path(): + project = "squid" + location = "clam" + attribute = "whelk" + expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( + project=project, + location=location, + attribute=attribute, ) - actual = LabelServiceClient.label_path(network_code, label) + actual = ApiHubPluginClient.attribute_path(project, location, attribute) assert expected == actual -def test_parse_label_path(): +def test_parse_attribute_path(): expected = { - "network_code": "whelk", - "label": "octopus", + "project": "octopus", + "location": "oyster", + "attribute": "nudibranch", } - path = LabelServiceClient.label_path(**expected) + path = ApiHubPluginClient.attribute_path(**expected) # Check that the path construction is reversible. 
- actual = LabelServiceClient.parse_label_path(path) + actual = ApiHubPluginClient.parse_attribute_path(path) assert expected == actual -def test_network_path(): - network_code = "oyster" - expected = "networks/{network_code}".format( - network_code=network_code, +def test_plugin_path(): + project = "cuttlefish" + location = "mussel" + plugin = "winkle" + expected = "projects/{project}/locations/{location}/plugins/{plugin}".format( + project=project, + location=location, + plugin=plugin, ) - actual = LabelServiceClient.network_path(network_code) + actual = ApiHubPluginClient.plugin_path(project, location, plugin) assert expected == actual -def test_parse_network_path(): +def test_parse_plugin_path(): expected = { - "network_code": "nudibranch", + "project": "nautilus", + "location": "scallop", + "plugin": "abalone", } - path = LabelServiceClient.network_path(**expected) + path = ApiHubPluginClient.plugin_path(**expected) # Check that the path construction is reversible. - actual = LabelServiceClient.parse_network_path(path) + actual = ApiHubPluginClient.parse_plugin_path(path) assert expected == actual def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) - actual = LabelServiceClient.common_billing_account_path(billing_account) + actual = ApiHubPluginClient.common_billing_account_path(billing_account) assert expected == actual def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "clam", } - path = LabelServiceClient.common_billing_account_path(**expected) + path = ApiHubPluginClient.common_billing_account_path(**expected) # Check that the path construction is reversible. 
- actual = LabelServiceClient.parse_common_billing_account_path(path) + actual = ApiHubPluginClient.parse_common_billing_account_path(path) assert expected == actual def test_common_folder_path(): - folder = "winkle" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) - actual = LabelServiceClient.common_folder_path(folder) + actual = ApiHubPluginClient.common_folder_path(folder) assert expected == actual def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "octopus", } - path = LabelServiceClient.common_folder_path(**expected) + path = ApiHubPluginClient.common_folder_path(**expected) # Check that the path construction is reversible. - actual = LabelServiceClient.parse_common_folder_path(path) + actual = ApiHubPluginClient.parse_common_folder_path(path) assert expected == actual def test_common_organization_path(): - organization = "scallop" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) - actual = LabelServiceClient.common_organization_path(organization) + actual = ApiHubPluginClient.common_organization_path(organization) assert expected == actual def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "nudibranch", } - path = LabelServiceClient.common_organization_path(**expected) + path = ApiHubPluginClient.common_organization_path(**expected) # Check that the path construction is reversible. 
- actual = LabelServiceClient.parse_common_organization_path(path) + actual = ApiHubPluginClient.parse_common_organization_path(path) assert expected == actual def test_common_project_path(): - project = "squid" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) - actual = LabelServiceClient.common_project_path(project) + actual = ApiHubPluginClient.common_project_path(project) assert expected == actual def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "mussel", } - path = LabelServiceClient.common_project_path(**expected) + path = ApiHubPluginClient.common_project_path(**expected) # Check that the path construction is reversible. - actual = LabelServiceClient.parse_common_project_path(path) + actual = ApiHubPluginClient.parse_common_project_path(path) assert expected == actual def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) - actual = LabelServiceClient.common_location_path(project, location) + actual = ApiHubPluginClient.common_location_path(project, location) assert expected == actual def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "scallop", + "location": "abalone", } - path = LabelServiceClient.common_location_path(**expected) + path = ApiHubPluginClient.common_location_path(**expected) # Check that the path construction is reversible. 
- actual = LabelServiceClient.parse_common_location_path(path) + actual = ApiHubPluginClient.parse_common_location_path(path) assert expected == actual @@ -2009,18 +2281,18 @@ def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( - transports.LabelServiceTransport, "_prep_wrapped_messages" + transports.ApiHubPluginTransport, "_prep_wrapped_messages" ) as prep: - client = LabelServiceClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) with mock.patch.object( - transports.LabelServiceTransport, "_prep_wrapped_messages" + transports.ApiHubPluginTransport, "_prep_wrapped_messages" ) as prep: - transport_class = LabelServiceClient.get_transport_class() + transport_class = ApiHubPluginClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, @@ -2028,17 +2300,247 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): - client = LabelServiceClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2061,11 +2563,11 @@ def test_get_operation_rest_bad_request( ], ) def test_get_operation_rest(request_type): - client = LabelServiceClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: @@ -2086,13 +2588,71 @@ def test_get_operation_rest(request_type): assert isinstance(response, operations_pb2.Operation) +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + def test_transport_close(): transports = { "rest": "_session", } for transport, close_name in transports.items(): - client = LabelServiceClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object( @@ -2108,7 +2668,7 @@ def test_client_ctx(): "rest", ] for transport in transports: - client = LabelServiceClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
@@ -2122,7 +2682,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (LabelServiceClient, transports.LabelServiceRestTransport), + (ApiHubPluginClient, transports.ApiHubPluginRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py new file mode 100644 index 000000000000..15813aa93505 --- /dev/null +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py @@ -0,0 +1,3070 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.apihub_v1.services.host_project_registration_service import ( + HostProjectRegistrationServiceClient, + pagers, + transports, +) +from google.cloud.apihub_v1.types import host_project_registration_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert HostProjectRegistrationServiceClient._get_default_mtls_endpoint(None) is None + assert ( + HostProjectRegistrationServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + HostProjectRegistrationServiceClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + HostProjectRegistrationServiceClient._get_default_mtls_endpoint( + sandbox_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + HostProjectRegistrationServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + HostProjectRegistrationServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert HostProjectRegistrationServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert HostProjectRegistrationServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert HostProjectRegistrationServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with 
pytest.raises(ValueError) as excinfo: + HostProjectRegistrationServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert HostProjectRegistrationServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert HostProjectRegistrationServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert HostProjectRegistrationServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + HostProjectRegistrationServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert HostProjectRegistrationServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + HostProjectRegistrationServiceClient._get_client_cert_source(None, False) + is None + ) + assert ( + HostProjectRegistrationServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + HostProjectRegistrationServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + HostProjectRegistrationServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + HostProjectRegistrationServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + HostProjectRegistrationServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(HostProjectRegistrationServiceClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = HostProjectRegistrationServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + HostProjectRegistrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + HostProjectRegistrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + assert ( + HostProjectRegistrationServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + HostProjectRegistrationServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == HostProjectRegistrationServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + HostProjectRegistrationServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + HostProjectRegistrationServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == HostProjectRegistrationServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + HostProjectRegistrationServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == HostProjectRegistrationServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + HostProjectRegistrationServiceClient._get_api_endpoint( + None, None, 
mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + HostProjectRegistrationServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + HostProjectRegistrationServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + HostProjectRegistrationServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + HostProjectRegistrationServiceClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + HostProjectRegistrationServiceClient._get_universe_domain(None, None) + == HostProjectRegistrationServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + HostProjectRegistrationServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + HostProjectRegistrationServiceClient, + transports.HostProjectRegistrationServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (HostProjectRegistrationServiceClient, "rest"), + ], +) +def test_host_project_registration_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.HostProjectRegistrationServiceRestTransport, "rest"), + ], +) +def test_host_project_registration_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, 
"with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (HostProjectRegistrationServiceClient, "rest"), + ], +) +def test_host_project_registration_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +def test_host_project_registration_service_client_get_transport_class(): + transport = HostProjectRegistrationServiceClient.get_transport_class() + available_transports = [ + transports.HostProjectRegistrationServiceRestTransport, + ] + assert transport in available_transports + + transport = HostProjectRegistrationServiceClient.get_transport_class("rest") + assert transport == transports.HostProjectRegistrationServiceRestTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + ( + HostProjectRegistrationServiceClient, + transports.HostProjectRegistrationServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + HostProjectRegistrationServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(HostProjectRegistrationServiceClient), +) +def test_host_project_registration_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + HostProjectRegistrationServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + HostProjectRegistrationServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + HostProjectRegistrationServiceClient, + transports.HostProjectRegistrationServiceRestTransport, + "rest", + "true", + ), + ( + HostProjectRegistrationServiceClient, + 
transports.HostProjectRegistrationServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + HostProjectRegistrationServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(HostProjectRegistrationServiceClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_host_project_registration_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [HostProjectRegistrationServiceClient]) +@mock.patch.object( + HostProjectRegistrationServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(HostProjectRegistrationServiceClient), +) +def test_host_project_registration_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [HostProjectRegistrationServiceClient]) +@mock.patch.object( + HostProjectRegistrationServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(HostProjectRegistrationServiceClient), +) +def test_host_project_registration_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = HostProjectRegistrationServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + HostProjectRegistrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + HostProjectRegistrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + HostProjectRegistrationServiceClient, + transports.HostProjectRegistrationServiceRestTransport, + "rest", + ), + ], +) +def test_host_project_registration_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + HostProjectRegistrationServiceClient, + transports.HostProjectRegistrationServiceRestTransport, + "rest", + None, + ), + ], +) +def test_host_project_registration_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + host_project_registration_service.CreateHostProjectRegistrationRequest, + dict, + ], +) +def test_create_host_project_registration_rest(request_type): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["host_project_registration"] = { + "name": "name_value", + "gcp_project": "gcp_project_value", + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = host_project_registration_service.CreateHostProjectRegistrationRequest.meta.fields[ + "host_project_registration" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "host_project_registration" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["host_project_registration"][field]) + ): + del 
request_init["host_project_registration"][field][i][subfield] + else: + del request_init["host_project_registration"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration( + name="name_value", + gcp_project="gcp_project_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_host_project_registration(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance( + response, host_project_registration_service.HostProjectRegistration + ) + assert response.name == "name_value" + assert response.gcp_project == "gcp_project_value" + + +def test_create_host_project_registration_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_host_project_registration + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_host_project_registration + ] = mock_rpc + + request = {} + client.create_host_project_registration(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_host_project_registration(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_host_project_registration_rest_required_fields( + request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, +): + transport_class = transports.HostProjectRegistrationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["host_project_registration_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "hostProjectRegistrationId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_host_project_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "hostProjectRegistrationId" in jsonified_request + assert ( + jsonified_request["hostProjectRegistrationId"] + == request_init["host_project_registration_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "hostProjectRegistrationId" + ] = "host_project_registration_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_host_project_registration._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("host_project_registration_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "hostProjectRegistrationId" in jsonified_request + assert ( + jsonified_request["hostProjectRegistrationId"] + == "host_project_registration_id_value" + ) + + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_host_project_registration(request) + + expected_params = [ + ( + "hostProjectRegistrationId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_host_project_registration_rest_unset_required_fields(): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.create_host_project_registration._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("hostProjectRegistrationId",)) + & set( + ( + "parent", + "hostProjectRegistrationId", + "hostProjectRegistration", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_host_project_registration_rest_interceptors(null_interceptor): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HostProjectRegistrationServiceRestInterceptor(), + ) + client = HostProjectRegistrationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.HostProjectRegistrationServiceRestInterceptor, + "post_create_host_project_registration", + ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "pre_create_host_project_registration", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + host_project_registration_service.CreateHostProjectRegistrationRequest.pb( + host_project_registration_service.CreateHostProjectRegistrationRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + host_project_registration_service.HostProjectRegistration.to_json( + host_project_registration_service.HostProjectRegistration() + ) + ) + + request = ( + host_project_registration_service.CreateHostProjectRegistrationRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = host_project_registration_service.HostProjectRegistration() + + client.create_host_project_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_host_project_registration_rest_bad_request( + transport: str = "rest", + request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, +): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_host_project_registration(request) + + +def test_create_host_project_registration_rest_flattened(): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + host_project_registration=host_project_registration_service.HostProjectRegistration( + name="name_value" + ), + host_project_registration_id="host_project_registration_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_host_project_registration(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" + % client.transport._host, + args[1], + ) + + +def test_create_host_project_registration_rest_flattened_error(transport: str = "rest"): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_host_project_registration( + host_project_registration_service.CreateHostProjectRegistrationRequest(), + parent="parent_value", + host_project_registration=host_project_registration_service.HostProjectRegistration( + name="name_value" + ), + host_project_registration_id="host_project_registration_id_value", + ) + + +def test_create_host_project_registration_rest_error(): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + host_project_registration_service.GetHostProjectRegistrationRequest, + dict, + ], +) +def test_get_host_project_registration_rest(request_type): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = host_project_registration_service.HostProjectRegistration( + name="name_value", + gcp_project="gcp_project_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_host_project_registration(request) + + # Establish that the response is the type that we expect. + assert isinstance( + response, host_project_registration_service.HostProjectRegistration + ) + assert response.name == "name_value" + assert response.gcp_project == "gcp_project_value" + + +def test_get_host_project_registration_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_host_project_registration + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_host_project_registration + ] = mock_rpc + + request = {} + client.get_host_project_registration(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_host_project_registration(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_host_project_registration_rest_required_fields( + request_type=host_project_registration_service.GetHostProjectRegistrationRequest, +): + transport_class = transports.HostProjectRegistrationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_host_project_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_host_project_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_host_project_registration(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_host_project_registration_rest_unset_required_fields(): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_host_project_registration._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_host_project_registration_rest_interceptors(null_interceptor): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HostProjectRegistrationServiceRestInterceptor(), + ) + client = HostProjectRegistrationServiceClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_get_host_project_registration", + ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "pre_get_host_project_registration", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + host_project_registration_service.GetHostProjectRegistrationRequest.pb( + host_project_registration_service.GetHostProjectRegistrationRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + host_project_registration_service.HostProjectRegistration.to_json( + host_project_registration_service.HostProjectRegistration() + ) + ) + + request = host_project_registration_service.GetHostProjectRegistrationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = host_project_registration_service.HostProjectRegistration() + + client.get_host_project_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_host_project_registration_rest_bad_request( + transport: str = "rest", + request_type=host_project_registration_service.GetHostProjectRegistrationRequest, +): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" + } + request = request_type(**request_init) + + # Mock 
the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_host_project_registration(request) + + +def test_get_host_project_registration_rest_flattened(): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_host_project_registration(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/hostProjectRegistrations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_host_project_registration_rest_flattened_error(transport: str = "rest"): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_host_project_registration( + host_project_registration_service.GetHostProjectRegistrationRequest(), + name="name_value", + ) + + +def test_get_host_project_registration_rest_error(): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + host_project_registration_service.ListHostProjectRegistrationsRequest, + dict, + ], +) +def test_list_host_project_registrations_rest(request_type): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse( + next_page_token="next_page_token_value", + ) + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_host_project_registrations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListHostProjectRegistrationsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_host_project_registrations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_host_project_registrations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_host_project_registrations + ] = mock_rpc + + request = {} + client.list_host_project_registrations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_host_project_registrations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_host_project_registrations_rest_required_fields( + request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, +): + transport_class = transports.HostProjectRegistrationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_host_project_registrations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_host_project_registrations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = host_project_registration_service.ListHostProjectRegistrationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_host_project_registrations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_host_project_registrations_rest_unset_required_fields(): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_host_project_registrations._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_host_project_registrations_rest_interceptors(null_interceptor): + transport = 
transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HostProjectRegistrationServiceRestInterceptor(), + ) + client = HostProjectRegistrationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_list_host_project_registrations", + ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "pre_list_host_project_registrations", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + host_project_registration_service.ListHostProjectRegistrationsRequest.pb( + host_project_registration_service.ListHostProjectRegistrationsRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( + host_project_registration_service.ListHostProjectRegistrationsResponse() + ) + + request = ( + host_project_registration_service.ListHostProjectRegistrationsRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse() + ) + + client.list_host_project_registrations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_host_project_registrations_rest_bad_request( + transport: str = "rest", + 
request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, +): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_host_project_registrations(request) + + +def test_list_host_project_registrations_rest_flattened(): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_host_project_registrations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" + % client.transport._host, + args[1], + ) + + +def test_list_host_project_registrations_rest_flattened_error(transport: str = "rest"): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_host_project_registrations( + host_project_registration_service.ListHostProjectRegistrationsRequest(), + parent="parent_value", + ) + + +def test_list_host_project_registrations_rest_pager(transport: str = "rest"): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + host_project_registration_service.ListHostProjectRegistrationsResponse( + host_project_registrations=[ + host_project_registration_service.HostProjectRegistration(), + host_project_registration_service.HostProjectRegistration(), + host_project_registration_service.HostProjectRegistration(), + ], + next_page_token="abc", + ), + host_project_registration_service.ListHostProjectRegistrationsResponse( + host_project_registrations=[], + next_page_token="def", + ), + host_project_registration_service.ListHostProjectRegistrationsResponse( + host_project_registrations=[ + host_project_registration_service.HostProjectRegistration(), + ], + next_page_token="ghi", + ), + host_project_registration_service.ListHostProjectRegistrationsResponse( + host_project_registrations=[ + host_project_registration_service.HostProjectRegistration(), + host_project_registration_service.HostProjectRegistration(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( + x + ) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_host_project_registrations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, host_project_registration_service.HostProjectRegistration) + for i in results + ) + + pages = list( + 
client.list_host_project_registrations(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = HostProjectRegistrationServiceClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.HostProjectRegistrationServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = HostProjectRegistrationServiceClient.get_transport_class( + transport_name + )( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_host_project_registration_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.HostProjectRegistrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_host_project_registration_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.HostProjectRegistrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_host_project_registration", + "get_host_project_registration", + "list_host_project_registrations", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_host_project_registration_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.HostProjectRegistrationServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_host_project_registration_service_base_transport_with_adc(): + # Test the default credentials are used if credentials 
and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.HostProjectRegistrationServiceTransport() + adc.assert_called_once() + + +def test_host_project_registration_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + HostProjectRegistrationServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_host_project_registration_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.HostProjectRegistrationServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_host_project_registration_service_host_no_port(transport_name): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ 
+ "rest", + ], +) +def test_host_project_registration_service_host_with_port(transport_name): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_host_project_registration_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = HostProjectRegistrationServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = HostProjectRegistrationServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_host_project_registration._session + session2 = client2.transport.create_host_project_registration._session + assert session1 != session2 + session1 = client1.transport.get_host_project_registration._session + session2 = client2.transport.get_host_project_registration._session + assert session1 != session2 + session1 = client1.transport.list_host_project_registrations._session + session2 = client2.transport.list_host_project_registrations._session + assert session1 != session2 + + +def test_host_project_registration_path(): + project = "squid" + location = "clam" + host_project_registration = "whelk" + expected = "projects/{project}/locations/{location}/hostProjectRegistrations/{host_project_registration}".format( + project=project, + location=location, + host_project_registration=host_project_registration, + ) + actual = HostProjectRegistrationServiceClient.host_project_registration_path( + project, location, host_project_registration + ) + assert expected == actual + + +def 
test_parse_host_project_registration_path(): + expected = { + "project": "octopus", + "location": "oyster", + "host_project_registration": "nudibranch", + } + path = HostProjectRegistrationServiceClient.host_project_registration_path( + **expected + ) + + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_host_project_registration_path( + path + ) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = HostProjectRegistrationServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = HostProjectRegistrationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_billing_account_path( + path + ) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = HostProjectRegistrationServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = HostProjectRegistrationServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = HostProjectRegistrationServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = HostProjectRegistrationServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = HostProjectRegistrationServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = HostProjectRegistrationServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = HostProjectRegistrationServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = HostProjectRegistrationServiceClient.common_location_path( + project, location + ) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = HostProjectRegistrationServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = HostProjectRegistrationServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = HostProjectRegistrationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + HostProjectRegistrationServiceClient, + transports.HostProjectRegistrationServiceRestTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git 
a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py new file mode 100644 index 000000000000..db139191d3f8 --- /dev/null +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py @@ -0,0 +1,3068 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import 
DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.apihub_v1.services.linting_service import ( + LintingServiceClient, + transports, +) +from google.cloud.apihub_v1.types import common_fields, linting_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert LintingServiceClient._get_default_mtls_endpoint(None) is None + assert ( + LintingServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + LintingServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + LintingServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LintingServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + 
LintingServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert LintingServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert LintingServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert LintingServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + LintingServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert LintingServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert LintingServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert LintingServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + LintingServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert LintingServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + 
mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert LintingServiceClient._get_client_cert_source(None, False) is None + assert ( + LintingServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + LintingServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + LintingServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + LintingServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + LintingServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LintingServiceClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = LintingServiceClient._DEFAULT_UNIVERSE + default_endpoint = LintingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = LintingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + LintingServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + LintingServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == LintingServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LintingServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + LintingServiceClient._get_api_endpoint(None, None, default_universe, "always") + == LintingServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( 
+ LintingServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == LintingServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LintingServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + LintingServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + LintingServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + LintingServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + LintingServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + LintingServiceClient._get_universe_domain(None, None) + == LintingServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + LintingServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LintingServiceClient, transports.LintingServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (LintingServiceClient, "rest"), + ], +) +def test_linting_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.LintingServiceRestTransport, "rest"), + ], +) +def test_linting_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (LintingServiceClient, "rest"), + ], +) +def test_linting_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +def test_linting_service_client_get_transport_class(): + transport = LintingServiceClient.get_transport_class() + available_transports = [ + transports.LintingServiceRestTransport, + ] + assert transport in available_transports + + transport = LintingServiceClient.get_transport_class("rest") + assert transport == transports.LintingServiceRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LintingServiceClient, transports.LintingServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + LintingServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + 
modify_default_endpoint_template(LintingServiceClient), +) +def test_linting_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(LintingServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(LintingServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (LintingServiceClient, transports.LintingServiceRestTransport, "rest", "true"), + (LintingServiceClient, transports.LintingServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + LintingServiceClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LintingServiceClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_linting_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [LintingServiceClient]) +@mock.patch.object( + LintingServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LintingServiceClient), +) +def test_linting_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [LintingServiceClient]) +@mock.patch.object( + LintingServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LintingServiceClient), +) +def test_linting_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = LintingServiceClient._DEFAULT_UNIVERSE + default_endpoint = LintingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = LintingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LintingServiceClient, transports.LintingServiceRestTransport, "rest"), + ], +) +def test_linting_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (LintingServiceClient, transports.LintingServiceRestTransport, "rest", None), + ], +) +def test_linting_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + linting_service.GetStyleGuideRequest, + dict, + ], +) +def test_get_style_guide_rest(request_type): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide( + name="name_value", + linter=common_fields.Linter.SPECTRAL, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_style_guide(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, linting_service.StyleGuide) + assert response.name == "name_value" + assert response.linter == common_fields.Linter.SPECTRAL + + +def test_get_style_guide_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_style_guide in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_style_guide] = mock_rpc + + request = {} + client.get_style_guide(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_style_guide(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_style_guide_rest_required_fields( + request_type=linting_service.GetStyleGuideRequest, +): + transport_class = transports.LintingServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_style_guide(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_style_guide_rest_unset_required_fields(): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_style_guide._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_style_guide_rest_interceptors(null_interceptor): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LintingServiceRestInterceptor(), + ) + client = LintingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LintingServiceRestInterceptor, "post_get_style_guide" + ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, "pre_get_style_guide" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
linting_service.GetStyleGuideRequest.pb( + linting_service.GetStyleGuideRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = linting_service.StyleGuide.to_json( + linting_service.StyleGuide() + ) + + request = linting_service.GetStyleGuideRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = linting_service.StyleGuide() + + client.get_style_guide( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_style_guide_rest_bad_request( + transport: str = "rest", request_type=linting_service.GetStyleGuideRequest +): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_style_guide(request) + + +def test_get_style_guide_rest_flattened(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = linting_service.StyleGuide() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_style_guide(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}" + % client.transport._host, + args[1], + ) + + +def test_get_style_guide_rest_flattened_error(transport: str = "rest"): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_style_guide( + linting_service.GetStyleGuideRequest(), + name="name_value", + ) + + +def test_get_style_guide_rest_error(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + linting_service.UpdateStyleGuideRequest, + dict, + ], +) +def test_update_style_guide_rest(request_type): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "style_guide": { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + } + request_init["style_guide"] = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide", + "linter": 1, + "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = linting_service.UpdateStyleGuideRequest.meta.fields["style_guide"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["style_guide"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["style_guide"][field])): + del request_init["style_guide"][field][i][subfield] + else: + del 
request_init["style_guide"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide( + name="name_value", + linter=common_fields.Linter.SPECTRAL, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_style_guide(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, linting_service.StyleGuide) + assert response.name == "name_value" + assert response.linter == common_fields.Linter.SPECTRAL + + +def test_update_style_guide_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_style_guide in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_style_guide + ] = mock_rpc + + request = {} + client.update_style_guide(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_style_guide(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_style_guide_rest_required_fields( + request_type=linting_service.UpdateStyleGuideRequest, +): + transport_class = transports.LintingServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_style_guide._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_style_guide._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_style_guide(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_style_guide_rest_unset_required_fields(): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_style_guide._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("styleGuide",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_style_guide_rest_interceptors(null_interceptor): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LintingServiceRestInterceptor(), + ) + client = LintingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LintingServiceRestInterceptor, "post_update_style_guide" + ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, "pre_update_style_guide" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = linting_service.UpdateStyleGuideRequest.pb( + linting_service.UpdateStyleGuideRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = linting_service.StyleGuide.to_json( + linting_service.StyleGuide() + ) + + request = linting_service.UpdateStyleGuideRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = linting_service.StyleGuide() + + client.update_style_guide( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_style_guide_rest_bad_request( + transport: str = "rest", request_type=linting_service.UpdateStyleGuideRequest +): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "style_guide": { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_style_guide(request) + + +def test_update_style_guide_rest_flattened(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide() + + # get arguments that satisfy an http rule for this method + sample_request = { + "style_guide": { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + } + + # get truthy value for each flattened field + mock_args = dict( + style_guide=linting_service.StyleGuide(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_style_guide(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{style_guide.name=projects/*/locations/*/plugins/*/styleGuide}" + % client.transport._host, + args[1], + ) + + +def test_update_style_guide_rest_flattened_error(transport: str = "rest"): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_style_guide( + linting_service.UpdateStyleGuideRequest(), + style_guide=linting_service.StyleGuide(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_style_guide_rest_error(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + linting_service.GetStyleGuideContentsRequest, + dict, + ], +) +def test_get_style_guide_contents_rest(request_type): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = linting_service.StyleGuideContents( + contents=b"contents_blob", + mime_type="mime_type_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuideContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_style_guide_contents(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, linting_service.StyleGuideContents) + assert response.contents == b"contents_blob" + assert response.mime_type == "mime_type_value" + + +def test_get_style_guide_contents_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_style_guide_contents + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_style_guide_contents + ] = mock_rpc + + request = {} + client.get_style_guide_contents(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_style_guide_contents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_style_guide_contents_rest_required_fields( + request_type=linting_service.GetStyleGuideContentsRequest, +): + transport_class = transports.LintingServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuideContents() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = linting_service.StyleGuideContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_style_guide_contents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_style_guide_contents_rest_unset_required_fields(): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_style_guide_contents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_style_guide_contents_rest_interceptors(null_interceptor): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LintingServiceRestInterceptor(), + ) + client = LintingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LintingServiceRestInterceptor, "post_get_style_guide_contents" + ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, "pre_get_style_guide_contents" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = linting_service.GetStyleGuideContentsRequest.pb( + linting_service.GetStyleGuideContentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = linting_service.StyleGuideContents.to_json( + linting_service.StyleGuideContents() + ) + + request = linting_service.GetStyleGuideContentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = linting_service.StyleGuideContents() + + client.get_style_guide_contents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_style_guide_contents_rest_bad_request( + transport: str = "rest", request_type=linting_service.GetStyleGuideContentsRequest +): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_style_guide_contents(request) + + +def test_get_style_guide_contents_rest_flattened(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuideContents() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuideContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_style_guide_contents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}:contents" + % client.transport._host, + args[1], + ) + + +def test_get_style_guide_contents_rest_flattened_error(transport: str = "rest"): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_style_guide_contents( + linting_service.GetStyleGuideContentsRequest(), + name="name_value", + ) + + +def test_get_style_guide_contents_rest_error(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + linting_service.LintSpecRequest, + dict, + ], +) +def test_lint_spec_rest(request_type): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lint_spec(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_lint_spec_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.lint_spec in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.lint_spec] = mock_rpc + + request = {} + client.lint_spec(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.lint_spec(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_lint_spec_rest_required_fields(request_type=linting_service.LintSpecRequest): + transport_class = transports.LintingServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lint_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).lint_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.lint_spec(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_lint_spec_rest_unset_required_fields(): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.lint_spec._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lint_spec_rest_interceptors(null_interceptor): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LintingServiceRestInterceptor(), + ) + client = LintingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LintingServiceRestInterceptor, "pre_lint_spec" + ) as pre: + pre.assert_not_called() + pb_message = linting_service.LintSpecRequest.pb( + linting_service.LintSpecRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = linting_service.LintSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + + client.lint_spec( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_lint_spec_rest_bad_request( + transport: str = "rest", request_type=linting_service.LintSpecRequest +): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lint_spec(request) + + +def test_lint_spec_rest_error(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LintingServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LintingServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LintingServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LintingServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LintingServiceClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LintingServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = LintingServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_linting_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LintingServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_linting_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.LintingServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_style_guide", + "update_style_guide", + "get_style_guide_contents", + "lint_spec", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_linting_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LintingServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_linting_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LintingServiceTransport() + adc.assert_called_once() + + +def test_linting_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LintingServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_linting_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.LintingServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_linting_service_host_no_port(transport_name): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_linting_service_host_with_port(transport_name): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_linting_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = LintingServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = LintingServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_style_guide._session + session2 = client2.transport.get_style_guide._session + assert session1 != session2 + session1 = client1.transport.update_style_guide._session + session2 = client2.transport.update_style_guide._session + assert session1 != session2 + session1 = client1.transport.get_style_guide_contents._session + session2 = client2.transport.get_style_guide_contents._session + assert session1 != session2 + session1 = client1.transport.lint_spec._session + session2 = client2.transport.lint_spec._session + assert session1 != session2 + + +def test_spec_path(): + project = "squid" + location = "clam" + api = "whelk" + version = "octopus" + spec = "oyster" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( + project=project, + location=location, + api=api, + version=version, + spec=spec, + ) + actual = LintingServiceClient.spec_path(project, location, api, version, spec) + assert expected == actual + + +def test_parse_spec_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "api": "mussel", + "version": "winkle", + "spec": "nautilus", + } + path = LintingServiceClient.spec_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LintingServiceClient.parse_spec_path(path) + assert expected == actual + + +def test_style_guide_path(): + project = "scallop" + location = "abalone" + plugin = "squid" + expected = ( + "projects/{project}/locations/{location}/plugins/{plugin}/styleGuide".format( + project=project, + location=location, + plugin=plugin, + ) + ) + actual = LintingServiceClient.style_guide_path(project, location, plugin) + assert expected == actual + + +def test_parse_style_guide_path(): + expected = { + "project": "clam", + "location": "whelk", + "plugin": "octopus", + } + path = LintingServiceClient.style_guide_path(**expected) + + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_style_guide_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = LintingServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = LintingServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = LintingServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = LintingServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LintingServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = LintingServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = LintingServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = LintingServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = LintingServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = LintingServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = LintingServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LintingServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.LintingServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.LintingServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = LintingServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (LintingServiceClient, transports.LintingServiceRestTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py new file mode 100644 index 000000000000..06f68007eef1 --- /dev/null +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py @@ -0,0 +1,2822 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.apihub_v1.services.provisioning import ProvisioningClient, transports +from google.cloud.apihub_v1.types import common_fields, provisioning_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ProvisioningClient._get_default_mtls_endpoint(None) is None + assert ( + ProvisioningClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + ProvisioningClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ProvisioningClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ProvisioningClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ProvisioningClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert ProvisioningClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ProvisioningClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ProvisioningClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": 
"Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ProvisioningClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ProvisioningClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ProvisioningClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ProvisioningClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ProvisioningClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ProvisioningClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ProvisioningClient._get_client_cert_source(None, False) is None + assert ( + ProvisioningClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + ProvisioningClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ProvisioningClient._get_client_cert_source(None, True) 
+ is mock_default_cert_source + ) + assert ( + ProvisioningClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ProvisioningClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProvisioningClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ProvisioningClient._DEFAULT_UNIVERSE + default_endpoint = ProvisioningClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ProvisioningClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ProvisioningClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ProvisioningClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ProvisioningClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProvisioningClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ProvisioningClient._get_api_endpoint(None, None, default_universe, "always") + == ProvisioningClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProvisioningClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ProvisioningClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ProvisioningClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ProvisioningClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ProvisioningClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ProvisioningClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ProvisioningClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ProvisioningClient._get_universe_domain(None, None) + == ProvisioningClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ProvisioningClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ProvisioningClient, "rest"), + ], +) +def test_provisioning_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ProvisioningRestTransport, "rest"), + ], +) +def test_provisioning_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, 
None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ProvisioningClient, "rest"), + ], +) +def test_provisioning_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +def test_provisioning_client_get_transport_class(): + transport = ProvisioningClient.get_transport_class() + available_transports = [ + transports.ProvisioningRestTransport, + ] + assert transport in available_transports + + transport = ProvisioningClient.get_transport_class("rest") + assert transport == transports.ProvisioningRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), + ], +) +@mock.patch.object( + ProvisioningClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProvisioningClient), +) +def 
test_provisioning_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ProvisioningClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ProvisioningClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ProvisioningClient, transports.ProvisioningRestTransport, "rest", "true"), + (ProvisioningClient, transports.ProvisioningRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ProvisioningClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProvisioningClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_provisioning_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ProvisioningClient]) +@mock.patch.object( + ProvisioningClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ProvisioningClient) +) +def test_provisioning_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [ProvisioningClient]) +@mock.patch.object( + ProvisioningClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ProvisioningClient), +) +def test_provisioning_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ProvisioningClient._DEFAULT_UNIVERSE + default_endpoint = ProvisioningClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ProvisioningClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), + ], +) +def test_provisioning_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (ProvisioningClient, transports.ProvisioningRestTransport, "rest", None), + ], +) +def test_provisioning_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + provisioning_service.CreateApiHubInstanceRequest, + dict, + ], +) +def test_create_api_hub_instance_rest(request_type): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["api_hub_instance"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "state_message": "state_message_value", + "config": {"cmek_key_name": "cmek_key_name_value"}, + "labels": {}, + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = provisioning_service.CreateApiHubInstanceRequest.meta.fields[ + "api_hub_instance" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["api_hub_instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["api_hub_instance"][field])): + del request_init["api_hub_instance"][field][i][subfield] + 
else: + del request_init["api_hub_instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api_hub_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_api_hub_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_api_hub_instance + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_api_hub_instance + ] = mock_rpc + + request = {} + client.create_api_hub_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_api_hub_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_api_hub_instance_rest_required_fields( + request_type=provisioning_service.CreateApiHubInstanceRequest, +): + transport_class = transports.ProvisioningRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api_hub_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("api_hub_instance_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_api_hub_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_api_hub_instance_rest_unset_required_fields(): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_api_hub_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("apiHubInstanceId",)) + & set( + ( + "parent", + "apiHubInstance", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_api_hub_instance_rest_interceptors(null_interceptor): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProvisioningRestInterceptor(), + ) + client = ProvisioningClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ProvisioningRestInterceptor, "post_create_api_hub_instance" + ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "pre_create_api_hub_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = provisioning_service.CreateApiHubInstanceRequest.pb( + provisioning_service.CreateApiHubInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = provisioning_service.CreateApiHubInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_api_hub_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_api_hub_instance_rest_bad_request( + transport: str = "rest", + request_type=provisioning_service.CreateApiHubInstanceRequest, +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_api_hub_instance(request) + + +def test_create_api_hub_instance_rest_flattened(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + api_hub_instance=common_fields.ApiHubInstance(name="name_value"), + api_hub_instance_id="api_hub_instance_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_api_hub_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apiHubInstances" + % client.transport._host, + args[1], + ) + + +def test_create_api_hub_instance_rest_flattened_error(transport: str = "rest"): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_api_hub_instance( + provisioning_service.CreateApiHubInstanceRequest(), + parent="parent_value", + api_hub_instance=common_fields.ApiHubInstance(name="name_value"), + api_hub_instance_id="api_hub_instance_id_value", + ) + + +def test_create_api_hub_instance_rest_error(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + provisioning_service.GetApiHubInstanceRequest, + dict, + ], +) +def test_get_api_hub_instance_rest(request_type): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ApiHubInstance( + name="name_value", + state=common_fields.ApiHubInstance.State.INACTIVE, + state_message="state_message_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiHubInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_hub_instance(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common_fields.ApiHubInstance) + assert response.name == "name_value" + assert response.state == common_fields.ApiHubInstance.State.INACTIVE + assert response.state_message == "state_message_value" + assert response.description == "description_value" + + +def test_get_api_hub_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_api_hub_instance in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_api_hub_instance + ] = mock_rpc + + request = {} + client.get_api_hub_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_api_hub_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_api_hub_instance_rest_required_fields( + request_type=provisioning_service.GetApiHubInstanceRequest, +): + transport_class = transports.ProvisioningRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiHubInstance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common_fields.ApiHubInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_api_hub_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_api_hub_instance_rest_unset_required_fields(): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_api_hub_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_api_hub_instance_rest_interceptors(null_interceptor): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProvisioningRestInterceptor(), + ) + client = ProvisioningClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProvisioningRestInterceptor, "post_get_api_hub_instance" + ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "pre_get_api_hub_instance" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = provisioning_service.GetApiHubInstanceRequest.pb( + provisioning_service.GetApiHubInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ApiHubInstance.to_json( + common_fields.ApiHubInstance() + ) + + request = provisioning_service.GetApiHubInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ApiHubInstance() + + client.get_api_hub_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_api_hub_instance_rest_bad_request( + transport: str = "rest", request_type=provisioning_service.GetApiHubInstanceRequest +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_api_hub_instance(request) + + +def test_get_api_hub_instance_rest_flattened(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiHubInstance() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiHubInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_api_hub_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apiHubInstances/*}" + % client.transport._host, + args[1], + ) + + +def test_get_api_hub_instance_rest_flattened_error(transport: str = "rest"): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_api_hub_instance( + provisioning_service.GetApiHubInstanceRequest(), + name="name_value", + ) + + +def test_get_api_hub_instance_rest_error(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + provisioning_service.LookupApiHubInstanceRequest, + dict, + ], +) +def test_lookup_api_hub_instance_rest(request_type): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = provisioning_service.LookupApiHubInstanceResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = provisioning_service.LookupApiHubInstanceResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup_api_hub_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, provisioning_service.LookupApiHubInstanceResponse) + + +def test_lookup_api_hub_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.lookup_api_hub_instance + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.lookup_api_hub_instance + ] = mock_rpc + + request = {} + client.lookup_api_hub_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.lookup_api_hub_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_lookup_api_hub_instance_rest_required_fields( + request_type=provisioning_service.LookupApiHubInstanceRequest, +): + transport_class = transports.ProvisioningRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = provisioning_service.LookupApiHubInstanceResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = provisioning_service.LookupApiHubInstanceResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.lookup_api_hub_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_lookup_api_hub_instance_rest_unset_required_fields(): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.lookup_api_hub_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lookup_api_hub_instance_rest_interceptors(null_interceptor): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProvisioningRestInterceptor(), + ) + client = ProvisioningClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProvisioningRestInterceptor, "post_lookup_api_hub_instance" + ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "pre_lookup_api_hub_instance" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = provisioning_service.LookupApiHubInstanceRequest.pb( + provisioning_service.LookupApiHubInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + provisioning_service.LookupApiHubInstanceResponse.to_json( + provisioning_service.LookupApiHubInstanceResponse() + ) + ) + + request = provisioning_service.LookupApiHubInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = provisioning_service.LookupApiHubInstanceResponse() + + client.lookup_api_hub_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_lookup_api_hub_instance_rest_bad_request( + transport: str = "rest", + request_type=provisioning_service.LookupApiHubInstanceRequest, +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lookup_api_hub_instance(request) + + +def test_lookup_api_hub_instance_rest_flattened(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = provisioning_service.LookupApiHubInstanceResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = provisioning_service.LookupApiHubInstanceResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.lookup_api_hub_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apiHubInstances:lookup" + % client.transport._host, + args[1], + ) + + +def test_lookup_api_hub_instance_rest_flattened_error(transport: str = "rest"): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.lookup_api_hub_instance( + provisioning_service.LookupApiHubInstanceRequest(), + parent="parent_value", + ) + + +def test_lookup_api_hub_instance_rest_error(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProvisioningClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProvisioningClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProvisioningClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProvisioningClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ProvisioningClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProvisioningRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ProvisioningClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_provisioning_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ProvisioningTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_provisioning_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ProvisioningTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_api_hub_instance", + "get_api_hub_instance", + "lookup_api_hub_instance", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_provisioning_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProvisioningTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_provisioning_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file 
are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProvisioningTransport() + adc.assert_called_once() + + +def test_provisioning_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ProvisioningClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_provisioning_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ProvisioningRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_provisioning_rest_lro_client(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_provisioning_host_no_port(transport_name): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_provisioning_host_with_port(transport_name): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_provisioning_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ProvisioningClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ProvisioningClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_api_hub_instance._session + session2 = client2.transport.create_api_hub_instance._session + assert session1 != session2 + session1 = client1.transport.get_api_hub_instance._session + session2 = client2.transport.get_api_hub_instance._session + assert session1 != session2 + session1 = client1.transport.lookup_api_hub_instance._session + session2 = client2.transport.lookup_api_hub_instance._session + assert session1 != session2 + + +def test_api_hub_instance_path(): + project = 
"squid" + location = "clam" + api_hub_instance = "whelk" + expected = "projects/{project}/locations/{location}/apiHubInstances/{api_hub_instance}".format( + project=project, + location=location, + api_hub_instance=api_hub_instance, + ) + actual = ProvisioningClient.api_hub_instance_path( + project, location, api_hub_instance + ) + assert expected == actual + + +def test_parse_api_hub_instance_path(): + expected = { + "project": "octopus", + "location": "oyster", + "api_hub_instance": "nudibranch", + } + path = ProvisioningClient.api_hub_instance_path(**expected) + + # Check that the path construction is reversible. + actual = ProvisioningClient.parse_api_hub_instance_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ProvisioningClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = ProvisioningClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ProvisioningClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ProvisioningClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = ProvisioningClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ProvisioningClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ProvisioningClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = ProvisioningClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ProvisioningClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = ProvisioningClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = ProvisioningClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ProvisioningClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ProvisioningClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = ProvisioningClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ProvisioningClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ProvisioningTransport, "_prep_wrapped_messages" + ) as prep: + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ProvisioningTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ProvisioningClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ProvisioningClient, transports.ProvisioningRestTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py new file mode 100644 index 000000000000..836aa804eb51 --- /dev/null +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py @@ -0,0 +1,3729 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.apihub_v1.services.runtime_project_attachment_service import ( + RuntimeProjectAttachmentServiceClient, + pagers, + transports, +) +from google.cloud.apihub_v1.types import runtime_project_attachment_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the 
same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ( + RuntimeProjectAttachmentServiceClient._get_default_mtls_endpoint(None) is None + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_default_mtls_endpoint( + sandbox_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert RuntimeProjectAttachmentServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert 
RuntimeProjectAttachmentServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert RuntimeProjectAttachmentServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + RuntimeProjectAttachmentServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert RuntimeProjectAttachmentServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert RuntimeProjectAttachmentServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert RuntimeProjectAttachmentServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + RuntimeProjectAttachmentServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert RuntimeProjectAttachmentServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + RuntimeProjectAttachmentServiceClient._get_client_cert_source(None, False) + is 
None + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + RuntimeProjectAttachmentServiceClient._get_client_cert_source( + None, True + ) + is mock_default_cert_source + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + RuntimeProjectAttachmentServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = RuntimeProjectAttachmentServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + RuntimeProjectAttachmentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + RuntimeProjectAttachmentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + assert ( + RuntimeProjectAttachmentServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == RuntimeProjectAttachmentServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + 
RuntimeProjectAttachmentServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == RuntimeProjectAttachmentServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == RuntimeProjectAttachmentServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + RuntimeProjectAttachmentServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + RuntimeProjectAttachmentServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + RuntimeProjectAttachmentServiceClient._get_universe_domain(None, None) + == RuntimeProjectAttachmentServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + RuntimeProjectAttachmentServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + RuntimeProjectAttachmentServiceClient, + transports.RuntimeProjectAttachmentServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. 
+ client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RuntimeProjectAttachmentServiceClient, "rest"), + ], +) +def test_runtime_project_attachment_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.RuntimeProjectAttachmentServiceRestTransport, "rest"), + ], +) +def test_runtime_project_attachment_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RuntimeProjectAttachmentServiceClient, "rest"), + ], +) +def 
test_runtime_project_attachment_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +def test_runtime_project_attachment_service_client_get_transport_class(): + transport = RuntimeProjectAttachmentServiceClient.get_transport_class() + available_transports = [ + transports.RuntimeProjectAttachmentServiceRestTransport, + ] + assert transport in available_transports + + transport = RuntimeProjectAttachmentServiceClient.get_transport_class("rest") + assert transport == transports.RuntimeProjectAttachmentServiceRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + RuntimeProjectAttachmentServiceClient, + transports.RuntimeProjectAttachmentServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + RuntimeProjectAttachmentServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), +) +def test_runtime_project_attachment_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object( + RuntimeProjectAttachmentServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + RuntimeProjectAttachmentServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + RuntimeProjectAttachmentServiceClient, + transports.RuntimeProjectAttachmentServiceRestTransport, + "rest", + "true", + ), + ( + RuntimeProjectAttachmentServiceClient, + 
transports.RuntimeProjectAttachmentServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + RuntimeProjectAttachmentServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_runtime_project_attachment_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [RuntimeProjectAttachmentServiceClient]) +@mock.patch.object( + RuntimeProjectAttachmentServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RuntimeProjectAttachmentServiceClient), +) +def test_runtime_project_attachment_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [RuntimeProjectAttachmentServiceClient]) +@mock.patch.object( + RuntimeProjectAttachmentServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), +) +def test_runtime_project_attachment_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = RuntimeProjectAttachmentServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + RuntimeProjectAttachmentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + RuntimeProjectAttachmentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + RuntimeProjectAttachmentServiceClient, + transports.RuntimeProjectAttachmentServiceRestTransport, + "rest", + ), + ], +) +def test_runtime_project_attachment_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + RuntimeProjectAttachmentServiceClient, + transports.RuntimeProjectAttachmentServiceRestTransport, + "rest", + None, + ), + ], +) +def test_runtime_project_attachment_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, + dict, + ], +) +def test_create_runtime_project_attachment_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["runtime_project_attachment"] = { + "name": "name_value", + "runtime_project": "runtime_project_value", + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.meta.fields[ + "runtime_project_attachment" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "runtime_project_attachment" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["runtime_project_attachment"][field]) + ): + del 
request_init["runtime_project_attachment"][field][i][subfield] + else: + del request_init["runtime_project_attachment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment( + name="name_value", + runtime_project="runtime_project_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_runtime_project_attachment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance( + response, runtime_project_attachment_service.RuntimeProjectAttachment + ) + assert response.name == "name_value" + assert response.runtime_project == "runtime_project_value" + + +def test_create_runtime_project_attachment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_runtime_project_attachment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_runtime_project_attachment + ] = mock_rpc + + request = {} + client.create_runtime_project_attachment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_runtime_project_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_runtime_project_attachment_rest_required_fields( + request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, +): + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["runtime_project_attachment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "runtimeProjectAttachmentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "runtimeProjectAttachmentId" in jsonified_request + assert ( + jsonified_request["runtimeProjectAttachmentId"] + == request_init["runtime_project_attachment_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "runtimeProjectAttachmentId" + ] = "runtime_project_attachment_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("runtime_project_attachment_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "runtimeProjectAttachmentId" in jsonified_request + assert ( + jsonified_request["runtimeProjectAttachmentId"] + == "runtime_project_attachment_id_value" + ) + + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_runtime_project_attachment(request) + + expected_params = [ + ( + "runtimeProjectAttachmentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.create_runtime_project_attachment._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("runtimeProjectAttachmentId",)) + & set( + ( + "parent", + "runtimeProjectAttachmentId", + "runtimeProjectAttachment", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), + ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_create_runtime_project_attachment", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_create_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + runtime_project_attachment_service.RuntimeProjectAttachment.to_json( + runtime_project_attachment_service.RuntimeProjectAttachment() + ) + ) + + request = ( + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + runtime_project_attachment_service.RuntimeProjectAttachment() + ) + + client.create_runtime_project_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_runtime_project_attachment(request) + + +def test_create_runtime_project_attachment_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( + name="name_value" + ), + runtime_project_attachment_id="runtime_project_attachment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_runtime_project_attachment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" + % client.transport._host, + args[1], + ) + + +def test_create_runtime_project_attachment_rest_flattened_error( + transport: str = "rest", +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_runtime_project_attachment( + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest(), + parent="parent_value", + runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( + name="name_value" + ), + runtime_project_attachment_id="runtime_project_attachment_id_value", + ) + + +def test_create_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, + dict, + ], +) +def test_get_runtime_project_attachment_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = runtime_project_attachment_service.RuntimeProjectAttachment( + name="name_value", + runtime_project="runtime_project_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_runtime_project_attachment(request) + + # Establish that the response is the type that we expect. + assert isinstance( + response, runtime_project_attachment_service.RuntimeProjectAttachment + ) + assert response.name == "name_value" + assert response.runtime_project == "runtime_project_value" + + +def test_get_runtime_project_attachment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_runtime_project_attachment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_runtime_project_attachment + ] = mock_rpc + + request = {} + client.get_runtime_project_attachment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_runtime_project_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_runtime_project_attachment_rest_required_fields( + request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, +): + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_runtime_project_attachment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_runtime_project_attachment._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), + ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) + 
with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_get_runtime_project_attachment", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_get_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + runtime_project_attachment_service.RuntimeProjectAttachment.to_json( + runtime_project_attachment_service.RuntimeProjectAttachment() + ) + ) + + request = ( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + runtime_project_attachment_service.RuntimeProjectAttachment() + ) + + client.get_runtime_project_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request 
= request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_runtime_project_attachment(request) + + +def test_get_runtime_project_attachment_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_runtime_project_attachment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_runtime_project_attachment_rest_flattened_error(transport: str = "rest"): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_runtime_project_attachment( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest(), + name="name_value", + ) + + +def test_get_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, + dict, + ], +) +def test_list_runtime_project_attachments_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( + next_page_token="next_page_token_value", + ) + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_runtime_project_attachments(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRuntimeProjectAttachmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_runtime_project_attachments_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_runtime_project_attachments + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_runtime_project_attachments + ] = mock_rpc + + request = {} + client.list_runtime_project_attachments(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_runtime_project_attachments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_runtime_project_attachments_rest_required_fields( + request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, +): + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_runtime_project_attachments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_runtime_project_attachments_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_runtime_project_attachments._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_runtime_project_attachments_rest_interceptors(null_interceptor): + transport = 
transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), + ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_list_runtime_project_attachments", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_list_runtime_project_attachments", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest.pb( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() + ) + + request = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() + ) + + client.list_runtime_project_attachments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_runtime_project_attachments_rest_bad_request( + transport: str = "rest", + 
request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_runtime_project_attachments(request) + + +def test_list_runtime_project_attachments_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_runtime_project_attachments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" + % client.transport._host, + args[1], + ) + + +def test_list_runtime_project_attachments_rest_flattened_error(transport: str = "rest"): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_runtime_project_attachments( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest(), + parent="parent_value", + ) + + +def test_list_runtime_project_attachments_rest_pager(transport: str = "rest"): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( + runtime_project_attachments=[ + runtime_project_attachment_service.RuntimeProjectAttachment(), + runtime_project_attachment_service.RuntimeProjectAttachment(), + runtime_project_attachment_service.RuntimeProjectAttachment(), + ], + next_page_token="abc", + ), + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( + runtime_project_attachments=[], + next_page_token="def", + ), + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( + runtime_project_attachments=[ + runtime_project_attachment_service.RuntimeProjectAttachment(), + ], + next_page_token="ghi", + ), + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( + runtime_project_attachments=[ + runtime_project_attachment_service.RuntimeProjectAttachment(), + runtime_project_attachment_service.RuntimeProjectAttachment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( + x + ) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_runtime_project_attachments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, runtime_project_attachment_service.RuntimeProjectAttachment) + for i in results + ) + + pages = 
list( + client.list_runtime_project_attachments(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, + dict, + ], +) +def test_delete_runtime_project_attachment_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_runtime_project_attachment(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_runtime_project_attachment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_runtime_project_attachment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_runtime_project_attachment + ] = mock_rpc + + request = {} + client.delete_runtime_project_attachment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_runtime_project_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_runtime_project_attachment_rest_required_fields( + request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, +): + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_runtime_project_attachment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.delete_runtime_project_attachment._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), + ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_delete_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + pb_message = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = ( + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_runtime_project_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_runtime_project_attachment(request) + + +def test_delete_runtime_project_attachment_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_runtime_project_attachment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_runtime_project_attachment_rest_flattened_error( + transport: str = "rest", +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_runtime_project_attachment( + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest(), + name="name_value", + ) + + +def test_delete_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, + dict, + ], +) +def test_lookup_runtime_project_attachment_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup_runtime_project_attachment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance( + response, + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse, + ) + + +def test_lookup_runtime_project_attachment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.lookup_runtime_project_attachment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.lookup_runtime_project_attachment + ] = mock_rpc + + request = {} + client.lookup_runtime_project_attachment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.lookup_runtime_project_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_lookup_runtime_project_attachment_rest_required_fields( + request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, +): + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() + ) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.lookup_runtime_project_attachment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_lookup_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.lookup_runtime_project_attachment._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lookup_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), + ) + client = 
RuntimeProjectAttachmentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_lookup_runtime_project_attachment", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_lookup_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.to_json( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() + ) + + request = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() + ) + + client.lookup_runtime_project_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_lookup_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + 
request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lookup_runtime_project_attachment(request) + + +def test_lookup_runtime_project_attachment_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.lookup_runtime_project_attachment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*}:lookupRuntimeProjectAttachment" + % client.transport._host, + args[1], + ) + + +def test_lookup_runtime_project_attachment_rest_flattened_error( + transport: str = "rest", +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.lookup_runtime_project_attachment( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest(), + name="name_value", + ) + + +def test_lookup_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RuntimeProjectAttachmentServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = RuntimeProjectAttachmentServiceClient.get_transport_class( + transport_name + )( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_runtime_project_attachment_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RuntimeProjectAttachmentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_runtime_project_attachment_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RuntimeProjectAttachmentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_runtime_project_attachment", + "get_runtime_project_attachment", + "list_runtime_project_attachments", + "delete_runtime_project_attachment", + "lookup_runtime_project_attachment", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_runtime_project_attachment_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RuntimeProjectAttachmentServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_runtime_project_attachment_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RuntimeProjectAttachmentServiceTransport() + adc.assert_called_once() + + +def test_runtime_project_attachment_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RuntimeProjectAttachmentServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_runtime_project_attachment_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_runtime_project_attachment_service_host_no_port(transport_name): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) 
+def test_runtime_project_attachment_service_host_with_port(transport_name): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_runtime_project_attachment_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RuntimeProjectAttachmentServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RuntimeProjectAttachmentServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_runtime_project_attachment._session + session2 = client2.transport.create_runtime_project_attachment._session + assert session1 != session2 + session1 = client1.transport.get_runtime_project_attachment._session + session2 = client2.transport.get_runtime_project_attachment._session + assert session1 != session2 + session1 = client1.transport.list_runtime_project_attachments._session + session2 = client2.transport.list_runtime_project_attachments._session + assert session1 != session2 + session1 = client1.transport.delete_runtime_project_attachment._session + session2 = client2.transport.delete_runtime_project_attachment._session + assert session1 != session2 + session1 = client1.transport.lookup_runtime_project_attachment._session + session2 = client2.transport.lookup_runtime_project_attachment._session + assert session1 != session2 + + +def test_runtime_project_attachment_path(): + project = "squid" + location = "clam" + runtime_project_attachment = "whelk" + expected = 
"projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".format( + project=project, + location=location, + runtime_project_attachment=runtime_project_attachment, + ) + actual = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( + project, location, runtime_project_attachment + ) + assert expected == actual + + +def test_parse_runtime_project_attachment_path(): + expected = { + "project": "octopus", + "location": "oyster", + "runtime_project_attachment": "nudibranch", + } + path = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( + **expected + ) + + # Check that the path construction is reversible. + actual = ( + RuntimeProjectAttachmentServiceClient.parse_runtime_project_attachment_path( + path + ) + ) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = RuntimeProjectAttachmentServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = RuntimeProjectAttachmentServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RuntimeProjectAttachmentServiceClient.parse_common_billing_account_path( + path + ) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = RuntimeProjectAttachmentServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = RuntimeProjectAttachmentServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RuntimeProjectAttachmentServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RuntimeProjectAttachmentServiceClient.common_organization_path( + organization + ) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = RuntimeProjectAttachmentServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RuntimeProjectAttachmentServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = RuntimeProjectAttachmentServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = RuntimeProjectAttachmentServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RuntimeProjectAttachmentServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RuntimeProjectAttachmentServiceClient.common_location_path( + project, location + ) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = RuntimeProjectAttachmentServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RuntimeProjectAttachmentServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = RuntimeProjectAttachmentServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + RuntimeProjectAttachmentServiceClient, + transports.RuntimeProjectAttachmentServiceRestTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git 
a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/async_client.py index 9a08c3a23844..2cc4ae19de5c 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ApplicationsClient).get_transport_class, type(ApplicationsClient) - ) + get_transport_class = ApplicationsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py index d1c87cf27d25..5010433228ac 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py @@ -637,7 +637,7 @@ def __init__( transport_init: Union[ Type[ApplicationsTransport], Callable[..., ApplicationsTransport] ] = ( - type(self).get_transport_class(transport) + ApplicationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ApplicationsTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/async_client.py index c7e0646785a9..f50144f9ebd2 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,10 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AuthorizedCertificatesClient).get_transport_class, - type(AuthorizedCertificatesClient), - ) + get_transport_class = AuthorizedCertificatesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py index fe3bbf2688d3..e2633508c5c4 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py @@ -650,7 +650,7 @@ def __init__( Type[AuthorizedCertificatesTransport], Callable[..., AuthorizedCertificatesTransport], ] = ( - type(self).get_transport_class(transport) + AuthorizedCertificatesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AuthorizedCertificatesTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/async_client.py index dea523dd80a7..4f6356077aaf 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AuthorizedDomainsClient).get_transport_class, type(AuthorizedDomainsClient) - ) + get_transport_class = AuthorizedDomainsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py index 616b77ae1760..64df5a7c08c9 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py @@ -646,7 +646,7 @@ def __init__( Type[AuthorizedDomainsTransport], Callable[..., AuthorizedDomainsTransport], ] = ( - type(self).get_transport_class(transport) + AuthorizedDomainsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AuthorizedDomainsTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/async_client.py index 881137a28182..59539a765ab9 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DomainMappingsClient).get_transport_class, type(DomainMappingsClient) - ) + get_transport_class = DomainMappingsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py index c8666634d5da..cfda1bd8fedb 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py @@ -643,7 +643,7 @@ def __init__( transport_init: Union[ Type[DomainMappingsTransport], Callable[..., DomainMappingsTransport] ] = ( - type(self).get_transport_class(transport) + DomainMappingsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DomainMappingsTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/async_client.py index da32b03138fa..399bca77546a 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(FirewallClient).get_transport_class, type(FirewallClient) - ) + get_transport_class = FirewallClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py index d8551dd7bd6f..ac94c119d5e5 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py @@ -645,7 +645,7 @@ def __init__( transport_init: Union[ Type[FirewallTransport], Callable[..., FirewallTransport] ] = ( - type(self).get_transport_class(transport) + FirewallClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FirewallTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/async_client.py index b7706a13ac9e..2969b655e347 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(InstancesClient).get_transport_class, type(InstancesClient) - ) + get_transport_class = InstancesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py index 9fb962838850..cb551f26c8f2 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py @@ -663,7 +663,7 @@ def __init__( transport_init: Union[ Type[InstancesTransport], Callable[..., InstancesTransport] ] = ( - type(self).get_transport_class(transport) + InstancesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstancesTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/async_client.py index 2bd1de1db05d..6c1c6ff348b1 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -183,9 +182,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ServicesClient).get_transport_class, type(ServicesClient) - ) + get_transport_class = ServicesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py index 7be00be82215..a4440b5042fe 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py @@ -639,7 +639,7 @@ def __init__( transport_init: Union[ Type[ServicesTransport], Callable[..., ServicesTransport] ] = ( - type(self).get_transport_class(transport) + ServicesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServicesTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/async_client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/async_client.py index c60e9d7b90b8..37a9eafb9ce8 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/async_client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VersionsClient).get_transport_class, type(VersionsClient) - ) + get_transport_class = VersionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py index 7ccb087f84db..20dfb5b039de 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py @@ -641,7 +641,7 @@ def __init__( transport_init: Union[ Type[VersionsTransport], Callable[..., VersionsTransport] ] = ( - type(self).get_transport_class(transport) + VersionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VersionsTransport], transport) ) diff --git a/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json b/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json index e8154861b9d3..fb8cbf58355c 100644 --- a/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json +++ b/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-appengine-admin", - "version": "1.11.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py 
b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py index 685ca369d35e..bc6900b9d664 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py @@ -1306,22 +1306,23 @@ async def test_get_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_application - ] = mock_object + ] = mock_rpc request = {} await client.get_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1642,8 +1643,9 @@ def test_create_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_application(request) @@ -1699,26 +1701,28 @@ async def test_create_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_application - ] = mock_object + ] = mock_rpc request = {} await client.create_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1877,8 +1881,9 @@ def test_update_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_application(request) @@ -1934,26 +1939,28 @@ async def test_update_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_application - ] = mock_object + ] = mock_rpc request = {} await client.update_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2177,8 +2184,9 @@ def test_repair_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.repair_application(request) @@ -2234,26 +2242,28 @@ async def test_repair_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.repair_application - ] = mock_object + ] = mock_rpc request = {} await client.repair_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.repair_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py index 43dd20d14a37..4719dc399f1d 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py @@ -1372,22 +1372,23 @@ async def test_list_authorized_certificates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ 
client._client._transport.list_authorized_certificates - ] = mock_object + ] = mock_rpc request = {} await client.list_authorized_certificates(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_authorized_certificates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1893,22 +1894,23 @@ async def test_get_authorized_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_authorized_certificate - ] = mock_object + ] = mock_rpc request = {} await client.get_authorized_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_authorized_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2220,22 +2222,23 @@ async def test_create_authorized_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_authorized_certificate - ] = mock_object + ] = mock_rpc request = {} await client.create_authorized_certificate(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_authorized_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2547,22 +2550,23 @@ async def test_update_authorized_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_authorized_certificate - ] = mock_object + ] = mock_rpc request = {} await client.update_authorized_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_authorized_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2852,22 +2856,23 @@ async def test_delete_authorized_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_authorized_certificate - ] = mock_object + ] = mock_rpc request = {} await client.delete_authorized_certificate(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_authorized_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py index 8ffd7e44e2bd..851b967ad202 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py @@ -1338,22 +1338,23 @@ async def test_list_authorized_domains_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_authorized_domains - ] = mock_object + ] = mock_rpc request = {} await client.list_authorized_domains(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_authorized_domains(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py index 71fcf41de417..8deb1e04cb08 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py @@ -1312,22 +1312,23 @@ async def test_list_domain_mappings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_domain_mappings - ] = mock_object + ] = mock_rpc request = {} await client.list_domain_mappings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_domain_mappings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1817,22 +1818,23 @@ async def test_get_domain_mapping_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_domain_mapping - ] = mock_object + ] = mock_rpc request = {} await client.get_domain_mapping(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_domain_mapping(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2062,8 +2064,9 @@ def test_create_domain_mapping_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_domain_mapping(request) @@ -2119,26 +2122,28 @@ async def test_create_domain_mapping_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_domain_mapping - ] = mock_object + ] = mock_rpc request = {} await client.create_domain_mapping(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_domain_mapping(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2363,8 +2368,9 @@ def test_update_domain_mapping_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_domain_mapping(request) @@ -2420,26 +2426,28 @@ async def test_update_domain_mapping_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_domain_mapping - ] = mock_object + ] = mock_rpc request = {} await client.update_domain_mapping(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_domain_mapping(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2664,8 +2672,9 @@ def test_delete_domain_mapping_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_domain_mapping(request) @@ -2721,26 +2730,28 @@ async def test_delete_domain_mapping_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_domain_mapping - ] = mock_object + ] = mock_rpc request = {} await client.delete_domain_mapping(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_domain_mapping(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py index 79384416507a..ebe3a04e982c 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py @@ -1246,22 +1246,23 @@ async def test_list_ingress_rules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_ingress_rules - ] = mock_object + ] = mock_rpc 
request = {} await client.list_ingress_rules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_ingress_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1744,22 +1745,23 @@ async def test_batch_update_ingress_rules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_update_ingress_rules - ] = mock_object + ] = mock_rpc request = {} await client.batch_update_ingress_rules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_update_ingress_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2051,22 +2053,23 @@ async def test_create_ingress_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_ingress_rule - ] = mock_object + ] = mock_rpc request = {} await client.create_ingress_rule(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_ingress_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2356,22 +2359,23 @@ async def test_get_ingress_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_ingress_rule - ] = mock_object + ] = mock_rpc request = {} await client.get_ingress_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_ingress_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2665,22 +2669,23 @@ async def test_update_ingress_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_ingress_rule - ] = mock_object + ] = mock_rpc request = {} await client.update_ingress_rule(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_ingress_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2964,22 +2969,23 @@ async def test_delete_ingress_rule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_ingress_rule - ] = mock_object + ] = mock_rpc request = {} await client.delete_ingress_rule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_ingress_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py index c93e7998d96d..e0d073e87cb2 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py @@ -1255,22 +1255,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1776,22 +1777,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2030,8 +2032,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -2085,26 +2088,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2314,8 +2319,9 @@ def test_debug_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.debug_instance(request) @@ -2369,26 +2375,28 @@ async def test_debug_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.debug_instance - ] = mock_object + ] = mock_rpc request = {} await client.debug_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.debug_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py index d3508563638b..12288ce8f9bd 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py @@ -1245,22 +1245,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await 
client.list_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1724,22 +1725,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1950,8 +1952,9 @@ def test_update_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_service(request) @@ -2005,26 +2008,28 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2232,8 +2237,9 @@ def test_delete_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service(request) @@ -2287,26 +2293,28 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py index bc4c34625da7..556d1b46336c 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py @@ -1247,22 +1247,23 @@ async def test_list_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_versions - ] = mock_object + ] = mock_rpc request = {} await 
client.list_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1779,22 +1780,23 @@ async def test_get_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_version - ] = mock_object + ] = mock_rpc request = {} await client.get_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2041,8 +2043,9 @@ def test_create_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_version(request) @@ -2096,26 +2099,28 @@ async def test_create_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_version - ] = mock_object + ] = mock_rpc request = {} await client.create_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2323,8 +2328,9 @@ def test_update_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_version(request) @@ -2378,26 +2384,28 @@ async def test_update_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_version - ] = mock_object + ] = mock_rpc request = {} await client.update_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2605,8 +2613,9 @@ def test_delete_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_version(request) @@ -2660,26 +2669,28 @@ async def test_delete_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-appengine-logging/google/cloud/appengine_logging/gapic_version.py b/packages/google-cloud-appengine-logging/google/cloud/appengine_logging/gapic_version.py index 91e6b04fad21..558c8aab67c5 100644 --- a/packages/google-cloud-appengine-logging/google/cloud/appengine_logging/gapic_version.py +++ b/packages/google-cloud-appengine-logging/google/cloud/appengine_logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.4.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-appengine-logging/google/cloud/appengine_logging_v1/gapic_version.py b/packages/google-cloud-appengine-logging/google/cloud/appengine_logging_v1/gapic_version.py index 91e6b04fad21..558c8aab67c5 100644 --- a/packages/google-cloud-appengine-logging/google/cloud/appengine_logging_v1/gapic_version.py +++ b/packages/google-cloud-appengine-logging/google/cloud/appengine_logging_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.4.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py b/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py +++ b/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py b/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py +++ b/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/async_client.py b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/async_client.py index 6893ded4c86a..75e2df6b93de 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/async_client.py +++ b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -216,9 +215,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AppHubClient).get_transport_class, type(AppHubClient) - ) + get_transport_class = AppHubClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py index d0ade49a61d7..970d8975a18c 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py +++ b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py @@ -792,7 +792,7 @@ def __init__( transport_init: Union[ Type[AppHubTransport], Callable[..., AppHubTransport] ] = ( - type(self).get_transport_class(transport) + AppHubClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AppHubTransport], transport) ) diff --git a/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json b/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json index 4d9af8cc701f..f29a5f624a1e 100644 --- a/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json 
+++ b/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apphub", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py b/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py index 7eb90552f015..0cc2bf6405f1 100644 --- a/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py +++ b/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py @@ -1249,22 +1249,23 @@ async def test_lookup_service_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_service_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.lookup_service_project_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_service_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1643,22 +1644,23 @@ async def test_list_service_project_attachments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_service_project_attachments - ] = mock_object + ] = mock_rpc request = {} await client.list_service_project_attachments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_service_project_attachments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2189,8 +2191,9 @@ def test_create_service_project_attachment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service_project_attachment(request) @@ -2246,26 +2249,28 @@ async def test_create_service_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.create_service_project_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2679,22 +2684,23 @@ async def test_get_service_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.get_service_project_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3020,8 +3026,9 @@ def test_delete_service_project_attachment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service_project_attachment(request) @@ -3077,26 +3084,28 @@ async def test_delete_service_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.delete_service_project_attachment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3461,22 +3470,23 @@ async def test_detach_service_project_attachment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.detach_service_project_attachment - ] = mock_object + ] = mock_rpc request = {} await client.detach_service_project_attachment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.detach_service_project_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3855,22 +3865,23 @@ async def test_list_discovered_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_discovered_services - ] = mock_object + ] = mock_rpc request = {} await client.list_discovered_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_discovered_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4449,22 +4460,23 @@ async def test_get_discovered_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_discovered_service - ] = mock_object + ] = mock_rpc request = {} await client.get_discovered_service(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_discovered_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4834,22 +4846,23 @@ async def test_lookup_discovered_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_discovered_service - ] = mock_object + ] = mock_rpc request = {} await client.lookup_discovered_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_discovered_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5225,22 +5238,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5733,8 +5747,9 @@ def test_create_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service(request) @@ -5788,26 +5803,28 @@ async def test_create_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service - ] = mock_object + ] = mock_rpc request = {} await client.create_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6188,22 +6205,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6502,8 +6520,9 @@ def test_update_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_service(request) @@ -6557,26 +6576,28 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6878,8 +6899,9 @@ def test_delete_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service(request) @@ -6933,26 +6955,28 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7320,22 +7344,23 @@ async def test_list_discovered_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_discovered_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_discovered_workloads(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_discovered_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7914,22 +7939,23 @@ async def test_get_discovered_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_discovered_workload - ] = mock_object + ] = mock_rpc request = {} await client.get_discovered_workload(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_discovered_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8299,22 +8325,23 @@ async def test_lookup_discovered_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_discovered_workload - ] = mock_object + ] = mock_rpc request = {} await client.lookup_discovered_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_discovered_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8690,22 +8717,23 @@ async def test_list_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_workloads(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9198,8 +9226,9 @@ def test_create_workload_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workload(request) @@ -9253,26 +9282,28 @@ async def test_create_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workload - ] = mock_object + ] = mock_rpc request = {} await client.create_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9653,22 +9684,23 @@ async def test_get_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workload - ] = mock_object + ] = mock_rpc request = {} await client.get_workload(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9967,8 +9999,9 @@ def test_update_workload_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_workload(request) @@ -10022,26 +10055,28 @@ async def test_update_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workload - ] = mock_object + ] = mock_rpc request = {} await client.update_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10343,8 +10378,9 @@ def test_delete_workload_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_workload(request) @@ -10398,26 +10434,28 @@ async def test_delete_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workload - ] = mock_object + ] = mock_rpc request = {} await client.delete_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10782,22 +10820,23 @@ async def test_list_applications_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_applications - ] = mock_object + ] = mock_rpc request = {} await client.list_applications(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_applications(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11318,8 +11357,9 @@ def test_create_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_application(request) @@ -11375,26 +11415,28 @@ async def test_create_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_application - ] = mock_object + ] = mock_rpc request = {} await client.create_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11783,22 +11825,23 @@ async def test_get_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_application - ] = mock_object + ] = mock_rpc request = {} await client.get_application(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12109,8 +12152,9 @@ def test_update_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_application(request) @@ -12166,26 +12210,28 @@ async def test_update_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_application - ] = mock_object + ] = mock_rpc request = {} await client.update_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12508,8 +12554,9 @@ def test_delete_application_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_application(request) @@ -12565,26 +12612,28 @@ async def test_delete_application_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_application - ] = mock_object + ] = mock_rpc request = {} await client.delete_application(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_application(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py index 3327d5cf80fa..f12091b471e7 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -262,9 +261,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ArtifactRegistryClient).get_transport_class, type(ArtifactRegistryClient) - ) + get_transport_class = ArtifactRegistryClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py index 1dd15dc4327c..76117933a9b3 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py @@ -998,7 +998,7 @@ def __init__( Type[ArtifactRegistryTransport], Callable[..., ArtifactRegistryTransport], ] = ( - type(self).get_transport_class(transport) + ArtifactRegistryClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ArtifactRegistryTransport], transport) ) diff --git 
a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py index f6b57ae22bbe..558c8aab67c5 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/async_client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/async_client.py index 54aab92c1222..53dbf2782f4f 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/async_client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -234,9 +233,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ArtifactRegistryClient).get_transport_class, type(ArtifactRegistryClient) - ) + get_transport_class = ArtifactRegistryClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py index f52a3852aa0d..c11270d9ed9f 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py @@ -833,7 +833,7 @@ def __init__( Type[ArtifactRegistryTransport], Callable[..., ArtifactRegistryTransport], ] = ( - type(self).get_transport_class(transport) + ArtifactRegistryClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ArtifactRegistryTransport], transport) ) diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json index deb29f1cf87e..b27db1a9e71b 100644 --- a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-artifact-registry", - "version": "1.11.5" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json index 49de6b697aa4..c22cba2a0008 100644 --- a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-artifact-registry", - "version": "1.11.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py index 8c9d44c56e33..fb88718b7915 100644 --- a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py +++ b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py @@ -1359,22 +1359,23 @@ async def test_list_docker_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_docker_images - ] = mock_object + ] = mock_rpc request = {} await client.list_docker_images(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_docker_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1949,22 +1950,23 @@ async def test_get_docker_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_docker_image - ] = mock_object + ] = mock_rpc request = {} await client.get_docker_image(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_docker_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2335,22 +2337,23 @@ async def test_list_maven_artifacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_maven_artifacts - ] = mock_object + ] = mock_rpc request = {} await client.list_maven_artifacts(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_maven_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2935,22 +2938,23 @@ async def test_get_maven_artifact_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_maven_artifact - ] = mock_object + ] = mock_rpc request = {} await client.get_maven_artifact(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_maven_artifact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3329,22 +3333,23 @@ async def test_list_npm_packages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_npm_packages - ] = mock_object + ] = mock_rpc request = {} await client.list_npm_packages(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_npm_packages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3914,22 +3919,23 @@ async def test_get_npm_package_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_npm_package - ] = mock_object + ] = mock_rpc request = {} await client.get_npm_package(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_npm_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4294,22 +4300,23 @@ async def test_list_python_packages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_python_packages - ] = mock_object + ] = mock_rpc request = {} await client.list_python_packages(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_python_packages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4891,22 +4898,23 @@ async def test_get_python_package_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_python_package - ] = mock_object + ] = mock_rpc request = {} await client.get_python_package(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_python_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5225,8 +5233,9 @@ def test_import_apt_artifacts_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_apt_artifacts(request) @@ -5282,26 +5291,28 @@ async def test_import_apt_artifacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_apt_artifacts - ] = mock_object + ] = mock_rpc request = {} await client.import_apt_artifacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_apt_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5525,8 +5536,9 @@ def test_import_yum_artifacts_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_yum_artifacts(request) @@ -5582,26 +5594,28 @@ async def test_import_yum_artifacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_yum_artifacts - ] = mock_object + ] = mock_rpc request = {} await client.import_yum_artifacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_yum_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5883,22 +5897,23 @@ async def test_list_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_repositories - ] = mock_object + ] = mock_rpc request = {} await client.list_repositories(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6480,22 +6495,23 @@ async def test_get_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_repository - ] = mock_object + ] = mock_rpc request = {} await client.get_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6812,8 +6828,9 @@ def test_create_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_repository(request) @@ -6869,26 +6886,28 @@ async def test_create_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_repository - ] = mock_object + ] = mock_rpc request = {} await client.create_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7315,22 +7334,23 @@ async def test_update_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_repository - ] = mock_object + ] = mock_rpc request = {} await client.update_repository(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7689,8 +7709,9 @@ def test_delete_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_repository(request) @@ -7746,26 +7767,28 @@ async def test_delete_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_repository - ] = mock_object + ] = mock_rpc request = {} await client.delete_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8123,22 +8146,23 @@ async def test_list_packages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_packages - ] = mock_object + ] = mock_rpc request = {} await client.list_packages(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_packages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8684,22 +8708,23 @@ async def test_get_package_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_package - ] = mock_object + ] = mock_rpc request = {} await client.get_package(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8990,8 +9015,9 @@ def test_delete_package_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_package(request) @@ -9045,26 +9071,28 @@ async def test_delete_package_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_package - ] = mock_object + ] = mock_rpc request = {} await client.delete_package(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9414,22 +9442,23 @@ async def test_list_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_versions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9975,22 +10004,23 @@ async def test_get_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_version - ] = mock_object + ] = mock_rpc request = {} await client.get_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10281,8 +10311,9 @@ def test_delete_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_version(request) @@ -10336,26 +10367,28 @@ async def test_delete_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10656,8 +10689,9 @@ def test_batch_delete_versions_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_delete_versions(request) @@ -10713,26 +10747,28 @@ async def test_batch_delete_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_versions - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_delete_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11102,22 +11138,23 @@ async def test_list_files_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_files - ] = mock_object + ] = mock_rpc request = {} await client.list_files(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11664,22 +11701,23 @@ async def test_get_file_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_file - ] = mock_object + ] = mock_rpc request = {} await client.get_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12030,22 +12068,23 @@ async def test_list_tags_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_tags(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12589,22 +12628,23 @@ async def test_get_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tag - ] = mock_object + ] = mock_rpc request = {} await client.get_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12954,22 +12994,23 @@ async def test_create_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag - ] = mock_object + ] = mock_rpc request = {} await client.create_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13333,22 +13374,23 @@ async def test_update_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag - ] = mock_object + ] = mock_rpc request = {} await client.update_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13696,22 +13738,23 @@ async def test_delete_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14054,22 +14097,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14357,22 +14401,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14668,22 +14713,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14994,22 +15040,23 @@ async def test_get_project_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_project_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_project_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_project_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15388,22 +15435,23 @@ async def test_update_project_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_project_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_project_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_project_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15782,22 +15830,23 @@ async def test_get_vpcsc_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_vpcsc_config - ] = mock_object + ] = mock_rpc request = {} await client.get_vpcsc_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_vpcsc_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16159,22 +16208,23 @@ async def test_update_vpcsc_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_vpcsc_config - ] = mock_object + ] = mock_rpc request = {} await client.update_vpcsc_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_vpcsc_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py index 7399d56bf20a..c4d1aa4088c3 100644 --- a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py +++ b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py @@ -1293,8 +1293,9 @@ def test_import_apt_artifacts_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_apt_artifacts(request) @@ -1350,26 +1351,28 @@ async def test_import_apt_artifacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_apt_artifacts - ] = mock_object + ] = mock_rpc request = {} await client.import_apt_artifacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_apt_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1593,8 +1596,9 @@ def test_import_yum_artifacts_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_yum_artifacts(request) @@ -1650,26 +1654,28 @@ async def test_import_yum_artifacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_yum_artifacts - ] = mock_object + ] = mock_rpc request = {} await client.import_yum_artifacts(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_yum_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1951,22 +1957,23 @@ async def test_list_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_repositories - ] = mock_object + ] = mock_rpc request = {} await client.list_repositories(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2536,22 +2543,23 @@ async def test_get_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_repository - ] = mock_object + ] = mock_rpc request = {} await client.get_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2860,8 +2868,9 @@ def test_create_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_repository(request) @@ -2917,26 +2926,28 @@ async def test_create_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_repository - ] = mock_object + ] = mock_rpc request = {} await client.create_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3351,22 +3362,23 @@ async def test_update_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_repository - ] = mock_object + ] = mock_rpc request = {} await client.update_repository(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3717,8 +3729,9 @@ def test_delete_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_repository(request) @@ -3774,26 +3787,28 @@ async def test_delete_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_repository - ] = mock_object + ] = mock_rpc request = {} await client.delete_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4151,22 +4166,23 @@ async def test_list_packages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_packages - ] = mock_object + ] = mock_rpc request = {} await client.list_packages(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_packages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4712,22 +4728,23 @@ async def test_get_package_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_package - ] = mock_object + ] = mock_rpc request = {} await client.get_package(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5018,8 +5035,9 @@ def test_delete_package_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_package(request) @@ -5073,26 +5091,28 @@ async def test_delete_package_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_package - ] = mock_object + ] = mock_rpc request = {} await client.delete_package(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5442,22 +5462,23 @@ async def test_list_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_versions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6003,22 +6024,23 @@ async def test_get_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_version - ] = mock_object + ] = mock_rpc request = {} await client.get_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6309,8 +6331,9 @@ def test_delete_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_version(request) @@ -6364,26 +6387,28 @@ async def test_delete_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6731,22 +6756,23 @@ async def test_list_files_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_files - ] = mock_object + ] = mock_rpc request = {} await client.list_files(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_files(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7293,22 +7319,23 @@ async def test_get_file_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_file - ] = mock_object + ] = mock_rpc request = {} await client.get_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7659,22 +7686,23 @@ async def test_list_tags_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_tags(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8218,22 +8246,23 @@ async def test_get_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tag - ] = mock_object + ] = mock_rpc request = {} await client.get_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8583,22 +8612,23 @@ async def test_create_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag - ] = mock_object + ] = mock_rpc request = {} await client.create_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8962,22 +8992,23 @@ async def test_update_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag - ] = mock_object + ] = mock_rpc request = {} await client.update_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9325,22 +9356,23 @@ async def test_delete_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9683,22 +9715,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9986,22 +10019,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10297,22 +10331,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10623,22 +10658,23 @@ async def test_get_project_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_project_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_project_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_project_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11017,22 +11053,23 @@ async def test_update_project_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_project_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_project_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_project_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-asset/CHANGELOG.md b/packages/google-cloud-asset/CHANGELOG.md index a038645b6f8a..733c3b3086d1 100644 --- a/packages/google-cloud-asset/CHANGELOG.md +++ b/packages/google-cloud-asset/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-asset/#history +## [3.26.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.26.3...google-cloud-asset-v3.26.4) (2024-09-16) + + +### Documentation + +* [google-cloud-asset] Comments are clarified for certain fields in messages `QueryAssetsResponse` and `ResourceSearchResult` ([#13076](https://github.com/googleapis/google-cloud-python/issues/13076)) ([35b2c45](https://github.com/googleapis/google-cloud-python/commit/35b2c456c6791bc47ffe894f3ef966558cb6c98e)) + ## 
[3.26.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.26.2...google-cloud-asset-v3.26.3) (2024-07-30) diff --git a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py index 83c98d26518e..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py index 83c98d26518e..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py index f520a1810ce5..31793fef2a14 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AssetServiceClient).get_transport_class, type(AssetServiceClient) - ) + get_transport_class = AssetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py index 5ba73fd7b766..423f9eec50b8 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py @@ -765,7 +765,7 @@ def __init__( transport_init: Union[ Type[AssetServiceTransport], Callable[..., AssetServiceTransport] ] = ( - type(self).get_transport_class(transport) + AssetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssetServiceTransport], transport) ) diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py b/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py index 88d2a833272d..1d09f9e767c6 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py @@ -2780,9 +2780,11 @@ class QueryAssetsResponse(proto.Message): valid ``response``. If ``done`` == ``false`` and the query result is being saved - in a output, the output_config field will be set. If + in an output, the output_config field will be set. If ``done`` == ``true``, exactly one of ``error``, - ``query_result`` or ``output_config`` will be set. + ``query_result`` or ``output_config`` will be set. 
[done] is + unset unless the [QueryAssetsResponse] contains a + [QueryAssetsResponse.job_reference]. error (google.rpc.status_pb2.Status): Error status. @@ -2792,10 +2794,10 @@ class QueryAssetsResponse(proto.Message): This field is a member of `oneof`_ ``response``. output_config (google.cloud.asset_v1.types.QueryAssetsOutputConfig): - Output configuration which indicates instead - of being returned in API response on the fly, - the query result will be saved in a specific - output. + Output configuration, which indicates that + instead of being returned in an API response on + the fly, the query result will be saved in a + specific output. This field is a member of `oneof`_ ``response``. """ diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py b/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py index da13dc114c9b..d31228b6e04c 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py @@ -602,7 +602,7 @@ class EffectiveTagDetails(proto.Message): class ResourceSearchResult(proto.Message): r"""A result of Resource Search, containing information of a - cloud resource. Next ID: 34 + cloud resource. Attributes: name (str): @@ -684,8 +684,8 @@ class ResourceSearchResult(proto.Message): - Use a field query. Example: ``location:us-west*`` - Use a free text query. Example: ``us-west*`` labels (MutableMapping[str, str]): - Labels associated with this resource. See `Labelling and - grouping Google Cloud + User labels associated with this resource. See `Labelling + and grouping Google Cloud resources `__ for more information. This field is available only when the resource's Protobuf contains it. 
diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py index 83c98d26518e..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py index 5c0be34f37b1..2448592a9eda 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -183,9 +182,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AssetServiceClient).get_transport_class, type(AssetServiceClient) - ) + get_transport_class = AssetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py index 18bf409e4004..2fcc58f03271 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[AssetServiceTransport], Callable[..., AssetServiceTransport] ] = ( - type(self).get_transport_class(transport) + AssetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssetServiceTransport], transport) ) diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py index 83c98d26518e..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py index 6cc4c143a6ac..f0398dadd045 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -184,9 +183,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AssetServiceClient).get_transport_class, type(AssetServiceClient) - ) + get_transport_class = AssetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py index 2672c17bfe01..15a61ac82944 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py @@ -651,7 +651,7 @@ def __init__( transport_init: Union[ Type[AssetServiceTransport], Callable[..., AssetServiceTransport] ] = ( - type(self).get_transport_class(transport) + AssetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssetServiceTransport], transport) ) diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py index e6f4a2765144..d0552744bb07 100644 --- 
a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py index 83c98d26518e..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py index 3546065a46fd..0cbe928ee9ea 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AssetServiceClient).get_transport_class, type(AssetServiceClient) - ) + get_transport_class = AssetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py index 7f8cf02ede71..63c2052ad74d 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py @@ -701,7 +701,7 @@ def __init__( transport_init: Union[ Type[AssetServiceTransport], Callable[..., AssetServiceTransport] ] = ( - type(self).get_transport_class(transport) + AssetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssetServiceTransport], transport) ) diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index 898877880e13..ada630458cc7 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.26.3" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json index 
f2c681ab954c..d088663067e5 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.26.3" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json index 151bc0122350..6af620ebaf84 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.26.3" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json index 25fc1ab14b6e..1d6c2a34e3ae 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.26.3" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py index a1ef85c12918..8b4fbb714598 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ 
b/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -1217,8 +1217,9 @@ def test_export_assets_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_assets(request) @@ -1272,26 +1273,28 @@ async def test_export_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_assets - ] = mock_object + ] = mock_rpc request = {} await client.export_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1557,22 +1560,23 @@ async def test_list_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assets - ] = mock_object + ] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2123,22 +2127,23 @@ async def test_batch_get_assets_history_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_get_assets_history - ] = mock_object + ] = mock_rpc request = {} await client.batch_get_assets_history(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_get_assets_history(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2423,22 +2428,23 @@ async def test_create_feed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_feed - ] = mock_object + ] = mock_rpc request = {} await client.create_feed(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2801,22 +2807,23 @@ async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_feed - ] = mock_object + ] = mock_rpc request = {} await client.get_feed(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3162,22 +3169,23 @@ async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_feeds - ] = mock_object + ] = mock_rpc request = {} await client.list_feeds(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_feeds(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3531,22 +3539,23 @@ async def test_update_feed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_feed - ] = mock_object + ] = mock_rpc request = {} await client.update_feed(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3892,22 +3901,23 @@ async def test_delete_feed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_feed - ] = mock_object + ] = mock_rpc request = {} await client.delete_feed(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4265,22 +4275,23 @@ async def test_search_all_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_all_resources - ] = mock_object + ] = mock_rpc request = {} await client.search_all_resources(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_all_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4881,22 +4892,23 @@ async def test_search_all_iam_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_all_iam_policies - ] = mock_object + ] = mock_rpc request = {} await client.search_all_iam_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_all_iam_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5480,22 +5492,23 @@ async def test_analyze_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.analyze_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5723,8 +5736,9 @@ def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.analyze_iam_policy_longrunning(request) @@ -5780,26 +5794,28 @@ async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_iam_policy_longrunning - ] = mock_object + ] = mock_rpc request = {} await client.analyze_iam_policy_longrunning(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.analyze_iam_policy_longrunning(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6067,22 +6083,23 @@ async def test_analyze_move_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_move - ] = mock_object + ] = mock_rpc request = {} await client.analyze_move(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_move(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6355,22 +6372,23 @@ async def test_query_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_assets - ] = mock_object + ] = mock_rpc request = {} await client.query_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6662,22 +6680,23 @@ async def test_create_saved_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_saved_query - ] = mock_object + ] = mock_rpc request = {} await client.create_saved_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_saved_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7071,22 +7090,23 @@ async def test_get_saved_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_saved_query - ] = mock_object + ] = mock_rpc request = {} await client.get_saved_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_saved_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7457,22 +7477,23 @@ async def test_list_saved_queries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_saved_queries - ] = mock_object + ] = mock_rpc request = {} await client.list_saved_queries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_saved_queries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8050,22 +8071,23 @@ async def test_update_saved_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_saved_query - ] = mock_object + ] = mock_rpc request = {} await client.update_saved_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_saved_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8445,22 +8467,23 @@ async def test_delete_saved_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_saved_query - ] = mock_object + ] = mock_rpc request = {} await client.delete_saved_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_saved_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8818,22 +8841,23 @@ async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_get_effective_iam_policies - ] = mock_object + ] = mock_rpc request = {} await client.batch_get_effective_iam_policies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_get_effective_iam_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9122,22 +9146,23 @@ async def test_analyze_org_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_org_policies - ] = mock_object + ] = mock_rpc request = {} await client.analyze_org_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_org_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9744,22 +9769,23 @@ async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_r ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_org_policy_governed_containers - ] = mock_object + ] = mock_rpc request = {} await client.analyze_org_policy_governed_containers(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_org_policy_governed_containers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10374,22 +10400,23 @@ async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_org_policy_governed_assets - ] = mock_object + ] = mock_rpc request = {} await client.analyze_org_policy_governed_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_org_policy_governed_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py index ad14021b1c75..94ff99223b67 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py @@ -1277,22 +1277,23 @@ async def test_search_all_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_all_resources - ] = mock_object + ] = mock_rpc request = {} await client.search_all_resources(request) # Establish that the 
underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_all_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1891,22 +1892,23 @@ async def test_search_all_iam_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_all_iam_policies - ] = mock_object + ] = mock_rpc request = {} await client.search_all_iam_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_all_iam_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py index 21121056156d..2fb5df0ff9e6 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py @@ -1270,22 +1270,23 @@ async def test_create_feed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_feed - ] = mock_object + ] = mock_rpc request = {} await client.create_feed(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1643,22 +1644,23 @@ async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_feed - ] = mock_object + ] = mock_rpc request = {} await client.get_feed(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2002,22 +2004,23 @@ async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_feeds - ] = mock_object + ] = mock_rpc request = {} await client.list_feeds(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_feeds(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2368,22 +2371,23 @@ async def test_update_feed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_feed - ] = mock_object + ] = mock_rpc request = {} await client.update_feed(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2727,22 +2731,23 @@ async def test_delete_feed_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_feed - ] = mock_object + ] = mock_rpc request = {} await client.delete_feed(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py index 575062a34e4a..541d85d6aadb 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py @@ -1262,22 +1262,23 @@ async def test_list_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assets - ] = mock_object + ] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py index 88ee289a17e3..558c8aab67c5 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.12.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py index 88ee289a17e3..558c8aab67c5 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py index 673bd86fee98..f8874f2c0b7f 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,10 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AssuredWorkloadsServiceClient).get_transport_class, - type(AssuredWorkloadsServiceClient), - ) + get_transport_class = AssuredWorkloadsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py index eb815b932858..e85690951b49 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py @@ -699,7 +699,7 @@ def __init__( Type[AssuredWorkloadsServiceTransport], Callable[..., AssuredWorkloadsServiceTransport], ] = ( - type(self).get_transport_class(transport) + AssuredWorkloadsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssuredWorkloadsServiceTransport], transport) ) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py index 88ee289a17e3..558c8aab67c5 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.12.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py index b7feed42f6f7..dd46655343c7 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,10 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AssuredWorkloadsServiceClient).get_transport_class, - type(AssuredWorkloadsServiceClient), - ) + get_transport_class = AssuredWorkloadsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py index ded263704f63..449efcd00681 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py @@ -677,7 +677,7 @@ def __init__( Type[AssuredWorkloadsServiceTransport], Callable[..., AssuredWorkloadsServiceTransport], ] = ( - type(self).get_transport_class(transport) + AssuredWorkloadsServiceClient.get_transport_class(transport) if isinstance(transport, str) 
or transport is None else cast(Callable[..., AssuredWorkloadsServiceTransport], transport) ) diff --git a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json index 20c2b8afe354..5ecc555c1f98 100644 --- a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json +++ b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-assured-workloads", - "version": "1.12.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json index f9cd0d03eb89..7bc4e288cc6a 100644 --- a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json +++ b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-assured-workloads", - "version": "1.12.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py index a6b830d6486d..25ced336bc6d 100644 --- a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py +++ 
b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py @@ -1312,8 +1312,9 @@ def test_create_workload_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workload(request) @@ -1367,26 +1368,28 @@ async def test_create_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workload - ] = mock_object + ] = mock_rpc request = {} await client.create_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1774,22 +1777,23 @@ async def test_update_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workload - ] = mock_object + ] = mock_rpc request = {} await client.update_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2181,22 +2185,23 @@ async def test_restrict_allowed_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restrict_allowed_resources - ] = mock_object + ] = mock_rpc request = {} await client.restrict_allowed_resources(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.restrict_allowed_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2462,22 +2467,23 @@ async def test_delete_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workload - ] = mock_object + ] = mock_rpc request = {} await client.delete_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2853,22 +2859,23 @@ async def test_get_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workload - ] = mock_object + ] = mock_rpc request = {} await client.get_workload(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3246,22 +3253,23 @@ async def test_list_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_workloads(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3808,22 +3816,23 @@ async def test_list_violations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_violations - ] = mock_object + ] = mock_rpc request = {} await client.list_violations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_violations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4326,22 +4335,23 @@ async def test_get_violation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_violation - ] = mock_object + ] = mock_rpc request = {} await client.get_violation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_violation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4657,22 +4667,23 @@ async def test_acknowledge_violation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.acknowledge_violation - ] = mock_object + ] = mock_rpc request = {} await client.acknowledge_violation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.acknowledge_violation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py index 97b74c010d95..c15aa8bf4dbd 100644 --- a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py @@ -1312,8 +1312,9 @@ def test_create_workload_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_workload(request) @@ -1367,26 +1368,28 @@ async def test_create_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workload - ] = mock_object + ] = mock_rpc request = {} await client.create_workload(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1771,22 +1774,23 @@ async def test_update_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workload - ] = mock_object + ] = mock_rpc request = {} await client.update_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2115,22 +2119,23 @@ async def test_restrict_allowed_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restrict_allowed_resources - ] = mock_object + ] = mock_rpc request = {} await client.restrict_allowed_resources(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.restrict_allowed_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2396,22 +2401,23 @@ async def test_delete_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workload - ] = mock_object + ] = mock_rpc request = {} await client.delete_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2784,22 +2790,23 @@ async def test_get_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workload - ] = mock_object + ] = mock_rpc request = {} await client.get_workload(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3127,22 +3134,23 @@ async def test_analyze_workload_move_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.analyze_workload_move - ] = mock_object + ] = mock_rpc request = {} await client.analyze_workload_move(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_workload_move(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3447,22 +3455,23 @@ async def test_list_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_workloads(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-automl/google/cloud/automl/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-automl/google/cloud/automl/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/async_client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/async_client.py index 598855f0fc1d..84852b458c9f 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/async_client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -218,9 +217,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AutoMlClient).get_transport_class, type(AutoMlClient) - ) + get_transport_class = AutoMlClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py index ccd37ea0d8fb..f75547d5642f 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py @@ -760,7 +760,7 @@ def __init__( transport_init: Union[ Type[AutoMlTransport], Callable[..., AutoMlTransport] ] = ( - type(self).get_transport_class(transport) + AutoMlClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AutoMlTransport], transport) ) diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/async_client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/async_client.py index a73394386c14..d82bdf58cd78 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/async_client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PredictionServiceClient).get_transport_class, type(PredictionServiceClient) - ) + get_transport_class = PredictionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py index deaab40e9db2..b8d837686197 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py @@ -677,7 +677,7 @@ def __init__( Type[PredictionServiceTransport], Callable[..., PredictionServiceTransport], ] = ( - type(self).get_transport_class(transport) + PredictionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PredictionServiceTransport], transport) ) diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py index cf63574ee44f..558c8aab67c5 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/async_client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/async_client.py index b59caf5e044b..665e597dd641 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/async_client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -231,9 +230,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AutoMlClient).get_transport_class, type(AutoMlClient) - ) + get_transport_class = AutoMlClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py index acec65183c21..cb77a70222f4 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py @@ -819,7 +819,7 @@ def __init__( transport_init: Union[ Type[AutoMlTransport], Callable[..., AutoMlTransport] ] = ( - type(self).get_transport_class(transport) + AutoMlClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AutoMlTransport], transport) ) diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/async_client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/async_client.py index e2ca9c63a3e9..b8ad20c31516 100644 --- 
a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/async_client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PredictionServiceClient).get_transport_class, type(PredictionServiceClient) - ) + get_transport_class = PredictionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py index 6b93a2ce64fb..b52f0c1c9c30 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py @@ -677,7 +677,7 @@ def __init__( Type[PredictionServiceTransport], Callable[..., PredictionServiceTransport], ] = ( - type(self).get_transport_class(transport) + PredictionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PredictionServiceTransport], transport) ) diff --git a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json index 7c633120b887..df593972f2f6 100644 --- a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json +++ b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-automl", - 
"version": "2.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json index 3a9be5be7283..70c0487411a8 100644 --- a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json +++ b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-automl", - "version": "2.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py index b3cdc046cea3..34708e16bcaf 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py @@ -1184,8 +1184,9 @@ def test_create_dataset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_dataset(request) @@ -1239,26 +1240,28 @@ async def test_create_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_dataset - ] = mock_object + ] = mock_rpc request = {} await client.create_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1650,22 +1653,23 @@ async def test_get_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dataset - ] = mock_object + ] = mock_rpc request = {} await client.get_dataset(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2022,22 +2026,23 @@ async def test_list_datasets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_datasets - ] = mock_object + ] = mock_rpc request = {} await client.list_datasets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2588,22 +2593,23 @@ async def test_update_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_dataset - ] = mock_object + ] = mock_rpc request = {} await client.update_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2934,8 +2940,9 @@ def test_delete_dataset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_dataset(request) @@ -2989,26 +2996,28 @@ async def test_delete_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_dataset - ] = mock_object + ] = mock_rpc request = {} await client.delete_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3298,8 +3307,9 @@ def test_import_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_data(request) @@ -3353,26 +3363,28 @@ async def test_import_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_data - ] = mock_object + ] = mock_rpc request = {} await client.import_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3684,8 +3696,9 @@ def test_export_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_data(request) @@ -3739,26 +3752,28 @@ async def test_export_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_data - ] = mock_object + ] = mock_rpc request = {} await client.export_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4156,22 +4171,23 @@ async def test_get_annotation_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_annotation_spec - ] = mock_object + ] = mock_rpc request = {} await client.get_annotation_spec(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_annotation_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4478,8 +4494,9 @@ def test_create_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_model(request) @@ -4533,26 +4550,28 @@ async def test_create_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_model - ] = mock_object + ] = mock_rpc request = {} await client.create_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4942,22 +4961,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5314,22 +5334,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5816,8 +5837,9 @@ def test_delete_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_model(request) @@ -5871,26 +5893,28 @@ async def test_delete_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6244,22 +6268,23 @@ async def test_update_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_model - ] = mock_object + ] = mock_rpc request = {} await client.update_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6590,8 +6615,9 @@ def test_deploy_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_model(request) @@ -6645,26 +6671,28 @@ async def test_deploy_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_model - ] = mock_object + ] = mock_rpc request = {} await client.deploy_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6954,8 +6982,9 @@ def test_undeploy_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undeploy_model(request) @@ -7009,26 +7038,28 @@ async def test_undeploy_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undeploy_model - ] = mock_object + ] = mock_rpc request = {} await client.undeploy_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undeploy_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7318,8 +7349,9 @@ def test_export_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_model(request) @@ -7373,26 +7405,28 @@ async def test_export_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_model - ] = mock_object + ] = mock_rpc request = {} await client.export_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7793,22 +7827,23 @@ async def test_get_model_evaluation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.get_model_evaluation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8190,22 +8225,23 @@ async def test_list_model_evaluations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_model_evaluations - ] = mock_object + ] = mock_rpc request = {} await client.list_model_evaluations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_model_evaluations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py index f21e284bae99..7f6685ecdec3 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py @@ -1329,22 +1329,23 @@ async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.predict - ] = mock_object + ] = mock_rpc request = {} await client.predict(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1666,8 +1667,9 @@ def test_batch_predict_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_predict(request) @@ -1721,26 +1723,28 @@ async def test_batch_predict_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_predict - ] = mock_object + ] = mock_rpc request = {} await client.batch_predict(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py index af1f078f1b51..d79c33b503ca 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py @@ -1262,22 +1262,23 @@ async def test_create_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_dataset - ] = mock_object + ] = mock_rpc request = {} await client.create_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1676,22 +1677,23 @@ async def test_get_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dataset - ] = mock_object + ] = mock_rpc request = {} await client.get_dataset(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2048,22 +2050,23 @@ async def test_list_datasets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_datasets - ] = mock_object + ] = mock_rpc request = {} await client.list_datasets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2614,22 +2617,23 @@ async def test_update_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_dataset - ] = mock_object + ] = mock_rpc request = {} await client.update_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2950,8 +2954,9 @@ def test_delete_dataset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_dataset(request) @@ -3005,26 +3010,28 @@ async def test_delete_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_dataset - ] = mock_object + ] = mock_rpc request = {} await client.delete_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3314,8 +3321,9 @@ def test_import_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_data(request) @@ -3369,26 +3377,28 @@ async def test_import_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_data - ] = mock_object + ] = mock_rpc request = {} await client.import_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3700,8 +3710,9 @@ def test_export_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_data(request) @@ -3755,26 +3766,28 @@ async def test_export_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_data - ] = mock_object + ] = mock_rpc request = {} await client.export_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4172,22 +4185,23 @@ async def test_get_annotation_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_annotation_spec - ] = mock_object + ] = mock_rpc request = {} await client.get_annotation_spec(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_annotation_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4565,22 +4579,23 @@ async def test_get_table_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_table_spec - ] = mock_object + ] = mock_rpc request = {} await client.get_table_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_table_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4945,22 +4960,23 @@ async def test_list_table_specs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_table_specs - ] = mock_object + ] = mock_rpc request = {} await client.list_table_specs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_table_specs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5524,22 +5540,23 @@ async def test_update_table_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_table_spec - ] = mock_object + ] = mock_rpc request = {} await client.update_table_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_table_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5914,22 +5931,23 @@ async def test_get_column_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_column_spec - ] = mock_object + ] = mock_rpc request = {} await client.get_column_spec(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_column_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6296,22 +6314,23 @@ async def test_list_column_specs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_column_specs - ] = mock_object + ] = mock_rpc request = {} await client.list_column_specs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_column_specs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6886,22 +6905,23 @@ async def test_update_column_spec_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_column_spec - ] = mock_object + ] = mock_rpc request = {} await client.update_column_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_column_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7208,8 +7228,9 @@ def test_create_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_model(request) @@ -7263,26 +7284,28 @@ async def test_create_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_model - ] = mock_object + ] = mock_rpc request = {} await client.create_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7669,22 +7692,23 @@ async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model - ] = mock_object + ] = mock_rpc request = {} await client.get_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8039,22 +8063,23 @@ async def test_list_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_models - ] = mock_object + ] = mock_rpc request = {} await client.list_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8541,8 +8566,9 @@ def test_delete_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_model(request) @@ -8596,26 +8622,28 @@ async def test_delete_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8905,8 +8933,9 @@ def test_deploy_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_model(request) @@ -8960,26 +8989,28 @@ async def test_deploy_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_model - ] = mock_object + ] = mock_rpc request = {} await client.deploy_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9269,8 +9300,9 @@ def test_undeploy_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undeploy_model(request) @@ -9324,26 +9356,28 @@ async def test_undeploy_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undeploy_model - ] = mock_object + ] = mock_rpc request = {} await client.undeploy_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undeploy_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9633,8 +9667,9 @@ def test_export_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_model(request) @@ -9688,26 +9723,28 @@ async def test_export_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_model - ] = mock_object + ] = mock_rpc request = {} await client.export_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10042,8 +10079,9 @@ def test_export_evaluated_examples_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_evaluated_examples(request) @@ -10099,26 +10137,28 @@ async def test_export_evaluated_examples_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_evaluated_examples - ] = mock_object + ] = mock_rpc request = {} await client.export_evaluated_examples(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_evaluated_examples(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10525,22 +10565,23 @@ async def test_get_model_evaluation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_model_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.get_model_evaluation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_model_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10922,22 +10963,23 @@ async def test_list_model_evaluations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_model_evaluations - ] = mock_object + ] = mock_rpc request = {} await client.list_model_evaluations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_model_evaluations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py index 0e2018e1f32a..3d674900441e 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py @@ -1330,22 +1330,23 @@ async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.predict - ] = mock_object + ] = mock_rpc request = {} await client.predict(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1667,8 +1668,9 @@ def test_batch_predict_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_predict(request) @@ -1722,26 +1724,28 @@ async def test_batch_predict_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_predict - ] = mock_object + ] = mock_rpc request = {} await client.batch_predict(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-backupdr/CHANGELOG.md b/packages/google-cloud-backupdr/CHANGELOG.md index 52580c971528..55f275c138f5 100644 --- a/packages/google-cloud-backupdr/CHANGELOG.md +++ b/packages/google-cloud-backupdr/CHANGELOG.md @@ -1,5 +1,26 @@ # Changelog +## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-backupdr-v0.1.3...google-cloud-backupdr-v0.1.4) (2024-10-08) + + +### Features + +* [google-cloud-backupdr] Client library for the backupvault api is added ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* Add backupplan proto ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* Add backupplanassociation proto 
([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* Add backupvault_ba proto ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* Add backupvault_gce proto ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) + + +### Documentation + +* A comment for field `management_servers` in message `.google.cloud.backupdr.v1.ListManagementServersResponse` is changed ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* A comment for field `name` in message `.google.cloud.backupdr.v1.GetManagementServerRequest` is changed ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* A comment for field `oauth2_client_id` in message `.google.cloud.backupdr.v1.ManagementServer` is changed ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* A comment for field `parent` in message `.google.cloud.backupdr.v1.CreateManagementServerRequest` is changed ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* A comment for field `parent` in message `.google.cloud.backupdr.v1.ListManagementServersRequest` is changed ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* A comment for field `requested_cancellation` in message `.google.cloud.backupdr.v1.OperationMetadata` is changed ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-backupdr-v0.1.2...google-cloud-backupdr-v0.1.3) (2024-07-30) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py 
b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py index c29b14037f34..5ab4e805a40b 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py @@ -33,6 +33,94 @@ WorkforceIdentityBasedManagementURI, WorkforceIdentityBasedOAuth2ClientID, ) +from google.cloud.backupdr_v1.types.backupplan import ( + BackupPlan, + BackupRule, + BackupWindow, + CreateBackupPlanRequest, + DeleteBackupPlanRequest, + GetBackupPlanRequest, + ListBackupPlansRequest, + ListBackupPlansResponse, + StandardSchedule, + WeekDayOfMonth, +) +from google.cloud.backupdr_v1.types.backupplanassociation import ( + BackupPlanAssociation, + CreateBackupPlanAssociationRequest, + DeleteBackupPlanAssociationRequest, + GetBackupPlanAssociationRequest, + ListBackupPlanAssociationsRequest, + ListBackupPlanAssociationsResponse, + RuleConfigInfo, + TriggerBackupRequest, +) +from google.cloud.backupdr_v1.types.backupvault import ( + Backup, + BackupApplianceBackupConfig, + BackupApplianceLockInfo, + BackupConfigInfo, + BackupConfigState, + BackupLock, + BackupVault, + BackupVaultView, + BackupView, + CreateBackupVaultRequest, + DataSource, + DataSourceBackupApplianceApplication, + DataSourceGcpResource, + DeleteBackupRequest, + DeleteBackupVaultRequest, + FetchUsableBackupVaultsRequest, + FetchUsableBackupVaultsResponse, + GcpBackupConfig, + GcpResource, + GetBackupRequest, + GetBackupVaultRequest, + GetDataSourceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + ListDataSourcesRequest, + ListDataSourcesResponse, + RestoreBackupRequest, + RestoreBackupResponse, + ServiceLockInfo, + TargetResource, + UpdateBackupRequest, + UpdateBackupVaultRequest, + UpdateDataSourceRequest, +) +from google.cloud.backupdr_v1.types.backupvault_ba import ( + BackupApplianceBackupProperties, +) +from google.cloud.backupdr_v1.types.backupvault_gce import ( + 
AcceleratorConfig, + AccessConfig, + AdvancedMachineFeatures, + AliasIpRange, + AllocationAffinity, + AttachedDisk, + ComputeInstanceBackupProperties, + ComputeInstanceDataSourceProperties, + ComputeInstanceRestoreProperties, + ComputeInstanceTargetEnvironment, + ConfidentialInstanceConfig, + CustomerEncryptionKey, + DisplayDevice, + Entry, + GuestOsFeature, + InstanceParams, + KeyRevocationActionType, + Metadata, + NetworkInterface, + NetworkPerformanceConfig, + Scheduling, + SchedulingDuration, + ServiceAccount, + Tags, +) __all__ = ( "BackupDRClient", @@ -48,4 +136,82 @@ "OperationMetadata", "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", + "BackupPlan", + "BackupRule", + "BackupWindow", + "CreateBackupPlanRequest", + "DeleteBackupPlanRequest", + "GetBackupPlanRequest", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "StandardSchedule", + "WeekDayOfMonth", + "BackupPlanAssociation", + "CreateBackupPlanAssociationRequest", + "DeleteBackupPlanAssociationRequest", + "GetBackupPlanAssociationRequest", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "RuleConfigInfo", + "TriggerBackupRequest", + "Backup", + "BackupApplianceBackupConfig", + "BackupApplianceLockInfo", + "BackupConfigInfo", + "BackupLock", + "BackupVault", + "CreateBackupVaultRequest", + "DataSource", + "DataSourceBackupApplianceApplication", + "DataSourceGcpResource", + "DeleteBackupRequest", + "DeleteBackupVaultRequest", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GcpBackupConfig", + "GcpResource", + "GetBackupRequest", + "GetBackupVaultRequest", + "GetDataSourceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "RestoreBackupRequest", + "RestoreBackupResponse", + "ServiceLockInfo", + "TargetResource", + "UpdateBackupRequest", + "UpdateBackupVaultRequest", + 
"UpdateDataSourceRequest", + "BackupConfigState", + "BackupVaultView", + "BackupView", + "BackupApplianceBackupProperties", + "AcceleratorConfig", + "AccessConfig", + "AdvancedMachineFeatures", + "AliasIpRange", + "AllocationAffinity", + "AttachedDisk", + "ComputeInstanceBackupProperties", + "ComputeInstanceDataSourceProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ConfidentialInstanceConfig", + "CustomerEncryptionKey", + "DisplayDevice", + "Entry", + "GuestOsFeature", + "InstanceParams", + "Metadata", + "NetworkInterface", + "NetworkPerformanceConfig", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "Tags", + "KeyRevocationActionType", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py index 114e40645800..937ede8823ef 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py index a2dc2b97f601..eddcfa53658f 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py @@ -32,19 +32,183 @@ WorkforceIdentityBasedManagementURI, WorkforceIdentityBasedOAuth2ClientID, ) +from .types.backupplan import ( + BackupPlan, + BackupRule, + BackupWindow, + CreateBackupPlanRequest, + DeleteBackupPlanRequest, + GetBackupPlanRequest, + ListBackupPlansRequest, + ListBackupPlansResponse, + StandardSchedule, + WeekDayOfMonth, +) +from .types.backupplanassociation import ( + BackupPlanAssociation, + CreateBackupPlanAssociationRequest, + DeleteBackupPlanAssociationRequest, + GetBackupPlanAssociationRequest, + ListBackupPlanAssociationsRequest, + ListBackupPlanAssociationsResponse, + RuleConfigInfo, + TriggerBackupRequest, +) +from .types.backupvault import ( + Backup, + BackupApplianceBackupConfig, + BackupApplianceLockInfo, + BackupConfigInfo, + BackupConfigState, + BackupLock, + BackupVault, + BackupVaultView, + BackupView, + CreateBackupVaultRequest, + DataSource, + DataSourceBackupApplianceApplication, + DataSourceGcpResource, + DeleteBackupRequest, + DeleteBackupVaultRequest, + FetchUsableBackupVaultsRequest, + FetchUsableBackupVaultsResponse, + GcpBackupConfig, + GcpResource, + GetBackupRequest, + GetBackupVaultRequest, + GetDataSourceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + ListDataSourcesRequest, + ListDataSourcesResponse, + RestoreBackupRequest, + RestoreBackupResponse, + ServiceLockInfo, + TargetResource, + UpdateBackupRequest, + UpdateBackupVaultRequest, + UpdateDataSourceRequest, +) +from .types.backupvault_ba import 
BackupApplianceBackupProperties +from .types.backupvault_gce import ( + AcceleratorConfig, + AccessConfig, + AdvancedMachineFeatures, + AliasIpRange, + AllocationAffinity, + AttachedDisk, + ComputeInstanceBackupProperties, + ComputeInstanceDataSourceProperties, + ComputeInstanceRestoreProperties, + ComputeInstanceTargetEnvironment, + ConfidentialInstanceConfig, + CustomerEncryptionKey, + DisplayDevice, + Entry, + GuestOsFeature, + InstanceParams, + KeyRevocationActionType, + Metadata, + NetworkInterface, + NetworkPerformanceConfig, + Scheduling, + SchedulingDuration, + ServiceAccount, + Tags, +) __all__ = ( "BackupDRAsyncClient", + "AcceleratorConfig", + "AccessConfig", + "AdvancedMachineFeatures", + "AliasIpRange", + "AllocationAffinity", + "AttachedDisk", + "Backup", + "BackupApplianceBackupConfig", + "BackupApplianceBackupProperties", + "BackupApplianceLockInfo", + "BackupConfigInfo", + "BackupConfigState", "BackupDRClient", + "BackupLock", + "BackupPlan", + "BackupPlanAssociation", + "BackupRule", + "BackupVault", + "BackupVaultView", + "BackupView", + "BackupWindow", + "ComputeInstanceBackupProperties", + "ComputeInstanceDataSourceProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ConfidentialInstanceConfig", + "CreateBackupPlanAssociationRequest", + "CreateBackupPlanRequest", + "CreateBackupVaultRequest", "CreateManagementServerRequest", + "CustomerEncryptionKey", + "DataSource", + "DataSourceBackupApplianceApplication", + "DataSourceGcpResource", + "DeleteBackupPlanAssociationRequest", + "DeleteBackupPlanRequest", + "DeleteBackupRequest", + "DeleteBackupVaultRequest", "DeleteManagementServerRequest", + "DisplayDevice", + "Entry", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GcpBackupConfig", + "GcpResource", + "GetBackupPlanAssociationRequest", + "GetBackupPlanRequest", + "GetBackupRequest", + "GetBackupVaultRequest", + "GetDataSourceRequest", "GetManagementServerRequest", + 
"GuestOsFeature", + "InstanceParams", + "KeyRevocationActionType", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "ListBackupsRequest", + "ListBackupsResponse", + "ListDataSourcesRequest", + "ListDataSourcesResponse", "ListManagementServersRequest", "ListManagementServersResponse", "ManagementServer", "ManagementURI", + "Metadata", "NetworkConfig", + "NetworkInterface", + "NetworkPerformanceConfig", "OperationMetadata", + "RestoreBackupRequest", + "RestoreBackupResponse", + "RuleConfigInfo", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "ServiceLockInfo", + "StandardSchedule", + "Tags", + "TargetResource", + "TriggerBackupRequest", + "UpdateBackupRequest", + "UpdateBackupVaultRequest", + "UpdateDataSourceRequest", + "WeekDayOfMonth", "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json index 4a9d58bb8dad..902530688c39 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json @@ -10,75 +10,420 @@ "grpc": { "libraryClient": "BackupDRClient", "rpcs": { + "CreateBackupPlan": { + "methods": [ + "create_backup_plan" + ] + }, + "CreateBackupPlanAssociation": { + "methods": [ + "create_backup_plan_association" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateManagementServer": { "methods": [ "create_management_server" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPlan": { + "methods": [ + "delete_backup_plan" + ] + }, + "DeleteBackupPlanAssociation": { + "methods": [ + "delete_backup_plan_association" + ] + }, + "DeleteBackupVault": 
{ + "methods": [ + "delete_backup_vault" + ] + }, "DeleteManagementServer": { "methods": [ "delete_management_server" ] }, + "FetchUsableBackupVaults": { + "methods": [ + "fetch_usable_backup_vaults" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPlan": { + "methods": [ + "get_backup_plan" + ] + }, + "GetBackupPlanAssociation": { + "methods": [ + "get_backup_plan_association" + ] + }, + "GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" ] }, + "ListBackupPlanAssociations": { + "methods": [ + "list_backup_plan_associations" + ] + }, + "ListBackupPlans": { + "methods": [ + "list_backup_plans" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, "ListManagementServers": { "methods": [ "list_management_servers" ] + }, + "RestoreBackup": { + "methods": [ + "restore_backup" + ] + }, + "TriggerBackup": { + "methods": [ + "trigger_backup" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupVault": { + "methods": [ + "update_backup_vault" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] } } }, "grpc-async": { "libraryClient": "BackupDRAsyncClient", "rpcs": { + "CreateBackupPlan": { + "methods": [ + "create_backup_plan" + ] + }, + "CreateBackupPlanAssociation": { + "methods": [ + "create_backup_plan_association" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateManagementServer": { "methods": [ "create_management_server" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPlan": { + "methods": [ + "delete_backup_plan" + ] + }, + "DeleteBackupPlanAssociation": { + "methods": [ + "delete_backup_plan_association" + ] + }, + 
"DeleteBackupVault": { + "methods": [ + "delete_backup_vault" + ] + }, "DeleteManagementServer": { "methods": [ "delete_management_server" ] }, + "FetchUsableBackupVaults": { + "methods": [ + "fetch_usable_backup_vaults" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPlan": { + "methods": [ + "get_backup_plan" + ] + }, + "GetBackupPlanAssociation": { + "methods": [ + "get_backup_plan_association" + ] + }, + "GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" ] }, + "ListBackupPlanAssociations": { + "methods": [ + "list_backup_plan_associations" + ] + }, + "ListBackupPlans": { + "methods": [ + "list_backup_plans" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, "ListManagementServers": { "methods": [ "list_management_servers" ] + }, + "RestoreBackup": { + "methods": [ + "restore_backup" + ] + }, + "TriggerBackup": { + "methods": [ + "trigger_backup" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupVault": { + "methods": [ + "update_backup_vault" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] } } }, "rest": { "libraryClient": "BackupDRClient", "rpcs": { + "CreateBackupPlan": { + "methods": [ + "create_backup_plan" + ] + }, + "CreateBackupPlanAssociation": { + "methods": [ + "create_backup_plan_association" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateManagementServer": { "methods": [ "create_management_server" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPlan": { + "methods": [ + "delete_backup_plan" + ] + }, + "DeleteBackupPlanAssociation": { + "methods": [ + "delete_backup_plan_association" + ] + 
}, + "DeleteBackupVault": { + "methods": [ + "delete_backup_vault" + ] + }, "DeleteManagementServer": { "methods": [ "delete_management_server" ] }, + "FetchUsableBackupVaults": { + "methods": [ + "fetch_usable_backup_vaults" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPlan": { + "methods": [ + "get_backup_plan" + ] + }, + "GetBackupPlanAssociation": { + "methods": [ + "get_backup_plan_association" + ] + }, + "GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" ] }, + "ListBackupPlanAssociations": { + "methods": [ + "list_backup_plan_associations" + ] + }, + "ListBackupPlans": { + "methods": [ + "list_backup_plans" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, "ListManagementServers": { "methods": [ "list_management_servers" ] + }, + "RestoreBackup": { + "methods": [ + "restore_backup" + ] + }, + "TriggerBackup": { + "methods": [ + "trigger_backup" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupVault": { + "methods": [ + "update_backup_vault" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] } } } diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py index 114e40645800..937ede8823ef 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py index 0151899eaf62..ae020d8602f6 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -49,12 +48,21 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore from google.cloud.backupdr_v1.services.backup_dr import pagers -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, + backupvault_ba, + backupvault_gce, +) from .client import BackupDRClient from .transports.base import DEFAULT_CLIENT_INFO, BackupDRTransport @@ -73,6 +81,20 @@ class BackupDRAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = BackupDRClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = BackupDRClient._DEFAULT_UNIVERSE + backup_path = staticmethod(BackupDRClient.backup_path) + parse_backup_path = staticmethod(BackupDRClient.parse_backup_path) + backup_plan_path = staticmethod(BackupDRClient.backup_plan_path) + parse_backup_plan_path = staticmethod(BackupDRClient.parse_backup_plan_path) + backup_plan_association_path 
= staticmethod( + BackupDRClient.backup_plan_association_path + ) + parse_backup_plan_association_path = staticmethod( + BackupDRClient.parse_backup_plan_association_path + ) + backup_vault_path = staticmethod(BackupDRClient.backup_vault_path) + parse_backup_vault_path = staticmethod(BackupDRClient.parse_backup_vault_path) + data_source_path = staticmethod(BackupDRClient.data_source_path) + parse_data_source_path = staticmethod(BackupDRClient.parse_data_source_path) management_server_path = staticmethod(BackupDRClient.management_server_path) parse_management_server_path = staticmethod( BackupDRClient.parse_management_server_path @@ -191,9 +213,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BackupDRClient).get_transport_class, type(BackupDRClient) - ) + get_transport_class = BackupDRClient.get_transport_class def __init__( self, @@ -307,10 +327,10 @@ async def sample_list_management_servers(): parent (:class:`str`): Required. The project and location for which to retrieve management servers information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example - **us-central1**. To retrieve management servers for all - locations, use "-" for the ``{location}`` value. + 'projects/{project_id}/locations/{location}'. In Cloud + BackupDR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve management servers + for all locations, use "-" for the '{location}' value. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -431,7 +451,7 @@ async def sample_get_management_server(): name (:class:`str`): Required. 
Name of the management server resource name, in the format - ``projects/{project_id}/locations/{location}/managementServers/{resource_name}`` + 'projects/{project_id}/locations/{location}/managementServers/{resource_name}' This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -545,10 +565,9 @@ async def sample_create_management_server(): management server instance. parent (:class:`str`): Required. The management server project and location in - the format - ``projects/{project_id}/locations/{location}``. In Cloud - Backup and DR locations map to GCP regions, for example - **us-central1**. + the format 'projects/{project_id}/locations/{location}'. + In Cloud Backup and DR locations map to Google Cloud + regions, for example **us-central1**. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -769,6 +788,2928 @@ async def sample_delete_management_server(): # Done; return the response. return response + async def create_backup_vault( + self, + request: Optional[Union[backupvault.CreateBackupVaultRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_vault: Optional[backupvault.BackupVault] = None, + backup_vault_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new BackupVault in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupVaultRequest, dict]]): + The request object. Message for creating a BackupVault. + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault (:class:`google.cloud.backupdr_v1.types.BackupVault`): + Required. The resource being created + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault_id (:class:`str`): + Required. ID of the requesting object If auto-generating + ID server-side, remove this field and backup_vault_id + from the method_signature of Create RPC + + This corresponds to the ``backup_vault_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_vault, backup_vault_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.CreateBackupVaultRequest): + request = backupvault.CreateBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_vault is not None: + request.backup_vault = backup_vault + if backup_vault_id is not None: + request.backup_vault_id = backup_vault_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_backup_vaults( + self, + request: Optional[Union[backupvault.ListBackupVaultsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupVaultsAsyncPager: + r"""Lists BackupVaults in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupVaultsRequest, dict]]): + The request object. Request message for listing + backupvault stores. + parent (:class:`str`): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsAsyncPager: + Response message for listing + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupVaultsRequest): + request = backupvault.ListBackupVaultsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_vaults + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupVaultsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_usable_backup_vaults( + self, + request: Optional[ + Union[backupvault.FetchUsableBackupVaultsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchUsableBackupVaultsAsyncPager: + r"""FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest, dict]]): + The request object. 
Request message for fetching usable + BackupVaults. + parent (:class:`str`): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsAsyncPager: + Response message for fetching usable + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.FetchUsableBackupVaultsRequest): + request = backupvault.FetchUsableBackupVaultsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_usable_backup_vaults + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchUsableBackupVaultsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup_vault( + self, + request: Optional[Union[backupvault.GetBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.BackupVault: + r"""Gets details of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_vault(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupVaultRequest, dict]]): + The request object. Request message for getting a + BackupVault. + name (:class:`str`): + Required. Name of the backupvault store resource name, + in the format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupVault: + Message describing a BackupVault + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, backupvault.GetBackupVaultRequest): + request = backupvault.GetBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_backup_vault( + self, + request: Optional[Union[backupvault.UpdateBackupVaultRequest, dict]] = None, + *, + backup_vault: Optional[backupvault.BackupVault] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the settings of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.UpdateBackupVaultRequest, dict]]): + The request object. Request message for updating a + BackupVault. + backup_vault (:class:`google.cloud.backupdr_v1.types.BackupVault`): + Required. The resource being updated + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the BackupVault resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_vault, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateBackupVaultRequest): + request = backupvault.UpdateBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_vault is not None: + request.backup_vault = backup_vault + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup_vault.name", request.backup_vault.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def delete_backup_vault( + self, + request: Optional[Union[backupvault.DeleteBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupVaultRequest, dict]]): + The request object. Message for deleting a BackupVault. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.DeleteBackupVaultRequest): + request = backupvault.DeleteBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_data_sources( + self, + request: Optional[Union[backupvault.ListDataSourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataSourcesAsyncPager: + r"""Lists DataSources in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_data_sources(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListDataSourcesRequest, dict]]): + The request object. Request message for listing + DataSources. + parent (:class:`str`): + Required. The project and location for which to retrieve + data sources information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve data sources + for all locations, use "-" for the '{location}' value. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesAsyncPager: + Response message for listing + DataSources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListDataSourcesRequest): + request = backupvault.ListDataSourcesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_data_sources + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataSourcesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_data_source( + self, + request: Optional[Union[backupvault.GetDataSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.DataSource: + r"""Gets details of a DataSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetDataSourceRequest, dict]]): + The request object. Request message for getting a + DataSource instance. + name (:class:`str`): + Required. 
Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSource/{resource_name}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.DataSource: + Message describing a DataSource + object. Datasource object used to + represent Datasource details for both + admin and basic view. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetDataSourceRequest): + request = backupvault.GetDataSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_data_source + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_data_source( + self, + request: Optional[Union[backupvault.UpdateDataSourceRequest, dict]] = None, + *, + data_source: Optional[backupvault.DataSource] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the settings of a DataSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.UpdateDataSourceRequest, dict]]): + The request object. Request message for updating a data + source instance. + data_source (:class:`google.cloud.backupdr_v1.types.DataSource`): + Required. The resource being updated + This corresponds to the ``data_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the DataSource resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.DataSource` Message describing a DataSource object. + Datasource object used to represent Datasource + details for both admin and basic view. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_source, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateDataSourceRequest): + request = backupvault.UpdateDataSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if data_source is not None: + request.data_source = data_source + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_data_source + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_source.name", request.data_source.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.DataSource, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_backups( + self, + request: Optional[Union[backupvault.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsAsyncPager: + r"""Lists Backups in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backups(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupsRequest, dict]]): + The request object. Request message for listing Backups. + parent (:class:`str`): + Required. The project and location for which to retrieve + backup information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve data sources + for all locations, use "-" for the '{location}' value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsAsyncPager: + Response message for listing Backups. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupsRequest): + request = backupvault.ListBackupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup( + self, + request: Optional[Union[backupvault.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.Backup: + r"""Gets details of a Backup. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupRequest, dict]]): + The request object. Request message for getting a Backup. + name (:class:`str`): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.Backup: + Message describing a Backup object. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetBackupRequest): + request = backupvault.GetBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_backup( + self, + request: Optional[Union[backupvault.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[backupvault.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the settings of a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_update_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.UpdateBackupRequest, dict]]): + The request object. Request message for updating a + Backup. + backup (:class:`google.cloud.backupdr_v1.types.Backup`): + Required. The resource being updated + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.Backup` Message + describing a Backup object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateBackupRequest): + request = backupvault.UpdateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup.name", request.backup.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.Backup, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def delete_backup( + self, + request: Optional[Union[backupvault.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupRequest, dict]]): + The request object. Message for deleting a Backup. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.Backup` Message + describing a Backup object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.DeleteBackupRequest): + request = backupvault.DeleteBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.Backup, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def restore_backup( + self, + request: Optional[Union[backupvault.RestoreBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Restore from a Backup + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_restore_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" + + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", + ) + + # Make the request + operation = client.restore_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.RestoreBackupRequest, dict]]): + The request object. Request message for restoring from a + Backup. + name (:class:`str`): + Required. The resource name of the Backup instance, in + the format + 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.RestoreBackupResponse` + Response message for restoring from a Backup. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.RestoreBackupRequest): + request = backupvault.RestoreBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.restore_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.RestoreBackupResponse, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_backup_plan( + self, + request: Optional[Union[backupplan.CreateBackupPlanRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_plan: Optional[backupplan.BackupPlan] = None, + backup_plan_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a BackupPlan + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupPlanRequest, dict]]): + The request object. The request message for creating a ``BackupPlan``. + parent (:class:`str`): + Required. The ``BackupPlan`` project and location in the + format ``projects/{project}/locations/{location}``. In + Cloud BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan (:class:`google.cloud.backupdr_v1.types.BackupPlan`): + Required. The ``BackupPlan`` resource object to create. 
+ This corresponds to the ``backup_plan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_id (:class:`str`): + Required. The name of the ``BackupPlan`` to create. The + name must be unique for the specified project and + location.The name must start with a lowercase letter + followed by up to 62 lowercase letters, numbers, or + hyphens. Pattern, /[a-z][a-z0-9-]{,62}/. + + This corresponds to the ``backup_plan_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlan` A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_plan, backup_plan_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, backupplan.CreateBackupPlanRequest): + request = backupplan.CreateBackupPlanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan is not None: + request.backup_plan = backup_plan + if backup_plan_id is not None: + request.backup_plan_id = backup_plan_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_plan + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplan.BackupPlan, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_backup_plan( + self, + request: Optional[Union[backupplan.GetBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.BackupPlan: + r"""Gets details of a single BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupPlanRequest, dict]]): + The request object. The request message for getting a ``BackupPlan``. + name (:class:`str`): + Required. The resource name of the ``BackupPlan`` to + retrieve. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupPlan: + A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.GetBackupPlanRequest): + request = backupplan.GetBackupPlanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup_plan + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_backup_plans( + self, + request: Optional[Union[backupplan.ListBackupPlansRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlansAsyncPager: + r"""Lists BackupPlans in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backup_plans(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plans(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupPlansRequest, dict]]): + The request object. The request message for getting a list ``BackupPlan``. + parent (:class:`str`): + Required. The project and location for which to retrieve + ``BackupPlans`` information. Format: + ``projects/{project}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for e.g. + **us-central1**. To retrieve backup plans for all + locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansAsyncPager: + The response message for getting a list of BackupPlan. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.ListBackupPlansRequest): + request = backupplan.ListBackupPlansRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_plans + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupPlansAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup_plan( + self, + request: Optional[Union[backupplan.DeleteBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single BackupPlan. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupPlanRequest, dict]]): + The request object. The request message for deleting a ``BackupPlan``. + name (:class:`str`): + Required. The resource name of the ``BackupPlan`` to + delete. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.DeleteBackupPlanRequest): + request = backupplan.DeleteBackupPlanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup_plan + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def create_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_plan_association: Optional[ + backupplanassociation.BackupPlanAssociation + ] = None, + backup_plan_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a BackupPlanAssociation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]]): + The request object. 
Request message for creating a backup + plan. + parent (:class:`str`): + Required. The backup plan association project and + location in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association (:class:`google.cloud.backupdr_v1.types.BackupPlanAssociation`): + Required. The resource being created + This corresponds to the ``backup_plan_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association_id (:class:`str`): + Required. The name of the backup plan + association to create. The name must be + unique for the specified project and + location. + + This corresponds to the ``backup_plan_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any( + [parent, backup_plan_association, backup_plan_association_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.CreateBackupPlanAssociationRequest + ): + request = backupplanassociation.CreateBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan_association is not None: + request.backup_plan_association = backup_plan_association + if backup_plan_association_id is not None: + request.backup_plan_association_id = backup_plan_association_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.GetBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.BackupPlanAssociation: + r"""Gets details of a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest, dict]]): + The request object. Request message for getting a + BackupPlanAssociation resource. + name (:class:`str`): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.GetBackupPlanAssociationRequest + ): + request = backupplanassociation.GetBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_backup_plan_associations( + self, + request: Optional[ + Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlanAssociationsAsyncPager: + r"""Lists BackupPlanAssociations in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]]): + The request object. Request message for List + BackupPlanAssociation + parent (:class:`str`): + Required. The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations + for all locations, use "-" for the ``{location}`` value. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager: + Response message for List + BackupPlanAssociation + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.ListBackupPlanAssociationsRequest + ): + request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_plan_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupPlanAssociationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.DeleteBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest, dict]]): + The request object. 
Request message for deleting a backup + plan association. + name (:class:`str`): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.DeleteBackupPlanAssociationRequest + ): + request = backupplanassociation.DeleteBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def trigger_backup( + self, + request: Optional[ + Union[backupplanassociation.TriggerBackupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + rule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Triggers a new Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.TriggerBackupRequest, dict]]): + The request object. Request message for triggering a + backup. + name (:class:`str`): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rule_id (:class:`str`): + Required. backup rule_id for which a backup needs to be + triggered. + + This corresponds to the ``rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, rule_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplanassociation.TriggerBackupRequest): + request = backupplanassociation.TriggerBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if rule_id is not None: + request.rule_id = rule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.trigger_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py index ea052aabef9a..b884fc527a39 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py @@ -54,12 +54,21 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore from google.cloud.backupdr_v1.services.backup_dr import pagers -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, + backupvault_ba, + backupvault_gce, +) from .transports.base import DEFAULT_CLIENT_INFO, BackupDRTransport from .transports.grpc import BackupDRGrpcTransport @@ -191,6 +200,126 @@ def transport(self) -> BackupDRTransport: """ return self._transport + @staticmethod + def backup_path( + project: str, + location: str, + backupvault: str, + datasource: str, + backup: str, + ) -> str: + """Returns a fully-qualified backup string.""" + return "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + backup=backup, + ) + + @staticmethod + def parse_backup_path(path: str) -> Dict[str, str]: + """Parses a backup path into 
its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupVaults/(?P.+?)/dataSources/(?P.+?)/backups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def backup_plan_path( + project: str, + location: str, + backup_plan: str, + ) -> str: + """Returns a fully-qualified backup_plan string.""" + return ( + "projects/{project}/locations/{location}/backupPlans/{backup_plan}".format( + project=project, + location=location, + backup_plan=backup_plan, + ) + ) + + @staticmethod + def parse_backup_plan_path(path: str) -> Dict[str, str]: + """Parses a backup_plan path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupPlans/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def backup_plan_association_path( + project: str, + location: str, + backup_plan_association: str, + ) -> str: + """Returns a fully-qualified backup_plan_association string.""" + return "projects/{project}/locations/{location}/backupPlanAssociations/{backup_plan_association}".format( + project=project, + location=location, + backup_plan_association=backup_plan_association, + ) + + @staticmethod + def parse_backup_plan_association_path(path: str) -> Dict[str, str]: + """Parses a backup_plan_association path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupPlanAssociations/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def backup_vault_path( + project: str, + location: str, + backupvault: str, + ) -> str: + """Returns a fully-qualified backup_vault string.""" + return ( + "projects/{project}/locations/{location}/backupVaults/{backupvault}".format( + project=project, + location=location, + backupvault=backupvault, + ) + ) + + @staticmethod + def parse_backup_vault_path(path: str) -> Dict[str, str]: + """Parses a backup_vault path into its component segments.""" + m = re.match( + 
r"^projects/(?P.+?)/locations/(?P.+?)/backupVaults/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def data_source_path( + project: str, + location: str, + backupvault: str, + datasource: str, + ) -> str: + """Returns a fully-qualified data_source string.""" + return "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + ) + + @staticmethod + def parse_data_source_path(path: str) -> Dict[str, str]: + """Parses a data_source path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupVaults/(?P.+?)/dataSources/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def management_server_path( project: str, @@ -665,7 +794,7 @@ def __init__( transport_init: Union[ Type[BackupDRTransport], Callable[..., BackupDRTransport] ] = ( - type(self).get_transport_class(transport) + BackupDRClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BackupDRTransport], transport) ) @@ -728,10 +857,10 @@ def sample_list_management_servers(): parent (str): Required. The project and location for which to retrieve management servers information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example - **us-central1**. To retrieve management servers for all - locations, use "-" for the ``{location}`` value. + 'projects/{project_id}/locations/{location}'. In Cloud + BackupDR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve management servers + for all locations, use "-" for the '{location}' value. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -849,7 +978,7 @@ def sample_get_management_server(): name (str): Required. 
Name of the management server resource name, in the format - ``projects/{project_id}/locations/{location}/managementServers/{resource_name}`` + 'projects/{project_id}/locations/{location}/managementServers/{resource_name}' This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -960,10 +1089,9 @@ def sample_create_management_server(): management server instance. parent (str): Required. The management server project and location in - the format - ``projects/{project_id}/locations/{location}``. In Cloud - Backup and DR locations map to GCP regions, for example - **us-central1**. + the format 'projects/{project_id}/locations/{location}'. + In Cloud Backup and DR locations map to Google Cloud + regions, for example **us-central1**. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1178,6 +1306,2869 @@ def sample_delete_management_server(): # Done; return the response. return response + def create_backup_vault( + self, + request: Optional[Union[backupvault.CreateBackupVaultRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_vault: Optional[backupvault.BackupVault] = None, + backup_vault_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new BackupVault in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.CreateBackupVaultRequest, dict]): + The request object. Message for creating a BackupVault. + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being created + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault_id (str): + Required. ID of the requesting object If auto-generating + ID server-side, remove this field and backup_vault_id + from the method_signature of Create RPC + + This corresponds to the ``backup_vault_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_vault, backup_vault_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.CreateBackupVaultRequest): + request = backupvault.CreateBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_vault is not None: + request.backup_vault = backup_vault + if backup_vault_id is not None: + request.backup_vault_id = backup_vault_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def list_backup_vaults( + self, + request: Optional[Union[backupvault.ListBackupVaultsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupVaultsPager: + r"""Lists BackupVaults in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupVaultsRequest, dict]): + The request object. Request message for listing + backupvault stores. + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsPager: + Response message for listing + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupVaultsRequest): + request = backupvault.ListBackupVaultsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_vaults] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListBackupVaultsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_usable_backup_vaults( + self, + request: Optional[ + Union[backupvault.FetchUsableBackupVaultsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchUsableBackupVaultsPager: + r"""FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest, dict]): + The request object. Request message for fetching usable + BackupVaults. + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. 
In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsPager: + Response message for fetching usable + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.FetchUsableBackupVaultsRequest): + request = backupvault.FetchUsableBackupVaultsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.fetch_usable_backup_vaults + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchUsableBackupVaultsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_backup_vault( + self, + request: Optional[Union[backupvault.GetBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.BackupVault: + r"""Gets details of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_vault(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupVaultRequest, dict]): + The request object. Request message for getting a + BackupVault. + name (str): + Required. 
Name of the backupvault store resource name, + in the format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupVault: + Message describing a BackupVault + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetBackupVaultRequest): + request = backupvault.GetBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_backup_vault( + self, + request: Optional[Union[backupvault.UpdateBackupVaultRequest, dict]] = None, + *, + backup_vault: Optional[backupvault.BackupVault] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the settings of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.UpdateBackupVaultRequest, dict]): + The request object. Request message for updating a + BackupVault. + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being updated + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the BackupVault resource by the update. 
+ The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_vault, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateBackupVaultRequest): + request = backupvault.UpdateBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_vault is not None: + request.backup_vault = backup_vault + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup_vault.name", request.backup_vault.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_backup_vault( + self, + request: Optional[Union[backupvault.DeleteBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupVaultRequest, dict]): + The request object. Message for deleting a BackupVault. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.DeleteBackupVaultRequest): + request = backupvault.DeleteBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_data_sources( + self, + request: Optional[Union[backupvault.ListDataSourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataSourcesPager: + r"""Lists DataSources in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_data_sources(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListDataSourcesRequest, dict]): + The request object. Request message for listing + DataSources. + parent (str): + Required. The project and location for which to retrieve + data sources information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve data sources + for all locations, use "-" for the '{location}' value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesPager: + Response message for listing + DataSources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListDataSourcesRequest): + request = backupvault.ListDataSourcesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_sources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataSourcesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_data_source( + self, + request: Optional[Union[backupvault.GetDataSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.DataSource: + r"""Gets details of a DataSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetDataSourceRequest, dict]): + The request object. Request message for getting a + DataSource instance. + name (str): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSource/{resource_name}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.DataSource: + Message describing a DataSource + object. Datasource object used to + represent Datasource details for both + admin and basic view. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetDataSourceRequest): + request = backupvault.GetDataSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_data_source( + self, + request: Optional[Union[backupvault.UpdateDataSourceRequest, dict]] = None, + *, + data_source: Optional[backupvault.DataSource] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the settings of a DataSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.UpdateDataSourceRequest, dict]): + The request object. Request message for updating a data + source instance. + data_source (google.cloud.backupdr_v1.types.DataSource): + Required. The resource being updated + This corresponds to the ``data_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the DataSource resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.DataSource` Message describing a DataSource object. + Datasource object used to represent Datasource + details for both admin and basic view. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_source, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateDataSourceRequest): + request = backupvault.UpdateDataSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_source is not None: + request.data_source = data_source + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_source.name", request.data_source.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.DataSource, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def list_backups( + self, + request: Optional[Union[backupvault.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsPager: + r"""Lists Backups in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backups(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupsRequest, dict]): + The request object. Request message for listing Backups. + parent (str): + Required. The project and location for which to retrieve + backup information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve data sources + for all locations, use "-" for the '{location}' value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsPager: + Response message for listing Backups. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupsRequest): + request = backupvault.ListBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_backup( + self, + request: Optional[Union[backupvault.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.Backup: + r"""Gets details of a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupRequest, dict]): + The request object. Request message for getting a Backup. + name (str): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.Backup: + Message describing a Backup object. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetBackupRequest): + request = backupvault.GetBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_backup( + self, + request: Optional[Union[backupvault.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[backupvault.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the settings of a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_update_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.UpdateBackupRequest, dict]): + The request object. Request message for updating a + Backup. + backup (google.cloud.backupdr_v1.types.Backup): + Required. The resource being updated + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.Backup` Message + describing a Backup object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateBackupRequest): + request = backupvault.UpdateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup.name", request.backup.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.Backup, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def delete_backup( + self, + request: Optional[Union[backupvault.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupRequest, dict]): + The request object. Message for deleting a Backup. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.Backup` Message + describing a Backup object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.DeleteBackupRequest): + request = backupvault.DeleteBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.Backup, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def restore_backup( + self, + request: Optional[Union[backupvault.RestoreBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restore from a Backup + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_restore_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" + + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", + ) + + # Make the request + operation = client.restore_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.RestoreBackupRequest, dict]): + The request object. Request message for restoring from a + Backup. + name (str): + Required. The resource name of the Backup instance, in + the format + 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.RestoreBackupResponse` + Response message for restoring from a Backup. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.RestoreBackupRequest): + request = backupvault.RestoreBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.RestoreBackupResponse, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_backup_plan( + self, + request: Optional[Union[backupplan.CreateBackupPlanRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_plan: Optional[backupplan.BackupPlan] = None, + backup_plan_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a BackupPlan + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the 
request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.CreateBackupPlanRequest, dict]): + The request object. The request message for creating a ``BackupPlan``. + parent (str): + Required. The ``BackupPlan`` project and location in the + format ``projects/{project}/locations/{location}``. In + Cloud BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan (google.cloud.backupdr_v1.types.BackupPlan): + Required. The ``BackupPlan`` resource object to create. + This corresponds to the ``backup_plan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_id (str): + Required. The name of the ``BackupPlan`` to create. The + name must be unique for the specified project and + location.The name must start with a lowercase letter + followed by up to 62 lowercase letters, numbers, or + hyphens. Pattern, /[a-z][a-z0-9-]{,62}/. + + This corresponds to the ``backup_plan_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlan` A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. 
Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_plan, backup_plan_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.CreateBackupPlanRequest): + request = backupplan.CreateBackupPlanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan is not None: + request.backup_plan = backup_plan + if backup_plan_id is not None: + request.backup_plan_id = backup_plan_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup_plan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupplan.BackupPlan, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def get_backup_plan( + self, + request: Optional[Union[backupplan.GetBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.BackupPlan: + r"""Gets details of a single BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupPlanRequest, dict]): + The request object. The request message for getting a ``BackupPlan``. + name (str): + Required. The resource name of the ``BackupPlan`` to + retrieve. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.backupdr_v1.types.BackupPlan: + A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.GetBackupPlanRequest): + request = backupplan.GetBackupPlanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup_plan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_backup_plans( + self, + request: Optional[Union[backupplan.ListBackupPlansRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlansPager: + r"""Lists BackupPlans in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_plans(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plans(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupPlansRequest, dict]): + The request object. The request message for getting a list ``BackupPlan``. + parent (str): + Required. The project and location for which to retrieve + ``BackupPlans`` information. Format: + ``projects/{project}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for e.g. + **us-central1**. To retrieve backup plans for all + locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansPager: + The response message for getting a list of BackupPlan. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.ListBackupPlansRequest): + request = backupplan.ListBackupPlansRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_plans] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListBackupPlansPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_backup_plan( + self, + request: Optional[Union[backupplan.DeleteBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupPlanRequest, dict]): + The request object. The request message for deleting a ``BackupPlan``. + name (str): + Required. The resource name of the ``BackupPlan`` to + delete. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.DeleteBackupPlanRequest): + request = backupplan.DeleteBackupPlanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup_plan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_plan_association: Optional[ + backupplanassociation.BackupPlanAssociation + ] = None, + backup_plan_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a BackupPlanAssociation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]): + The request object. Request message for creating a backup + plan. + parent (str): + Required. The backup plan association project and + location in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation): + Required. The resource being created + This corresponds to the ``backup_plan_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association_id (str): + Required. The name of the backup plan + association to create. The name must be + unique for the specified project and + location. 
+ + This corresponds to the ``backup_plan_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, backup_plan_association, backup_plan_association_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.CreateBackupPlanAssociationRequest + ): + request = backupplanassociation.CreateBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan_association is not None: + request.backup_plan_association = backup_plan_association + if backup_plan_association_id is not None: + request.backup_plan_association_id = backup_plan_association_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.create_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.GetBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.BackupPlanAssociation: + r"""Gets details of a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest, dict]): + The request object. Request message for getting a + BackupPlanAssociation resource. + name (str): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.GetBackupPlanAssociationRequest + ): + request = backupplanassociation.GetBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_backup_plan_associations( + self, + request: Optional[ + Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlanAssociationsPager: + r"""Lists BackupPlanAssociations in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]): + The request object. Request message for List + BackupPlanAssociation + parent (str): + Required. The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations + for all locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager: + Response message for List + BackupPlanAssociation + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.ListBackupPlanAssociationsRequest + ): + request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_backup_plan_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupPlanAssociationsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.DeleteBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single BackupPlanAssociation. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest, dict]): + The request object. Request message for deleting a backup + plan association. + name (str): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.DeleteBackupPlanAssociationRequest + ): + request = backupplanassociation.DeleteBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def trigger_backup( + self, + request: Optional[ + Union[backupplanassociation.TriggerBackupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + rule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Triggers a new Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.TriggerBackupRequest, dict]): + The request object. Request message for triggering a + backup. + name (str): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rule_id (str): + Required. backup rule_id for which a backup needs to be + triggered. + + This corresponds to the ``rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, rule_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplanassociation.TriggerBackupRequest): + request = backupplanassociation.TriggerBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if rule_id is not None: + request.rule_id = rule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.trigger_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "BackupDRClient": return self diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py index 462c609c80a8..3594ed629a56 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py @@ -38,7 +38,12 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) class ListManagementServersPager: @@ -191,3 +196,921 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupVaultsPager: + """A pager for iterating through ``list_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.ListBackupVaultsResponse], + request: backupvault.ListBackupVaultsRequest, + response: backupvault.ListBackupVaultsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.ListBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.ListBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.BackupVault]: + for page in self.pages: + yield from page.backup_vaults + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupVaultsAsyncPager: + """A pager for iterating through ``list_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.ListBackupVaultsResponse]], + request: backupvault.ListBackupVaultsRequest, + response: backupvault.ListBackupVaultsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.ListBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.ListBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.BackupVault]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_vaults: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchUsableBackupVaultsPager: + """A pager for iterating 
through ``fetch_usable_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchUsableBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.FetchUsableBackupVaultsResponse], + request: backupvault.FetchUsableBackupVaultsRequest, + response: backupvault.FetchUsableBackupVaultsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.FetchUsableBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.FetchUsableBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.BackupVault]: + for page in self.pages: + yield from page.backup_vaults + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchUsableBackupVaultsAsyncPager: + """A pager for iterating through ``fetch_usable_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchUsableBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.FetchUsableBackupVaultsResponse]], + request: backupvault.FetchUsableBackupVaultsRequest, + response: backupvault.FetchUsableBackupVaultsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.FetchUsableBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.FetchUsableBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.BackupVault]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_vaults: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class 
ListDataSourcesPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.ListDataSourcesResponse], + request: backupvault.ListDataSourcesRequest, + response: backupvault.ListDataSourcesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListDataSourcesRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListDataSourcesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.ListDataSourcesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.DataSource]: + for page in self.pages: + yield from page.data_sources + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataSourcesAsyncPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.ListDataSourcesResponse]], + request: backupvault.ListDataSourcesRequest, + response: backupvault.ListDataSourcesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListDataSourcesRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListDataSourcesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.ListDataSourcesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.DataSource]: + async def async_generator(): + async for page in self.pages: + for response in page.data_sources: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.ListBackupsResponse], + request: backupvault.ListBackupsRequest, + response: backupvault.ListBackupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.ListBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsAsyncPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.ListBackupsResponse]], + request: backupvault.ListBackupsRequest, + response: backupvault.ListBackupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.ListBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.Backup]: + async def async_generator(): + async for page in self.pages: + for response in page.backups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlansPager: + """A pager for iterating through ``list_backup_plans`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_plans`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupPlans`` requests and continue to iterate + through the ``backup_plans`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupplan.ListBackupPlansResponse], + request: backupplan.ListBackupPlansRequest, + response: backupplan.ListBackupPlansResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlansRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlansResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupplan.ListBackupPlansRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupplan.ListBackupPlansResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupplan.BackupPlan]: + for page in self.pages: + yield from page.backup_plans + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlansAsyncPager: + """A pager for iterating through ``list_backup_plans`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_plans`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupPlans`` requests and continue to iterate + through the ``backup_plans`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupplan.ListBackupPlansResponse]], + request: backupplan.ListBackupPlansRequest, + response: backupplan.ListBackupPlansResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlansRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlansResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupplan.ListBackupPlansRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupplan.ListBackupPlansResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupplan.BackupPlan]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_plans: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlanAssociationsPager: + """A pager for iterating through ``list_backup_plan_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_plan_associations`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupPlanAssociations`` requests and continue to iterate + through the ``backup_plan_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupplanassociation.ListBackupPlanAssociationsResponse], + request: backupplanassociation.ListBackupPlanAssociationsRequest, + response: backupplanassociation.ListBackupPlanAssociationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[backupplanassociation.ListBackupPlanAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupplanassociation.BackupPlanAssociation]: + for page in self.pages: + yield from page.backup_plan_associations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlanAssociationsAsyncPager: + """A pager for iterating through ``list_backup_plan_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_plan_associations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupPlanAssociations`` requests and continue to iterate + through the ``backup_plan_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse] + ], + request: backupplanassociation.ListBackupPlanAssociationsRequest, + response: backupplanassociation.ListBackupPlanAssociationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[backupplanassociation.ListBackupPlanAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupplanassociation.BackupPlanAssociation]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_plan_associations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py index 0b741d07dfc8..e11f0ea29379 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py @@ -29,7 +29,12 @@ from google.oauth2 import service_account # type: ignore from google.cloud.backupdr_v1 import gapic_version as package_version -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -170,6 +175,202 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, 
client_info=client_info, ), + self.create_backup_vault: gapic_v1.method.wrap_method( + self.create_backup_vault, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_backup_vaults: gapic_v1.method.wrap_method( + self.list_backup_vaults, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_usable_backup_vaults: gapic_v1.method.wrap_method( + self.fetch_usable_backup_vaults, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup_vault: gapic_v1.method.wrap_method( + self.get_backup_vault, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup_vault: gapic_v1.method.wrap_method( + self.update_backup_vault, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup_vault: gapic_v1.method.wrap_method( + self.delete_backup_vault, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_data_sources: gapic_v1.method.wrap_method( + self.list_data_sources, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( 
+ core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_data_source: gapic_v1.method.wrap_method( + self.get_data_source, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_data_source: gapic_v1.method.wrap_method( + self.update_data_source, + default_timeout=60.0, + client_info=client_info, + ), + self.list_backups: gapic_v1.method.wrap_method( + self.list_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method.wrap_method( + self.get_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup: gapic_v1.method.wrap_method( + self.update_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method.wrap_method( + self.delete_backup, + default_timeout=None, + client_info=client_info, + ), + self.restore_backup: gapic_v1.method.wrap_method( + self.restore_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.create_backup_plan: gapic_v1.method.wrap_method( + self.create_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan: gapic_v1.method.wrap_method( + self.get_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plans: gapic_v1.method.wrap_method( + self.list_backup_plans, + default_timeout=None, + client_info=client_info, + ), + 
self.delete_backup_plan: gapic_v1.method.wrap_method( + self.delete_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.create_backup_plan_association: gapic_v1.method.wrap_method( + self.create_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan_association: gapic_v1.method.wrap_method( + self.get_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plan_associations: gapic_v1.method.wrap_method( + self.list_backup_plan_associations, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup_plan_association: gapic_v1.method.wrap_method( + self.delete_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.trigger_backup: gapic_v1.method.wrap_method( + self.trigger_backup, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -225,6 +426,233 @@ def delete_management_server( ]: raise NotImplementedError() + @property + def create_backup_vault( + self, + ) -> Callable[ + [backupvault.CreateBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], + Union[ + backupvault.ListBackupVaultsResponse, + Awaitable[backupvault.ListBackupVaultsResponse], + ], + ]: + raise NotImplementedError() + + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + Union[ + backupvault.FetchUsableBackupVaultsResponse, + Awaitable[backupvault.FetchUsableBackupVaultsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_backup_vault( + self, + ) -> Callable[ + [backupvault.GetBackupVaultRequest], + Union[backupvault.BackupVault, Awaitable[backupvault.BackupVault]], + ]: + raise NotImplementedError() + + @property + def update_backup_vault( + 
self, + ) -> Callable[ + [backupvault.UpdateBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup_vault( + self, + ) -> Callable[ + [backupvault.DeleteBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], + Union[ + backupvault.ListDataSourcesResponse, + Awaitable[backupvault.ListDataSourcesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_data_source( + self, + ) -> Callable[ + [backupvault.GetDataSourceRequest], + Union[backupvault.DataSource, Awaitable[backupvault.DataSource]], + ]: + raise NotImplementedError() + + @property + def update_data_source( + self, + ) -> Callable[ + [backupvault.UpdateDataSourceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backups( + self, + ) -> Callable[ + [backupvault.ListBackupsRequest], + Union[ + backupvault.ListBackupsResponse, Awaitable[backupvault.ListBackupsResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_backup( + self, + ) -> Callable[ + [backupvault.GetBackupRequest], + Union[backupvault.Backup, Awaitable[backupvault.Backup]], + ]: + raise NotImplementedError() + + @property + def update_backup( + self, + ) -> Callable[ + [backupvault.UpdateBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup( + self, + ) -> Callable[ + [backupvault.DeleteBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def restore_backup( + self, + ) -> Callable[ + [backupvault.RestoreBackupRequest], + Union[operations_pb2.Operation, 
Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_backup_plan( + self, + ) -> Callable[ + [backupplan.CreateBackupPlanRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_backup_plan( + self, + ) -> Callable[ + [backupplan.GetBackupPlanRequest], + Union[backupplan.BackupPlan, Awaitable[backupplan.BackupPlan]], + ]: + raise NotImplementedError() + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], + Union[ + backupplan.ListBackupPlansResponse, + Awaitable[backupplan.ListBackupPlansResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_backup_plan( + self, + ) -> Callable[ + [backupplan.DeleteBackupPlanRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + Union[ + backupplanassociation.BackupPlanAssociation, + Awaitable[backupplanassociation.BackupPlanAssociation], + ], + ]: + raise NotImplementedError() + + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + Union[ + backupplanassociation.ListBackupPlanAssociationsResponse, + Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + 
]: + raise NotImplementedError() + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py index 39a429c884c0..63c287d5bfa1 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py @@ -26,7 +26,12 @@ from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) from .base import DEFAULT_CLIENT_INFO, BackupDRTransport @@ -361,6 +366,637 @@ def delete_management_server( ) return self._stubs["delete_management_server"] + @property + def create_backup_vault( + self, + ) -> Callable[[backupvault.CreateBackupVaultRequest], operations_pb2.Operation]: + r"""Return a callable for the create backup vault method over gRPC. + + Creates a new BackupVault in a given project and + location. + + Returns: + Callable[[~.CreateBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_backup_vault" not in self._stubs: + self._stubs["create_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupVault", + request_serializer=backupvault.CreateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_vault"] + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], backupvault.ListBackupVaultsResponse + ]: + r"""Return a callable for the list backup vaults method over gRPC. + + Lists BackupVaults in a given project and location. + + Returns: + Callable[[~.ListBackupVaultsRequest], + ~.ListBackupVaultsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_vaults" not in self._stubs: + self._stubs["list_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupVaults", + request_serializer=backupvault.ListBackupVaultsRequest.serialize, + response_deserializer=backupvault.ListBackupVaultsResponse.deserialize, + ) + return self._stubs["list_backup_vaults"] + + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + backupvault.FetchUsableBackupVaultsResponse, + ]: + r"""Return a callable for the fetch usable backup vaults method over gRPC. + + FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + Returns: + Callable[[~.FetchUsableBackupVaultsRequest], + ~.FetchUsableBackupVaultsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_usable_backup_vaults" not in self._stubs: + self._stubs["fetch_usable_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchUsableBackupVaults", + request_serializer=backupvault.FetchUsableBackupVaultsRequest.serialize, + response_deserializer=backupvault.FetchUsableBackupVaultsResponse.deserialize, + ) + return self._stubs["fetch_usable_backup_vaults"] + + @property + def get_backup_vault( + self, + ) -> Callable[[backupvault.GetBackupVaultRequest], backupvault.BackupVault]: + r"""Return a callable for the get backup vault method over gRPC. + + Gets details of a BackupVault. + + Returns: + Callable[[~.GetBackupVaultRequest], + ~.BackupVault]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_vault" not in self._stubs: + self._stubs["get_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupVault", + request_serializer=backupvault.GetBackupVaultRequest.serialize, + response_deserializer=backupvault.BackupVault.deserialize, + ) + return self._stubs["get_backup_vault"] + + @property + def update_backup_vault( + self, + ) -> Callable[[backupvault.UpdateBackupVaultRequest], operations_pb2.Operation]: + r"""Return a callable for the update backup vault method over gRPC. + + Updates the settings of a BackupVault. + + Returns: + Callable[[~.UpdateBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup_vault" not in self._stubs: + self._stubs["update_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackupVault", + request_serializer=backupvault.UpdateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_vault"] + + @property + def delete_backup_vault( + self, + ) -> Callable[[backupvault.DeleteBackupVaultRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup vault method over gRPC. + + Deletes a BackupVault. + + Returns: + Callable[[~.DeleteBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_vault" not in self._stubs: + self._stubs["delete_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupVault", + request_serializer=backupvault.DeleteBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_vault"] + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], backupvault.ListDataSourcesResponse + ]: + r"""Return a callable for the list data sources method over gRPC. + + Lists DataSources in a given project and location. + + Returns: + Callable[[~.ListDataSourcesRequest], + ~.ListDataSourcesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListDataSources", + request_serializer=backupvault.ListDataSourcesRequest.serialize, + response_deserializer=backupvault.ListDataSourcesResponse.deserialize, + ) + return self._stubs["list_data_sources"] + + @property + def get_data_source( + self, + ) -> Callable[[backupvault.GetDataSourceRequest], backupvault.DataSource]: + r"""Return a callable for the get data source method over gRPC. + + Gets details of a DataSource. + + Returns: + Callable[[~.GetDataSourceRequest], + ~.DataSource]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetDataSource", + request_serializer=backupvault.GetDataSourceRequest.serialize, + response_deserializer=backupvault.DataSource.deserialize, + ) + return self._stubs["get_data_source"] + + @property + def update_data_source( + self, + ) -> Callable[[backupvault.UpdateDataSourceRequest], operations_pb2.Operation]: + r"""Return a callable for the update data source method over gRPC. + + Updates the settings of a DataSource. + + Returns: + Callable[[~.UpdateDataSourceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_data_source" not in self._stubs: + self._stubs["update_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateDataSource", + request_serializer=backupvault.UpdateDataSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_data_source"] + + @property + def list_backups( + self, + ) -> Callable[[backupvault.ListBackupsRequest], backupvault.ListBackupsResponse]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. + + Returns: + Callable[[~.ListBackupsRequest], + ~.ListBackupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackups", + request_serializer=backupvault.ListBackupsRequest.serialize, + response_deserializer=backupvault.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup( + self, + ) -> Callable[[backupvault.GetBackupRequest], backupvault.Backup]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a Backup. + + Returns: + Callable[[~.GetBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackup", + request_serializer=backupvault.GetBackupRequest.serialize, + response_deserializer=backupvault.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def update_backup( + self, + ) -> Callable[[backupvault.UpdateBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the update backup method over gRPC. + + Updates the settings of a Backup. + + Returns: + Callable[[~.UpdateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackup", + request_serializer=backupvault.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[[backupvault.DeleteBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackup", + request_serializer=backupvault.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def restore_backup( + self, + ) -> Callable[[backupvault.RestoreBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the restore backup method over gRPC. + + Restore from a Backup + + Returns: + Callable[[~.RestoreBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_backup" not in self._stubs: + self._stubs["restore_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/RestoreBackup", + request_serializer=backupvault.RestoreBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_backup"] + + @property + def create_backup_plan( + self, + ) -> Callable[[backupplan.CreateBackupPlanRequest], operations_pb2.Operation]: + r"""Return a callable for the create backup plan method over gRPC. + + Create a BackupPlan + + Returns: + Callable[[~.CreateBackupPlanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_backup_plan" not in self._stubs: + self._stubs["create_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlan", + request_serializer=backupplan.CreateBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan"] + + @property + def get_backup_plan( + self, + ) -> Callable[[backupplan.GetBackupPlanRequest], backupplan.BackupPlan]: + r"""Return a callable for the get backup plan method over gRPC. + + Gets details of a single BackupPlan. + + Returns: + Callable[[~.GetBackupPlanRequest], + ~.BackupPlan]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan" not in self._stubs: + self._stubs["get_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlan", + request_serializer=backupplan.GetBackupPlanRequest.serialize, + response_deserializer=backupplan.BackupPlan.deserialize, + ) + return self._stubs["get_backup_plan"] + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], backupplan.ListBackupPlansResponse + ]: + r"""Return a callable for the list backup plans method over gRPC. + + Lists BackupPlans in a given project and location. + + Returns: + Callable[[~.ListBackupPlansRequest], + ~.ListBackupPlansResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_backup_plans" not in self._stubs: + self._stubs["list_backup_plans"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlans", + request_serializer=backupplan.ListBackupPlansRequest.serialize, + response_deserializer=backupplan.ListBackupPlansResponse.deserialize, + ) + return self._stubs["list_backup_plans"] + + @property + def delete_backup_plan( + self, + ) -> Callable[[backupplan.DeleteBackupPlanRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup plan method over gRPC. + + Deletes a single BackupPlan. + + Returns: + Callable[[~.DeleteBackupPlanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_plan" not in self._stubs: + self._stubs["delete_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlan", + request_serializer=backupplan.DeleteBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan"] + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create backup plan association method over gRPC. + + Create a BackupPlanAssociation + + Returns: + Callable[[~.CreateBackupPlanAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_backup_plan_association" not in self._stubs: + self._stubs[ + "create_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlanAssociation", + request_serializer=backupplanassociation.CreateBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan_association"] + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + backupplanassociation.BackupPlanAssociation, + ]: + r"""Return a callable for the get backup plan association method over gRPC. + + Gets details of a single BackupPlanAssociation. + + Returns: + Callable[[~.GetBackupPlanAssociationRequest], + ~.BackupPlanAssociation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan_association" not in self._stubs: + self._stubs["get_backup_plan_association"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlanAssociation", + request_serializer=backupplanassociation.GetBackupPlanAssociationRequest.serialize, + response_deserializer=backupplanassociation.BackupPlanAssociation.deserialize, + ) + return self._stubs["get_backup_plan_association"] + + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + backupplanassociation.ListBackupPlanAssociationsResponse, + ]: + r"""Return a callable for the list backup plan associations method over gRPC. + + Lists BackupPlanAssociations in a given project and + location. 
+ + Returns: + Callable[[~.ListBackupPlanAssociationsRequest], + ~.ListBackupPlanAssociationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_plan_associations" not in self._stubs: + self._stubs[ + "list_backup_plan_associations" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlanAssociations", + request_serializer=backupplanassociation.ListBackupPlanAssociationsRequest.serialize, + response_deserializer=backupplanassociation.ListBackupPlanAssociationsResponse.deserialize, + ) + return self._stubs["list_backup_plan_associations"] + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete backup plan association method over gRPC. + + Deletes a single BackupPlanAssociation. + + Returns: + Callable[[~.DeleteBackupPlanAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_backup_plan_association" not in self._stubs: + self._stubs[ + "delete_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlanAssociation", + request_serializer=backupplanassociation.DeleteBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan_association"] + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the trigger backup method over gRPC. + + Triggers a new Backup. + + Returns: + Callable[[~.TriggerBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "trigger_backup" not in self._stubs: + self._stubs["trigger_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/TriggerBackup", + request_serializer=backupplanassociation.TriggerBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["trigger_backup"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py index 26b64ba6a60c..9acd2b61c3fb 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py @@ -28,7 +28,12 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.backupdr_v1.types import 
backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) from .base import DEFAULT_CLIENT_INFO, BackupDRTransport from .grpc import BackupDRGrpcTransport @@ -374,6 +379,665 @@ def delete_management_server( ) return self._stubs["delete_management_server"] + @property + def create_backup_vault( + self, + ) -> Callable[ + [backupvault.CreateBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create backup vault method over gRPC. + + Creates a new BackupVault in a given project and + location. + + Returns: + Callable[[~.CreateBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_vault" not in self._stubs: + self._stubs["create_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupVault", + request_serializer=backupvault.CreateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_vault"] + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], + Awaitable[backupvault.ListBackupVaultsResponse], + ]: + r"""Return a callable for the list backup vaults method over gRPC. + + Lists BackupVaults in a given project and location. + + Returns: + Callable[[~.ListBackupVaultsRequest], + Awaitable[~.ListBackupVaultsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_backup_vaults" not in self._stubs: + self._stubs["list_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupVaults", + request_serializer=backupvault.ListBackupVaultsRequest.serialize, + response_deserializer=backupvault.ListBackupVaultsResponse.deserialize, + ) + return self._stubs["list_backup_vaults"] + + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + Awaitable[backupvault.FetchUsableBackupVaultsResponse], + ]: + r"""Return a callable for the fetch usable backup vaults method over gRPC. + + FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + Returns: + Callable[[~.FetchUsableBackupVaultsRequest], + Awaitable[~.FetchUsableBackupVaultsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_usable_backup_vaults" not in self._stubs: + self._stubs["fetch_usable_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchUsableBackupVaults", + request_serializer=backupvault.FetchUsableBackupVaultsRequest.serialize, + response_deserializer=backupvault.FetchUsableBackupVaultsResponse.deserialize, + ) + return self._stubs["fetch_usable_backup_vaults"] + + @property + def get_backup_vault( + self, + ) -> Callable[ + [backupvault.GetBackupVaultRequest], Awaitable[backupvault.BackupVault] + ]: + r"""Return a callable for the get backup vault method over gRPC. + + Gets details of a BackupVault. + + Returns: + Callable[[~.GetBackupVaultRequest], + Awaitable[~.BackupVault]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_vault" not in self._stubs: + self._stubs["get_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupVault", + request_serializer=backupvault.GetBackupVaultRequest.serialize, + response_deserializer=backupvault.BackupVault.deserialize, + ) + return self._stubs["get_backup_vault"] + + @property + def update_backup_vault( + self, + ) -> Callable[ + [backupvault.UpdateBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update backup vault method over gRPC. + + Updates the settings of a BackupVault. + + Returns: + Callable[[~.UpdateBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup_vault" not in self._stubs: + self._stubs["update_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackupVault", + request_serializer=backupvault.UpdateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_vault"] + + @property + def delete_backup_vault( + self, + ) -> Callable[ + [backupvault.DeleteBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup vault method over gRPC. + + Deletes a BackupVault. + + Returns: + Callable[[~.DeleteBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_vault" not in self._stubs: + self._stubs["delete_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupVault", + request_serializer=backupvault.DeleteBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_vault"] + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], + Awaitable[backupvault.ListDataSourcesResponse], + ]: + r"""Return a callable for the list data sources method over gRPC. + + Lists DataSources in a given project and location. + + Returns: + Callable[[~.ListDataSourcesRequest], + Awaitable[~.ListDataSourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListDataSources", + request_serializer=backupvault.ListDataSourcesRequest.serialize, + response_deserializer=backupvault.ListDataSourcesResponse.deserialize, + ) + return self._stubs["list_data_sources"] + + @property + def get_data_source( + self, + ) -> Callable[ + [backupvault.GetDataSourceRequest], Awaitable[backupvault.DataSource] + ]: + r"""Return a callable for the get data source method over gRPC. + + Gets details of a DataSource. + + Returns: + Callable[[~.GetDataSourceRequest], + Awaitable[~.DataSource]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetDataSource", + request_serializer=backupvault.GetDataSourceRequest.serialize, + response_deserializer=backupvault.DataSource.deserialize, + ) + return self._stubs["get_data_source"] + + @property + def update_data_source( + self, + ) -> Callable[ + [backupvault.UpdateDataSourceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update data source method over gRPC. + + Updates the settings of a DataSource. + + Returns: + Callable[[~.UpdateDataSourceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_data_source" not in self._stubs: + self._stubs["update_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateDataSource", + request_serializer=backupvault.UpdateDataSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_data_source"] + + @property + def list_backups( + self, + ) -> Callable[ + [backupvault.ListBackupsRequest], Awaitable[backupvault.ListBackupsResponse] + ]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. + + Returns: + Callable[[~.ListBackupsRequest], + Awaitable[~.ListBackupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackups", + request_serializer=backupvault.ListBackupsRequest.serialize, + response_deserializer=backupvault.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup( + self, + ) -> Callable[[backupvault.GetBackupRequest], Awaitable[backupvault.Backup]]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a Backup. + + Returns: + Callable[[~.GetBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackup", + request_serializer=backupvault.GetBackupRequest.serialize, + response_deserializer=backupvault.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def update_backup( + self, + ) -> Callable[ + [backupvault.UpdateBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update backup method over gRPC. + + Updates the settings of a Backup. + + Returns: + Callable[[~.UpdateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackup", + request_serializer=backupvault.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[ + [backupvault.DeleteBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackup", + request_serializer=backupvault.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def restore_backup( + self, + ) -> Callable[ + [backupvault.RestoreBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the restore backup method over gRPC. + + Restore from a Backup + + Returns: + Callable[[~.RestoreBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "restore_backup" not in self._stubs: + self._stubs["restore_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/RestoreBackup", + request_serializer=backupvault.RestoreBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_backup"] + + @property + def create_backup_plan( + self, + ) -> Callable[ + [backupplan.CreateBackupPlanRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create backup plan method over gRPC. + + Create a BackupPlan + + Returns: + Callable[[~.CreateBackupPlanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_plan" not in self._stubs: + self._stubs["create_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlan", + request_serializer=backupplan.CreateBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan"] + + @property + def get_backup_plan( + self, + ) -> Callable[[backupplan.GetBackupPlanRequest], Awaitable[backupplan.BackupPlan]]: + r"""Return a callable for the get backup plan method over gRPC. + + Gets details of a single BackupPlan. + + Returns: + Callable[[~.GetBackupPlanRequest], + Awaitable[~.BackupPlan]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_backup_plan" not in self._stubs: + self._stubs["get_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlan", + request_serializer=backupplan.GetBackupPlanRequest.serialize, + response_deserializer=backupplan.BackupPlan.deserialize, + ) + return self._stubs["get_backup_plan"] + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], + Awaitable[backupplan.ListBackupPlansResponse], + ]: + r"""Return a callable for the list backup plans method over gRPC. + + Lists BackupPlans in a given project and location. + + Returns: + Callable[[~.ListBackupPlansRequest], + Awaitable[~.ListBackupPlansResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_plans" not in self._stubs: + self._stubs["list_backup_plans"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlans", + request_serializer=backupplan.ListBackupPlansRequest.serialize, + response_deserializer=backupplan.ListBackupPlansResponse.deserialize, + ) + return self._stubs["list_backup_plans"] + + @property + def delete_backup_plan( + self, + ) -> Callable[ + [backupplan.DeleteBackupPlanRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup plan method over gRPC. + + Deletes a single BackupPlan. + + Returns: + Callable[[~.DeleteBackupPlanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_backup_plan" not in self._stubs: + self._stubs["delete_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlan", + request_serializer=backupplan.DeleteBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan"] + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create backup plan association method over gRPC. + + Create a BackupPlanAssociation + + Returns: + Callable[[~.CreateBackupPlanAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_plan_association" not in self._stubs: + self._stubs[ + "create_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlanAssociation", + request_serializer=backupplanassociation.CreateBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan_association"] + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + Awaitable[backupplanassociation.BackupPlanAssociation], + ]: + r"""Return a callable for the get backup plan association method over gRPC. + + Gets details of a single BackupPlanAssociation. + + Returns: + Callable[[~.GetBackupPlanAssociationRequest], + Awaitable[~.BackupPlanAssociation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan_association" not in self._stubs: + self._stubs["get_backup_plan_association"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlanAssociation", + request_serializer=backupplanassociation.GetBackupPlanAssociationRequest.serialize, + response_deserializer=backupplanassociation.BackupPlanAssociation.deserialize, + ) + return self._stubs["get_backup_plan_association"] + + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse], + ]: + r"""Return a callable for the list backup plan associations method over gRPC. + + Lists BackupPlanAssociations in a given project and + location. + + Returns: + Callable[[~.ListBackupPlanAssociationsRequest], + Awaitable[~.ListBackupPlanAssociationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_backup_plan_associations" not in self._stubs: + self._stubs[ + "list_backup_plan_associations" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlanAssociations", + request_serializer=backupplanassociation.ListBackupPlanAssociationsRequest.serialize, + response_deserializer=backupplanassociation.ListBackupPlanAssociationsResponse.deserialize, + ) + return self._stubs["list_backup_plan_associations"] + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete backup plan association method over gRPC. + + Deletes a single BackupPlanAssociation. + + Returns: + Callable[[~.DeleteBackupPlanAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_plan_association" not in self._stubs: + self._stubs[ + "delete_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlanAssociation", + request_serializer=backupplanassociation.DeleteBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan_association"] + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the trigger backup method over gRPC. + + Triggers a new Backup. + + Returns: + Callable[[~.TriggerBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "trigger_backup" not in self._stubs: + self._stubs["trigger_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/TriggerBackup", + request_serializer=backupplanassociation.TriggerBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["trigger_backup"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -415,6 +1079,202 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_backup_vault: gapic_v1.method_async.wrap_method( + self.create_backup_vault, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_backup_vaults: gapic_v1.method_async.wrap_method( + self.list_backup_vaults, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_usable_backup_vaults: gapic_v1.method_async.wrap_method( + self.fetch_usable_backup_vaults, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup_vault: gapic_v1.method_async.wrap_method( + self.get_backup_vault, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + 
multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup_vault: gapic_v1.method_async.wrap_method( + self.update_backup_vault, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup_vault: gapic_v1.method_async.wrap_method( + self.delete_backup_vault, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_data_sources: gapic_v1.method_async.wrap_method( + self.list_data_sources, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_data_source: gapic_v1.method_async.wrap_method( + self.get_data_source, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_data_source: gapic_v1.method_async.wrap_method( + self.update_data_source, + default_timeout=60.0, + client_info=client_info, + ), + self.list_backups: gapic_v1.method_async.wrap_method( + self.list_backups, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method_async.wrap_method( + self.get_backup, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + 
core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup: gapic_v1.method_async.wrap_method( + self.update_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method_async.wrap_method( + self.delete_backup, + default_timeout=None, + client_info=client_info, + ), + self.restore_backup: gapic_v1.method_async.wrap_method( + self.restore_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.create_backup_plan: gapic_v1.method_async.wrap_method( + self.create_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan: gapic_v1.method_async.wrap_method( + self.get_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plans: gapic_v1.method_async.wrap_method( + self.list_backup_plans, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup_plan: gapic_v1.method_async.wrap_method( + self.delete_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.create_backup_plan_association: gapic_v1.method_async.wrap_method( + self.create_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan_association: gapic_v1.method_async.wrap_method( + self.get_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plan_associations: gapic_v1.method_async.wrap_method( + self.list_backup_plan_associations, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup_plan_association: gapic_v1.method_async.wrap_method( + self.delete_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.trigger_backup: gapic_v1.method_async.wrap_method( + self.trigger_backup, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git 
a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py index 621153ce0574..2ff0d1ccf3b9 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py @@ -47,7 +47,12 @@ from google.longrunning import operations_pb2 # type: ignore -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) from .base import BackupDRTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -74,6 +79,30 @@ class BackupDRRestInterceptor: .. code-block:: python class MyCustomBackupDRInterceptor(BackupDRRestInterceptor): + def pre_create_backup_plan(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_plan(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup_plan_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_plan_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_management_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -82,6 +111,38 @@ def post_create_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_backup(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_plan(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_plan(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_plan_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_plan_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_management_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -90,6 +151,54 @@ def post_delete_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_fetch_usable_backup_vaults(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_usable_backup_vaults(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_plan(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_plan(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_plan_association(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_plan_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_data_source(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_source(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_management_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -98,6 +207,46 @@ def post_get_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_list_backup_plan_associations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_plan_associations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backup_plans(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_plans(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backup_vaults(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_vaults(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_data_sources(self, request, metadata): + logging.log(f"Received 
request: {request}") + return request, metadata + + def post_list_data_sources(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_management_servers(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -106,28 +255,68 @@ def post_list_management_servers(self, response): logging.log(f"Received response: {response}") return response + def pre_restore_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_trigger_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_trigger_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_data_source(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_data_source(self, response): + logging.log(f"Received response: {response}") + return response + transport = BackupDRRestTransport(interceptor=MyCustomBackupDRInterceptor()) client = BackupDRClient(transport=transport) """ - def pre_create_management_server( + def pre_create_backup_plan( self, - request: backupdr.CreateManagementServerRequest, + request: backupplan.CreateBackupPlanRequest, metadata: Sequence[Tuple[str, str]], - ) -> 
Tuple[backupdr.CreateManagementServerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_management_server + ) -> Tuple[backupplan.CreateBackupPlanRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup_plan Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_create_management_server( + def post_create_backup_plan( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_management_server + """Post-rpc interceptor for create_backup_plan Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -135,22 +324,25 @@ def post_create_management_server( """ return response - def pre_delete_management_server( + def pre_create_backup_plan_association( self, - request: backupdr.DeleteManagementServerRequest, + request: backupplanassociation.CreateBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[backupdr.DeleteManagementServerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_management_server + ) -> Tuple[ + backupplanassociation.CreateBackupPlanAssociationRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_backup_plan_association Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_delete_management_server( + def post_create_backup_plan_association( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_management_server + """Post-rpc interceptor for create_backup_plan_association Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -158,22 +350,22 @@ def post_delete_management_server( """ return response - def pre_get_management_server( + def pre_create_backup_vault( self, - request: backupdr.GetManagementServerRequest, + request: backupvault.CreateBackupVaultRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[backupdr.GetManagementServerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_management_server + ) -> Tuple[backupvault.CreateBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup_vault Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_get_management_server( - self, response: backupdr.ManagementServer - ) -> backupdr.ManagementServer: - """Post-rpc interceptor for get_management_server + def post_create_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_backup_vault Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -181,22 +373,22 @@ def post_get_management_server( """ return response - def pre_list_management_servers( + def pre_create_management_server( self, - request: backupdr.ListManagementServersRequest, + request: backupdr.CreateManagementServerRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[backupdr.ListManagementServersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_management_servers + ) -> Tuple[backupdr.CreateManagementServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_management_server Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_list_management_servers( - self, response: backupdr.ListManagementServersResponse - ) -> backupdr.ListManagementServersResponse: - """Post-rpc interceptor for list_management_servers + def post_create_management_server( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_management_server Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -204,22 +396,22 @@ def post_list_management_servers( """ return response - def pre_get_location( + def pre_delete_backup( self, - request: locations_pb2.GetLocationRequest, + request: backupvault.DeleteBackupRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_location + ) -> Tuple[backupvault.DeleteBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location + def post_delete_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -227,22 +419,22 @@ def post_get_location( """ return response - def pre_list_locations( + def pre_delete_backup_plan( self, - request: locations_pb2.ListLocationsRequest, + request: backupplan.DeleteBackupPlanRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_locations + ) -> Tuple[backupplan.DeleteBackupPlanRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup_plan Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations + def post_delete_backup_plan( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_plan Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -250,20 +442,25 @@ def post_list_locations( """ return response - def pre_get_iam_policy( + def pre_delete_backup_plan_association( self, - request: iam_policy_pb2.GetIamPolicyRequest, + request: backupplanassociation.DeleteBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_iam_policy + ) -> Tuple[ + backupplanassociation.DeleteBackupPlanAssociationRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_backup_plan_association Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy + def post_delete_backup_plan_association( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_plan_association Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -271,20 +468,22 @@ def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """ return response - def pre_set_iam_policy( + def pre_delete_backup_vault( self, - request: iam_policy_pb2.SetIamPolicyRequest, + request: backupvault.DeleteBackupVaultRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for set_iam_policy + ) -> Tuple[backupvault.DeleteBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup_vault Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy + def post_delete_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_vault Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -292,22 +491,22 @@ def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """ return response - def pre_test_iam_permissions( + def pre_delete_management_server( self, - request: iam_policy_pb2.TestIamPermissionsRequest, + request: backupdr.DeleteManagementServerRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for test_iam_permissions + ) -> Tuple[backupdr.DeleteManagementServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_management_server Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions + def post_delete_management_server( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_management_server Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -315,20 +514,22 @@ def post_test_iam_permissions( """ return response - def pre_cancel_operation( + def pre_fetch_usable_backup_vaults( self, - request: operations_pb2.CancelOperationRequest, + request: backupvault.FetchUsableBackupVaultsRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation + ) -> Tuple[backupvault.FetchUsableBackupVaultsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_usable_backup_vaults Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_cancel_operation(self, response: None) -> None: - """Post-rpc interceptor for cancel_operation + def post_fetch_usable_backup_vaults( + self, response: backupvault.FetchUsableBackupVaultsResponse + ) -> backupvault.FetchUsableBackupVaultsResponse: + """Post-rpc interceptor for fetch_usable_backup_vaults Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -336,20 +537,18 @@ def post_cancel_operation(self, response: None) -> None: """ return response - def pre_delete_operation( - self, - request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation + def pre_get_backup( + self, request: backupvault.GetBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[backupvault.GetBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_delete_operation(self, response: None) -> None: - """Post-rpc interceptor for delete_operation + def post_get_backup(self, response: backupvault.Backup) -> backupvault.Backup: + """Post-rpc interceptor for get_backup Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -357,22 +556,22 @@ def post_delete_operation(self, response: None) -> None: """ return response - def pre_get_operation( + def pre_get_backup_plan( self, - request: operations_pb2.GetOperationRequest, + request: backupplan.GetBackupPlanRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation + ) -> Tuple[backupplan.GetBackupPlanRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup_plan Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation + def post_get_backup_plan( + self, response: backupplan.BackupPlan + ) -> backupplan.BackupPlan: + """Post-rpc interceptor for get_backup_plan Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -380,22 +579,24 @@ def post_get_operation( """ return response - def pre_list_operations( + def pre_get_backup_plan_association( self, - request: operations_pb2.ListOperationsRequest, + request: backupplanassociation.GetBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations + ) -> Tuple[ + backupplanassociation.GetBackupPlanAssociationRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_backup_plan_association Override 
in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations + def post_get_backup_plan_association( + self, response: backupplanassociation.BackupPlanAssociation + ) -> backupplanassociation.BackupPlanAssociation: + """Post-rpc interceptor for get_backup_plan_association Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -403,164 +604,2784 @@ def post_list_operations( """ return response + def pre_get_backup_vault( + self, + request: backupvault.GetBackupVaultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.GetBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup_vault -@dataclasses.dataclass -class BackupDRRestStub: - _session: AuthorizedSession - _host: str - _interceptor: BackupDRRestInterceptor + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + def post_get_backup_vault( + self, response: backupvault.BackupVault + ) -> backupvault.BackupVault: + """Post-rpc interceptor for get_backup_vault -class BackupDRRestTransport(BackupDRTransport): - """REST backend transport for BackupDR. + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response - The BackupDR Service + def pre_get_data_source( + self, + request: backupvault.GetDataSourceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.GetDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_data_source - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata - It sends JSON representations of protocol buffers over HTTP/1.1 + def post_get_data_source( + self, response: backupvault.DataSource + ) -> backupvault.DataSource: + """Post-rpc interceptor for get_data_source - """ + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response - def __init__( + def pre_get_management_server( self, - *, - host: str = "backupdr.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[BackupDRRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. + request: backupdr.GetManagementServerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupdr.GetManagementServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_management_server - Args: - host (Optional[str]): - The hostname to connect to (default: 'backupdr.googleapis.com'). 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. + def post_get_management_server( + self, response: backupdr.ManagementServer + ) -> backupdr.ManagementServer: + """Post-rpc interceptor for get_management_server + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER + return response - url_match_items = maybe_url_match.groupdict() + def pre_list_backup_plan_associations( + self, + request: backupplanassociation.ListBackupPlanAssociationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + backupplanassociation.ListBackupPlanAssociationsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_backup_plan_associations - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or BackupDRRestInterceptor() - self._prep_wrapped_messages(client_info) + def post_list_backup_plan_associations( + self, response: backupplanassociation.ListBackupPlanAssociationsResponse + ) -> backupplanassociation.ListBackupPlanAssociationsResponse: + """Post-rpc interceptor for list_backup_plan_associations - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response - This property caches on the instance; repeated calls return the same - client. + def pre_list_backup_plans( + self, + request: backupplan.ListBackupPlansRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupplan.ListBackupPlansRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backup_plans + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - "google.longrunning.Operations.CancelOperation": [ - { - "method": "post", - "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", - "body": "*", - }, - ], - "google.longrunning.Operations.DeleteOperation": [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.GetOperation": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.ListOperations": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*}/operations", - }, - ], - } + return request, metadata - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1", - ) + def post_list_backup_plans( + self, response: backupplan.ListBackupPlansResponse + ) -> backupplan.ListBackupPlansResponse: + """Post-rpc interceptor for list_backup_plans - self._operations_client = operations_v1.AbstractOperationsClient( - transport=rest_transport - ) + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_list_backups( + self, + request: backupvault.ListBackupsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.ListBackupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backups + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_backups( + self, response: backupvault.ListBackupsResponse + ) -> backupvault.ListBackupsResponse: + """Post-rpc interceptor for list_backups + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_backup_vaults( + self, + request: backupvault.ListBackupVaultsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.ListBackupVaultsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backup_vaults + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_backup_vaults( + self, response: backupvault.ListBackupVaultsResponse + ) -> backupvault.ListBackupVaultsResponse: + """Post-rpc interceptor for list_backup_vaults + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_data_sources( + self, + request: backupvault.ListDataSourcesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.ListDataSourcesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_data_sources + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_list_data_sources( + self, response: backupvault.ListDataSourcesResponse + ) -> backupvault.ListDataSourcesResponse: + """Post-rpc interceptor for list_data_sources + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_management_servers( + self, + request: backupdr.ListManagementServersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupdr.ListManagementServersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_management_servers + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_management_servers( + self, response: backupdr.ListManagementServersResponse + ) -> backupdr.ListManagementServersResponse: + """Post-rpc interceptor for list_management_servers + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_restore_backup( + self, + request: backupvault.RestoreBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.RestoreBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for restore_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_restore_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_backup + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_trigger_backup( + self, + request: backupplanassociation.TriggerBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupplanassociation.TriggerBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for trigger_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_trigger_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for trigger_backup + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_update_backup( + self, + request: backupvault.UpdateBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.UpdateBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_update_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_update_backup_vault( + self, + request: backupvault.UpdateBackupVaultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.UpdateBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup_vault + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_update_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup_vault + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_update_data_source( + self, + request: backupvault.UpdateDataSourceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.UpdateDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_data_source + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_update_data_source( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_data_source + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class BackupDRRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BackupDRRestInterceptor + + +class BackupDRRestTransport(BackupDRTransport): + """REST backend transport for BackupDR. + + The BackupDR Service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "backupdr.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[BackupDRRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'backupdr.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BackupDRRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateBackupPlan(BackupDRRestStub): + def __hash__(self): + return hash("CreateBackupPlan") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupPlanId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.CreateBackupPlanRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup plan method over HTTP. + + Args: + request (~.backupplan.CreateBackupPlanRequest): + The request object. The request message for creating a ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlans", + "body": "backup_plan", + }, + ] + request, metadata = self._interceptor.pre_create_backup_plan( + request, metadata + ) + pb_request = backupplan.CreateBackupPlanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_plan(resp) + return resp + + class _CreateBackupPlanAssociation(BackupDRRestStub): + def __hash__(self): + return hash("CreateBackupPlanAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupPlanAssociationId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.CreateBackupPlanAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup plan + association method over HTTP. + + Args: + request (~.backupplanassociation.CreateBackupPlanAssociationRequest): + The request object. Request message for creating a backup + plan. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlanAssociations", + "body": "backup_plan_association", + }, + ] + request, metadata = self._interceptor.pre_create_backup_plan_association( + request, metadata + ) + pb_request = backupplanassociation.CreateBackupPlanAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_plan_association(resp) + return resp + + class _CreateBackupVault(BackupDRRestStub): + def __hash__(self): + return hash("CreateBackupVault") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupVaultId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.CreateBackupVaultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup vault method over HTTP. + + Args: + request (~.backupvault.CreateBackupVaultRequest): + The request object. Message for creating a BackupVault. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults", + "body": "backup_vault", + }, + ] + request, metadata = self._interceptor.pre_create_backup_vault( + request, metadata + ) + pb_request = backupvault.CreateBackupVaultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_vault(resp) + return resp + + class _CreateManagementServer(BackupDRRestStub): + def __hash__(self): + return hash("CreateManagementServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "managementServerId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.CreateManagementServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create management server method over HTTP. + + Args: + request (~.backupdr.CreateManagementServerRequest): + The request object. Request message for creating a + management server instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/managementServers", + "body": "management_server", + }, + ] + request, metadata = self._interceptor.pre_create_management_server( + request, metadata + ) + pb_request = backupdr.CreateManagementServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_management_server(resp) + return resp + + class _DeleteBackup(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.DeleteBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup method over HTTP. + + Args: + request (~.backupvault.DeleteBackupRequest): + The request object. Message for deleting a Backup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup(request, metadata) + pb_request = backupvault.DeleteBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup(resp) + return resp + + class _DeleteBackupPlan(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackupPlan") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.DeleteBackupPlanRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup plan method over HTTP. + + Args: + request (~.backupplan.DeleteBackupPlanRequest): + The request object. The request message for deleting a ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupPlans/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup_plan( + request, metadata + ) + pb_request = backupplan.DeleteBackupPlanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup_plan(resp) + return resp + + class _DeleteBackupPlanAssociation(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackupPlanAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.DeleteBackupPlanAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup plan + association method over HTTP. + + Args: + request (~.backupplanassociation.DeleteBackupPlanAssociationRequest): + The request object. Request message for deleting a backup + plan association. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupPlanAssociations/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup_plan_association( + request, metadata + ) + pb_request = backupplanassociation.DeleteBackupPlanAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup_plan_association(resp) + return resp + + class _DeleteBackupVault(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackupVault") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.DeleteBackupVaultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup vault method over HTTP. + + Args: + request (~.backupvault.DeleteBackupVaultRequest): + The request object. Message for deleting a BackupVault. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup_vault( + request, metadata + ) + pb_request = backupvault.DeleteBackupVaultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup_vault(resp) + return resp + + class _DeleteManagementServer(BackupDRRestStub): + def __hash__(self): + return hash("DeleteManagementServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.DeleteManagementServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete management server method over HTTP. + + Args: + request (~.backupdr.DeleteManagementServerRequest): + The request object. Request message for deleting a + management server instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_management_server( + request, metadata + ) + pb_request = backupdr.DeleteManagementServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_management_server(resp) + return resp + + class _FetchUsableBackupVaults(BackupDRRestStub): + def __hash__(self): + return hash("FetchUsableBackupVaults") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.FetchUsableBackupVaultsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.FetchUsableBackupVaultsResponse: + r"""Call the fetch usable backup + vaults method over HTTP. + + Args: + request (~.backupvault.FetchUsableBackupVaultsRequest): + The request object. Request message for fetching usable + BackupVaults. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.FetchUsableBackupVaultsResponse: + Response message for fetching usable + BackupVaults. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults:fetchUsable", + }, + ] + request, metadata = self._interceptor.pre_fetch_usable_backup_vaults( + request, metadata + ) + pb_request = backupvault.FetchUsableBackupVaultsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.FetchUsableBackupVaultsResponse() + pb_resp = backupvault.FetchUsableBackupVaultsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_usable_backup_vaults(resp) + return resp + + class _GetBackup(BackupDRRestStub): + def __hash__(self): + return hash("GetBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.GetBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.Backup: + r"""Call the get backup method over HTTP. + + Args: + request (~.backupvault.GetBackupRequest): + The request object. Request message for getting a Backup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.Backup: + Message describing a Backup object. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup(request, metadata) + pb_request = backupvault.GetBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.Backup() + pb_resp = backupvault.Backup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup(resp) + return resp + + class _GetBackupPlan(BackupDRRestStub): + def __hash__(self): + return hash("GetBackupPlan") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.GetBackupPlanRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.BackupPlan: + r"""Call the get backup plan method over HTTP. + + Args: + request (~.backupplan.GetBackupPlanRequest): + The request object. The request message for getting a ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupplan.BackupPlan: + A ``BackupPlan`` specifies some common fields, such as + ``description`` as well as one or more ``BackupRule`` + messages. Each ``BackupRule`` has a retention policy and + defines a schedule by which the system is to perform + backup workloads. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupPlans/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup_plan(request, metadata) + pb_request = backupplan.GetBackupPlanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplan.BackupPlan() + pb_resp = backupplan.BackupPlan.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup_plan(resp) + return resp + + class _GetBackupPlanAssociation(BackupDRRestStub): + def __hash__(self): + return hash("GetBackupPlanAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.GetBackupPlanAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.BackupPlanAssociation: + r"""Call the get backup plan + association method over HTTP. + + Args: + request (~.backupplanassociation.GetBackupPlanAssociationRequest): + The request object. Request message for getting a + BackupPlanAssociation resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.backupplanassociation.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupPlanAssociations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup_plan_association( + request, metadata + ) + pb_request = backupplanassociation.GetBackupPlanAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplanassociation.BackupPlanAssociation() + pb_resp = backupplanassociation.BackupPlanAssociation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup_plan_association(resp) + return resp + + class _GetBackupVault(BackupDRRestStub): + def __hash__(self): + return hash("GetBackupVault") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.GetBackupVaultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.BackupVault: + r"""Call the get backup vault method over HTTP. + + Args: + request (~.backupvault.GetBackupVaultRequest): + The request object. Request message for getting a + BackupVault. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.BackupVault: + Message describing a BackupVault + object. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup_vault( + request, metadata + ) + pb_request = backupvault.GetBackupVaultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.BackupVault() + pb_resp = backupvault.BackupVault.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup_vault(resp) + return resp + + class _GetDataSource(BackupDRRestStub): + def __hash__(self): + return hash("GetDataSource") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.GetDataSourceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.DataSource: + r"""Call the get data source method over HTTP. + + Args: + request (~.backupvault.GetDataSourceRequest): + The request object. Request message for getting a + DataSource instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.DataSource: + Message describing a DataSource + object. Datasource object used to + represent Datasource details for both + admin and basic view. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}", + }, + ] + request, metadata = self._interceptor.pre_get_data_source(request, metadata) + pb_request = backupvault.GetDataSourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.DataSource() + pb_resp = backupvault.DataSource.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_data_source(resp) + return resp + + class _GetManagementServer(BackupDRRestStub): + def __hash__(self): + return hash("GetManagementServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.GetManagementServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupdr.ManagementServer: + r"""Call the get management server method over HTTP. + + Args: + request (~.backupdr.GetManagementServerRequest): + The request object. Request message for getting a + management server instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupdr.ManagementServer: + ManagementServer describes a single + BackupDR ManagementServer instance. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + }, + ] + request, metadata = self._interceptor.pre_get_management_server( + request, metadata + ) + pb_request = backupdr.GetManagementServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupdr.ManagementServer() + pb_resp = backupdr.ManagementServer.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_management_server(resp) + return resp + + class _ListBackupPlanAssociations(BackupDRRestStub): + def __hash__(self): + return hash("ListBackupPlanAssociations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.ListBackupPlanAssociationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.ListBackupPlanAssociationsResponse: + r"""Call the list backup plan + associations method over HTTP. + + Args: + request (~.backupplanassociation.ListBackupPlanAssociationsRequest): + The request object. Request message for List + BackupPlanAssociation + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.backupplanassociation.ListBackupPlanAssociationsResponse: + Response message for List + BackupPlanAssociation + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlanAssociations", + }, + ] + request, metadata = self._interceptor.pre_list_backup_plan_associations( + request, metadata + ) + pb_request = backupplanassociation.ListBackupPlanAssociationsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplanassociation.ListBackupPlanAssociationsResponse() + pb_resp = backupplanassociation.ListBackupPlanAssociationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_plan_associations(resp) + return resp + + class _ListBackupPlans(BackupDRRestStub): + def __hash__(self): + return hash("ListBackupPlans") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.ListBackupPlansRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.ListBackupPlansResponse: + r"""Call the list backup plans method over HTTP. + + Args: + request (~.backupplan.ListBackupPlansRequest): + The request object. The request message for getting a list ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupplan.ListBackupPlansResponse: + The response message for getting a list of + ``BackupPlan``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlans", + }, + ] + request, metadata = self._interceptor.pre_list_backup_plans( + request, metadata + ) + pb_request = backupplan.ListBackupPlansRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplan.ListBackupPlansResponse() + pb_resp = backupplan.ListBackupPlansResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_plans(resp) + return resp + + class _ListBackups(BackupDRRestStub): + def __hash__(self): + return hash("ListBackups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.ListBackupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.ListBackupsResponse: + r"""Call the list backups method over HTTP. + + Args: + request (~.backupvault.ListBackupsRequest): + The request object. Request message for listing Backups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.ListBackupsResponse: + Response message for listing Backups. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups", + }, + ] + request, metadata = self._interceptor.pre_list_backups(request, metadata) + pb_request = backupvault.ListBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.ListBackupsResponse() + pb_resp = backupvault.ListBackupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backups(resp) + return resp + + class _ListBackupVaults(BackupDRRestStub): + def __hash__(self): + return hash("ListBackupVaults") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.ListBackupVaultsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.ListBackupVaultsResponse: + r"""Call the list backup vaults method over HTTP. + + Args: + request (~.backupvault.ListBackupVaultsRequest): + The request object. Request message for listing + backupvault stores. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.ListBackupVaultsResponse: + Response message for listing + BackupVaults. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults", + }, + ] + request, metadata = self._interceptor.pre_list_backup_vaults( + request, metadata + ) + pb_request = backupvault.ListBackupVaultsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.ListBackupVaultsResponse() + pb_resp = backupvault.ListBackupVaultsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_vaults(resp) + return resp + + class _ListDataSources(BackupDRRestStub): + def __hash__(self): + return hash("ListDataSources") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.ListDataSourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.ListDataSourcesResponse: + r"""Call the list data sources method over HTTP. + + Args: + request (~.backupvault.ListDataSourcesRequest): + The request object. Request message for listing + DataSources. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.ListDataSourcesResponse: + Response message for listing + DataSources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/backupVaults/*}/dataSources", + }, + ] + request, metadata = self._interceptor.pre_list_data_sources( + request, metadata + ) + pb_request = backupvault.ListDataSourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.ListDataSourcesResponse() + pb_resp = backupvault.ListDataSourcesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_data_sources(resp) + return resp + + class _ListManagementServers(BackupDRRestStub): + def __hash__(self): + return hash("ListManagementServers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.ListManagementServersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupdr.ListManagementServersResponse: + r"""Call the list management servers method over HTTP. + + Args: + request (~.backupdr.ListManagementServersRequest): + The request object. Request message for listing + management servers. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupdr.ListManagementServersResponse: + Response message for listing + management servers. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/managementServers", + }, + ] + request, metadata = self._interceptor.pre_list_management_servers( + request, metadata + ) + pb_request = backupdr.ListManagementServersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupdr.ListManagementServersResponse() + pb_resp = backupdr.ListManagementServersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_management_servers(resp) + return resp + + class _RestoreBackup(BackupDRRestStub): + def __hash__(self): + return hash("RestoreBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.RestoreBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restore backup method over HTTP. + + Args: + request (~.backupvault.RestoreBackupRequest): + The request object. Request message for restoring from a + Backup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}:restore", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restore_backup(request, metadata) + pb_request = backupvault.RestoreBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) - # Return the client from cache. 
- return self._operations_client + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restore_backup(resp) + return resp - class _CreateManagementServer(BackupDRRestStub): + class _TriggerBackup(BackupDRRestStub): def __hash__(self): - return hash("CreateManagementServer") + return hash("TriggerBackup") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "managementServerId": "", - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -572,18 +3393,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backupdr.CreateManagementServerRequest, + request: backupplanassociation.TriggerBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the create management server method over HTTP. + r"""Call the trigger backup method over HTTP. Args: - request (~.backupdr.CreateManagementServerRequest): - The request object. Request message for creating a - management server instance. + request (~.backupplanassociation.TriggerBackupRequest): + The request object. Request message for triggering a + backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -601,14 +3422,12 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{parent=projects/*/locations/*}/managementServers", - "body": "management_server", + "uri": "/v1/{name=projects/*/locations/*/backupPlanAssociations/*}:triggerBackup", + "body": "*", }, ] - request, metadata = self._interceptor.pre_create_management_server( - request, metadata - ) - pb_request = backupdr.CreateManagementServerRequest.pb(request) + request, metadata = self._interceptor.pre_trigger_backup(request, metadata) + pb_request = backupplanassociation.TriggerBackupRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -649,14 +3468,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_management_server(resp) + resp = self._interceptor.post_trigger_backup(resp) return resp - class _DeleteManagementServer(BackupDRRestStub): + class _UpdateBackup(BackupDRRestStub): def __hash__(self): - return hash("DeleteManagementServer") + return hash("UpdateBackup") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -668,18 +3489,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backupdr.DeleteManagementServerRequest, + request: backupvault.UpdateBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete management server method over HTTP. + r"""Call the update backup method over HTTP. Args: - request (~.backupdr.DeleteManagementServerRequest): - The request object. Request message for deleting a - management server instance. 
+ request (~.backupvault.UpdateBackupRequest): + The request object. Request message for updating a + Backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -696,16 +3517,20 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + "method": "patch", + "uri": "/v1/{backup.name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}", + "body": "backup", }, ] - request, metadata = self._interceptor.pre_delete_management_server( - request, metadata - ) - pb_request = backupdr.DeleteManagementServerRequest.pb(request) + request, metadata = self._interceptor.pre_update_backup(request, metadata) + pb_request = backupvault.UpdateBackupRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -728,6 +3553,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -738,14 +3564,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_management_server(resp) + resp = self._interceptor.post_update_backup(resp) return resp - class _GetManagementServer(BackupDRRestStub): + class _UpdateBackupVault(BackupDRRestStub): def __hash__(self): - return hash("GetManagementServer") + return hash("UpdateBackupVault") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def 
_get_unset_required_fields(cls, message_dict): @@ -757,18 +3585,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backupdr.GetManagementServerRequest, + request: backupvault.UpdateBackupVaultRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> backupdr.ManagementServer: - r"""Call the get management server method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update backup vault method over HTTP. Args: - request (~.backupdr.GetManagementServerRequest): - The request object. Request message for getting a - management server instance. + request (~.backupvault.UpdateBackupVaultRequest): + The request object. Request message for updating a + BackupVault. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -776,24 +3604,31 @@ def __call__( sent along with the request as metadata. Returns: - ~.backupdr.ManagementServer: - ManagementServer describes a single - BackupDR ManagementServer instance. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + "method": "patch", + "uri": "/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}", + "body": "backup_vault", }, ] - request, metadata = self._interceptor.pre_get_management_server( + request, metadata = self._interceptor.pre_update_backup_vault( request, metadata ) - pb_request = backupdr.GetManagementServerRequest.pb(request) + pb_request = backupvault.UpdateBackupVaultRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -816,6 +3651,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -824,18 +3660,18 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupdr.ManagementServer() - pb_resp = backupdr.ManagementServer.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_management_server(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_backup_vault(resp) return resp - class _ListManagementServers(BackupDRRestStub): + class _UpdateDataSource(BackupDRRestStub): def __hash__(self): - return hash("ListManagementServers") + return hash("UpdateDataSource") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -847,18 +3683,18 @@ def _get_unset_required_fields(cls, 
message_dict): def __call__( self, - request: backupdr.ListManagementServersRequest, + request: backupvault.UpdateDataSourceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> backupdr.ListManagementServersResponse: - r"""Call the list management servers method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update data source method over HTTP. Args: - request (~.backupdr.ListManagementServersRequest): - The request object. Request message for listing - management servers. + request (~.backupvault.UpdateDataSourceRequest): + The request object. Request message for updating a data + source instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -866,24 +3702,31 @@ def __call__( sent along with the request as metadata. Returns: - ~.backupdr.ListManagementServersResponse: - Response message for listing - management servers. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/managementServers", + "method": "patch", + "uri": "/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}", + "body": "data_source", }, ] - request, metadata = self._interceptor.pre_list_management_servers( + request, metadata = self._interceptor.pre_update_data_source( request, metadata ) - pb_request = backupdr.ListManagementServersRequest.pb(request) + pb_request = backupvault.UpdateDataSourceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -906,6 +3749,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -914,13 +3758,38 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupdr.ListManagementServersResponse() - pb_resp = backupdr.ListManagementServersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_management_servers(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_data_source(resp) return resp + @property + def create_backup_plan( + self, + ) -> Callable[[backupplan.CreateBackupPlanRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateBackupPlan(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup_vault( + self, + ) -> Callable[[backupvault.CreateBackupVaultRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property def create_management_server( self, @@ -929,6 +3798,41 @@ def create_management_server( # In C++ this would require a dynamic_cast return self._CreateManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_backup( + self, + ) -> Callable[[backupvault.DeleteBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_plan( + self, + ) -> Callable[[backupplan.DeleteBackupPlanRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBackupPlan(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_vault( + self, + ) -> Callable[[backupvault.DeleteBackupVaultRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property def delete_management_server( self, @@ -937,6 +3841,60 @@ def delete_management_server( # In C++ this would require a dynamic_cast return self._DeleteManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + backupvault.FetchUsableBackupVaultsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchUsableBackupVaults(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup( + self, + ) -> Callable[[backupvault.GetBackupRequest], backupvault.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_plan( + self, + ) -> Callable[[backupplan.GetBackupPlanRequest], backupplan.BackupPlan]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupPlan(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + backupplanassociation.BackupPlanAssociation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_vault( + self, + ) -> Callable[[backupvault.GetBackupVaultRequest], backupvault.BackupVault]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupVault(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_source( + self, + ) -> Callable[[backupvault.GetDataSourceRequest], backupvault.DataSource]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDataSource(self._session, self._host, self._interceptor) # type: ignore + @property def get_management_server( self, @@ -945,6 +3903,55 @@ def get_management_server( # In C++ this would require a dynamic_cast return self._GetManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + backupplanassociation.ListBackupPlanAssociationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupPlanAssociations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], backupplan.ListBackupPlansResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupPlans(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backups( + self, + ) -> Callable[[backupvault.ListBackupsRequest], backupvault.ListBackupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], backupvault.ListBackupVaultsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListBackupVaults(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], backupvault.ListDataSourcesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDataSources(self._session, self._host, self._interceptor) # type: ignore + @property def list_management_servers( self, @@ -955,6 +3962,48 @@ def list_management_servers( # In C++ this would require a dynamic_cast return self._ListManagementServers(self._session, self._host, self._interceptor) # type: ignore + @property + def restore_backup( + self, + ) -> Callable[[backupvault.RestoreBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TriggerBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup( + self, + ) -> Callable[[backupvault.UpdateBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup_vault( + self, + ) -> Callable[[backupvault.UpdateBackupVaultRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBackupVault(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_data_source( + self, + ) -> Callable[[backupvault.UpdateDataSourceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDataSource(self._session, self._host, self._interceptor) # type: ignore + @property def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py index 3afc31268ba2..951186d655ee 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py @@ -26,6 +26,92 @@ WorkforceIdentityBasedManagementURI, WorkforceIdentityBasedOAuth2ClientID, ) +from .backupplan import ( + BackupPlan, + BackupRule, + BackupWindow, + CreateBackupPlanRequest, + DeleteBackupPlanRequest, + GetBackupPlanRequest, + ListBackupPlansRequest, + ListBackupPlansResponse, + StandardSchedule, + WeekDayOfMonth, +) +from .backupplanassociation import ( + BackupPlanAssociation, + CreateBackupPlanAssociationRequest, + DeleteBackupPlanAssociationRequest, + GetBackupPlanAssociationRequest, + ListBackupPlanAssociationsRequest, + ListBackupPlanAssociationsResponse, + RuleConfigInfo, + TriggerBackupRequest, +) +from 
.backupvault import ( + Backup, + BackupApplianceBackupConfig, + BackupApplianceLockInfo, + BackupConfigInfo, + BackupConfigState, + BackupLock, + BackupVault, + BackupVaultView, + BackupView, + CreateBackupVaultRequest, + DataSource, + DataSourceBackupApplianceApplication, + DataSourceGcpResource, + DeleteBackupRequest, + DeleteBackupVaultRequest, + FetchUsableBackupVaultsRequest, + FetchUsableBackupVaultsResponse, + GcpBackupConfig, + GcpResource, + GetBackupRequest, + GetBackupVaultRequest, + GetDataSourceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + ListDataSourcesRequest, + ListDataSourcesResponse, + RestoreBackupRequest, + RestoreBackupResponse, + ServiceLockInfo, + TargetResource, + UpdateBackupRequest, + UpdateBackupVaultRequest, + UpdateDataSourceRequest, +) +from .backupvault_ba import BackupApplianceBackupProperties +from .backupvault_gce import ( + AcceleratorConfig, + AccessConfig, + AdvancedMachineFeatures, + AliasIpRange, + AllocationAffinity, + AttachedDisk, + ComputeInstanceBackupProperties, + ComputeInstanceDataSourceProperties, + ComputeInstanceRestoreProperties, + ComputeInstanceTargetEnvironment, + ConfidentialInstanceConfig, + CustomerEncryptionKey, + DisplayDevice, + Entry, + GuestOsFeature, + InstanceParams, + KeyRevocationActionType, + Metadata, + NetworkInterface, + NetworkPerformanceConfig, + Scheduling, + SchedulingDuration, + ServiceAccount, + Tags, +) __all__ = ( "CreateManagementServerRequest", @@ -39,4 +125,82 @@ "OperationMetadata", "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", + "BackupPlan", + "BackupRule", + "BackupWindow", + "CreateBackupPlanRequest", + "DeleteBackupPlanRequest", + "GetBackupPlanRequest", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "StandardSchedule", + "WeekDayOfMonth", + "BackupPlanAssociation", + "CreateBackupPlanAssociationRequest", + "DeleteBackupPlanAssociationRequest", + 
"GetBackupPlanAssociationRequest", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "RuleConfigInfo", + "TriggerBackupRequest", + "Backup", + "BackupApplianceBackupConfig", + "BackupApplianceLockInfo", + "BackupConfigInfo", + "BackupLock", + "BackupVault", + "CreateBackupVaultRequest", + "DataSource", + "DataSourceBackupApplianceApplication", + "DataSourceGcpResource", + "DeleteBackupRequest", + "DeleteBackupVaultRequest", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GcpBackupConfig", + "GcpResource", + "GetBackupRequest", + "GetBackupVaultRequest", + "GetDataSourceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "RestoreBackupRequest", + "RestoreBackupResponse", + "ServiceLockInfo", + "TargetResource", + "UpdateBackupRequest", + "UpdateBackupVaultRequest", + "UpdateDataSourceRequest", + "BackupConfigState", + "BackupVaultView", + "BackupView", + "BackupApplianceBackupProperties", + "AcceleratorConfig", + "AccessConfig", + "AdvancedMachineFeatures", + "AliasIpRange", + "AllocationAffinity", + "AttachedDisk", + "ComputeInstanceBackupProperties", + "ComputeInstanceDataSourceProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ConfidentialInstanceConfig", + "CustomerEncryptionKey", + "DisplayDevice", + "Entry", + "GuestOsFeature", + "InstanceParams", + "Metadata", + "NetworkInterface", + "NetworkPerformanceConfig", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "Tags", + "KeyRevocationActionType", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py index 07ad09a753bc..2cbce001d7d5 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py +++ 
b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py @@ -195,7 +195,7 @@ class ManagementServer(proto.Message): oauth2_client_id (str): Output only. The OAuth 2.0 client id is required to make API calls to the BackupDR instance API of this ManagementServer. - This is the value that should be provided in the ‘aud’ field + This is the value that should be provided in the 'aud' field of the OIDC ID Token (see openid specification https://openid.net/specs/openid-connect-core-1_0.html#IDToken). workforce_identity_based_oauth2_client_id (google.cloud.backupdr_v1.types.WorkforceIdentityBasedOAuth2ClientID): @@ -350,10 +350,10 @@ class ListManagementServersRequest(proto.Message): parent (str): Required. The project and location for which to retrieve management servers information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example + 'projects/{project_id}/locations/{location}'. In Cloud + BackupDR, locations map to Google Cloud regions, for example **us-central1**. To retrieve management servers for all - locations, use "-" for the ``{location}`` value. + locations, use "-" for the '{location}' value. page_size (int): Optional. Requested page size. Server may return fewer items than requested. If @@ -401,15 +401,16 @@ class ListManagementServersResponse(proto.Message): Attributes: management_servers (MutableSequence[google.cloud.backupdr_v1.types.ManagementServer]): - The list of ManagementServer instances in the project for - the specified location. - - If the ``{location}`` value in the request is "-", the - response contains a list of instances from all locations. In - case any location is unreachable, the response will only - return management servers in reachable locations and the - 'unreachable' field will be populated with a list of - unreachable locations. + The list of ManagementServer instances in the + project for the specified location. 
+ + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return + management servers in reachable locations and + the 'unreachable' field will be populated with a + list of unreachable locations. next_page_token (str): A token identifying a page of results the server should return. @@ -443,7 +444,7 @@ class GetManagementServerRequest(proto.Message): name (str): Required. Name of the management server resource name, in the format - ``projects/{project_id}/locations/{location}/managementServers/{resource_name}`` + 'projects/{project_id}/locations/{location}/managementServers/{resource_name}' """ name: str = proto.Field( @@ -458,9 +459,9 @@ class CreateManagementServerRequest(proto.Message): Attributes: parent (str): Required. The management server project and location in the - format ``projects/{project_id}/locations/{location}``. In - Cloud Backup and DR locations map to GCP regions, for - example **us-central1**. + format 'projects/{project_id}/locations/{location}'. In + Cloud Backup and DR locations map to Google Cloud regions, + for example **us-central1**. management_server_id (str): Required. The name of the management server to create. The name must be unique for the @@ -571,7 +572,7 @@ class OperationMetadata(proto.Message): cancellation of the operation. Operations that have successfully been cancelled have [Operation.error][] value with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. + 1, corresponding to 'Code.CANCELLED'. api_version (str): Output only. API version used to start the operation. 
diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py new file mode 100644 index 000000000000..ca0eabb6d9f1 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py @@ -0,0 +1,644 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import month_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupPlan", + "BackupRule", + "StandardSchedule", + "BackupWindow", + "WeekDayOfMonth", + "CreateBackupPlanRequest", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "GetBackupPlanRequest", + "DeleteBackupPlanRequest", + }, +) + + +class BackupPlan(proto.Message): + r"""A ``BackupPlan`` specifies some common fields, such as + ``description`` as well as one or more ``BackupRule`` messages. Each + ``BackupRule`` has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + Attributes: + name (str): + Output only. Identifier. The resource name of the + ``BackupPlan``. 
+ + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + description (str): + Optional. The description of the ``BackupPlan`` resource. + + The description allows for additional details about + ``BackupPlan`` and its use cases to be provided. An example + description is the following: "This is a backup plan that + performs a daily backup at 6pm and retains data for 3 + months". The description must be at most 2048 characters. + labels (MutableMapping[str, str]): + Optional. This collection of key/value pairs + allows for custom labels to be supplied by the + user. Example, {"tag": "Weekly"}. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. When the ``BackupPlan`` was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. When the ``BackupPlan`` was last updated. + backup_rules (MutableSequence[google.cloud.backupdr_v1.types.BackupRule]): + Required. The backup rules for this ``BackupPlan``. There + must be at least one ``BackupRule`` message. + state (google.cloud.backupdr_v1.types.BackupPlan.State): + Output only. The ``State`` for the ``BackupPlan``. + resource_type (str): + Required. The resource type to which the ``BackupPlan`` will + be applied. Examples include, + "compute.googleapis.com/Instance" and + "storage.googleapis.com/Bucket". + etag (str): + Optional. ``etag`` is returned from the service in the + response. As a user of the service, you may provide an etag + value in this field to prevent stale resources. + backup_vault (str): + Required. Resource name of backup vault which + will be used as storage location for backups. + Format: + + projects/{project}/locations/{location}/backupVaults/{backupvault} + backup_vault_service_account (str): + Output only. The Google Cloud Platform + Service Account to be used by the BackupVault + for taking backups. Specify the email address of + the Backup Vault Service Account. 
+ """ + + class State(proto.Enum): + r"""``State`` enumerates the possible states for a ``BackupPlan``. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The resource is being created. + ACTIVE (2): + The resource has been created and is fully + usable. + DELETING (3): + The resource is being deleted. + INACTIVE (4): + The resource has been created but is not + usable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + INACTIVE = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + backup_rules: MutableSequence["BackupRule"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="BackupRule", + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + resource_type: str = proto.Field( + proto.STRING, + number=8, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + backup_vault: str = proto.Field( + proto.STRING, + number=10, + ) + backup_vault_service_account: str = proto.Field( + proto.STRING, + number=11, + ) + + +class BackupRule(proto.Message): + r"""``BackupRule`` binds the backup schedule to a retention policy. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + rule_id (str): + Required. Immutable. The unique id of this ``BackupRule``. + The ``rule_id`` is unique per ``BackupPlan``.The ``rule_id`` + must start with a lowercase letter followed by up to 62 + lowercase letters, numbers, or hyphens. Pattern, + /[a-z][a-z0-9-]{,62}/. + backup_retention_days (int): + Required. 
Configures the duration for which + backup data will be kept. It is defined in + 'days'. The value should be greater than or + equal to minimum enforced retention of the + backup vault. + standard_schedule (google.cloud.backupdr_v1.types.StandardSchedule): + Required. Defines a schedule that runs within + the confines of a defined window of time. + + This field is a member of `oneof`_ ``backup_schedule_oneof``. + """ + + rule_id: str = proto.Field( + proto.STRING, + number=1, + ) + backup_retention_days: int = proto.Field( + proto.INT32, + number=4, + ) + standard_schedule: "StandardSchedule" = proto.Field( + proto.MESSAGE, + number=5, + oneof="backup_schedule_oneof", + message="StandardSchedule", + ) + + +class StandardSchedule(proto.Message): + r"""``StandardSchedule`` defines a schedule that runs within the confines + of a defined window of days. We can define recurrence type for + schedule as HOURLY, DAILY, WEEKLY, MONTHLY or YEARLY. + + Attributes: + recurrence_type (google.cloud.backupdr_v1.types.StandardSchedule.RecurrenceType): + Required. Specifies the ``RecurrenceType`` for the schedule. + hourly_frequency (int): + Optional. Specifies frequency for hourly backups. An hourly + frequency of 2 means jobs will run every 2 hours from start + time till end time defined. + + This is required for ``recurrence_type``, ``HOURLY`` and is + not applicable otherwise. A validation error will occur if a + value is supplied and ``recurrence_type`` is not ``HOURLY``. + + Value of hourly frequency should be between 6 and 23. + + Reason for limit: We found that there is bandwidth + limitation of 3GB/S for GMI while taking a backup and 5GB/S + while doing a restore. Given the amount of parallel backups + and restore we are targeting, this will potentially take the + backup time to mins and hours (in worst case scenario). + days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Optional. Specifies days of week like, MONDAY or TUESDAY, on + which jobs will run. 
+ + This is required for ``recurrence_type``, ``WEEKLY`` and is + not applicable otherwise. A validation error will occur if a + value is supplied and ``recurrence_type`` is not ``WEEKLY``. + days_of_month (MutableSequence[int]): + Optional. Specifies days of months like 1, 5, or 14 on which + jobs will run. + + Values for ``days_of_month`` are only applicable for + ``recurrence_type``, ``MONTHLY`` and ``YEARLY``. A + validation error will occur if other values are supplied. + week_day_of_month (google.cloud.backupdr_v1.types.WeekDayOfMonth): + Optional. Specifies a week day of the month like, FIRST + SUNDAY or LAST MONDAY, on which jobs will run. This will be + specified by two fields in ``WeekDayOfMonth``, one for the + day, e.g. ``MONDAY``, and one for the week, e.g. ``LAST``. + + This field is only applicable for ``recurrence_type``, + ``MONTHLY`` and ``YEARLY``. A validation error will occur if + other values are supplied. + months (MutableSequence[google.type.month_pb2.Month]): + Optional. Specifies the months of year, like ``FEBRUARY`` + and/or ``MAY``, on which jobs will run. + + This field is only applicable when ``recurrence_type`` is + ``YEARLY``. A validation error will occur if other values + are supplied. + backup_window (google.cloud.backupdr_v1.types.BackupWindow): + Required. A BackupWindow defines the window of day during + which backup jobs will run. Jobs are queued at the beginning + of the window and will be marked as ``NOT_RUN`` if they do + not start by the end of the window. + + Note: running jobs will not be cancelled at the end of the + window. + time_zone (str): + Required. The time zone to be used when interpreting the + schedule. The value of this field must be a time zone name + from the IANA tz database. See + https://en.wikipedia.org/wiki/List_of_tz_database_time_zones + for the list of valid timezone names. For e.g., + Europe/Paris. 
+ """ + + class RecurrenceType(proto.Enum): + r"""``RecurrenceTypes`` enumerates the applicable periodicity for the + schedule. + + Values: + RECURRENCE_TYPE_UNSPECIFIED (0): + recurrence type not set + HOURLY (1): + The ``BackupRule`` is to be applied hourly. + DAILY (2): + The ``BackupRule`` is to be applied daily. + WEEKLY (3): + The ``BackupRule`` is to be applied weekly. + MONTHLY (4): + The ``BackupRule`` is to be applied monthly. + YEARLY (5): + The ``BackupRule`` is to be applied yearly. + """ + RECURRENCE_TYPE_UNSPECIFIED = 0 + HOURLY = 1 + DAILY = 2 + WEEKLY = 3 + MONTHLY = 4 + YEARLY = 5 + + recurrence_type: RecurrenceType = proto.Field( + proto.ENUM, + number=1, + enum=RecurrenceType, + ) + hourly_frequency: int = proto.Field( + proto.INT32, + number=2, + ) + days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=3, + enum=dayofweek_pb2.DayOfWeek, + ) + days_of_month: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=4, + ) + week_day_of_month: "WeekDayOfMonth" = proto.Field( + proto.MESSAGE, + number=5, + message="WeekDayOfMonth", + ) + months: MutableSequence[month_pb2.Month] = proto.RepeatedField( + proto.ENUM, + number=6, + enum=month_pb2.Month, + ) + backup_window: "BackupWindow" = proto.Field( + proto.MESSAGE, + number=7, + message="BackupWindow", + ) + time_zone: str = proto.Field( + proto.STRING, + number=8, + ) + + +class BackupWindow(proto.Message): + r"""``BackupWindow`` defines a window of the day during which backup + jobs will run. + + Attributes: + start_hour_of_day (int): + Required. The hour of day (0-23) when the + window starts for e.g. if value of start hour of + day is 6 that mean backup window start at 6:00. + end_hour_of_day (int): + Required. The hour of day (1-24) when the window end for + e.g. if value of end hour of day is 10 that mean backup + window end time is 10:00. + + End hour of day should be greater than start hour of day. 
0 + <= start_hour_of_day < end_hour_of_day <= 24 + + End hour of day is not included in the backup window, which means if + end_hour_of_day= 10 jobs should start before 10:00. + """ + + start_hour_of_day: int = proto.Field( + proto.INT32, + number=1, + ) + end_hour_of_day: int = proto.Field( + proto.INT32, + number=2, + ) + + +class WeekDayOfMonth(proto.Message): + r"""``WeekDayOfMonth`` defines the week day of the month on which the + backups will run. The message combines a ``WeekOfMonth`` and + ``DayOfWeek`` to produce values like ``FIRST``/``MONDAY`` or + ``LAST``/``FRIDAY``. + + Attributes: + week_of_month (google.cloud.backupdr_v1.types.WeekDayOfMonth.WeekOfMonth): + Required. Specifies the week of the month. + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Required. Specifies the day of the week. + """ + + class WeekOfMonth(proto.Enum): + r"""``WeekOfMonth`` enumerates possible weeks in the month, e.g. the + first, third, or last week of the month. + + Values: + WEEK_OF_MONTH_UNSPECIFIED (0): + The zero value. Do not use. + FIRST (1): + The first week of the month. + SECOND (2): + The second week of the month. + THIRD (3): + The third week of the month. + FOURTH (4): + The fourth week of the month. + LAST (5): + The last week of the month. + """ + WEEK_OF_MONTH_UNSPECIFIED = 0 + FIRST = 1 + SECOND = 2 + THIRD = 3 + FOURTH = 4 + LAST = 5 + + week_of_month: WeekOfMonth = proto.Field( + proto.ENUM, + number=1, + enum=WeekOfMonth, + ) + day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=2, + enum=dayofweek_pb2.DayOfWeek, + ) + + +class CreateBackupPlanRequest(proto.Message): + r"""The request message for creating a ``BackupPlan``. + + Attributes: + parent (str): + Required. The ``BackupPlan`` project and location in the + format ``projects/{project}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + backup_plan_id (str): + Required. The name of the ``BackupPlan`` to create. 
The name + must be unique for the specified project and location. The + name must start with a lowercase letter followed by up to 62 + lowercase letters, numbers, or hyphens. Pattern, + /[a-z][a-z0-9-]{,62}/. + backup_plan (google.cloud.backupdr_v1.types.BackupPlan): + Required. The ``BackupPlan`` resource object to create. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. + If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_plan: "BackupPlan" = proto.Field( + proto.MESSAGE, + number=3, + message="BackupPlan", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupPlansRequest(proto.Message): + r"""The request message for getting a list ``BackupPlan``. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + ``BackupPlans`` information. Format: + ``projects/{project}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for e.g. + **us-central1**. To retrieve backup plans for all locations, + use "-" for the ``{location}`` value. + page_size (int): + Optional. The maximum number of ``BackupPlans`` to return in + a single response. 
If not specified, a default value will be + chosen by the service. Note that the response may include a + partial list and a caller should only rely on the response's + [next_page_token][google.cloud.backupdr.v1.ListBackupPlansResponse.next_page_token] + to determine if there are more instances left to be queried. + page_token (str): + Optional. The value of + [next_page_token][google.cloud.backupdr.v1.ListBackupPlansResponse.next_page_token] + received from a previous ``ListBackupPlans`` call. Provide + this to retrieve the subsequent page in a multi-page list of + results. When paginating, all other parameters provided to + ``ListBackupPlans`` must match the call that provided the + page token. + filter (str): + Optional. Field match expression used to + filter the results. + order_by (str): + Optional. Field by which to sort the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListBackupPlansResponse(proto.Message): + r"""The response message for getting a list of ``BackupPlan``. + + Attributes: + backup_plans (MutableSequence[google.cloud.backupdr_v1.types.BackupPlan]): + The list of ``BackupPlans`` in the project for the specified + location. + + If the ``{location}`` value in the request is "-", the + response contains a list of resources from all locations. In + case any location is unreachable, the response will only + return backup plans in reachable locations and the + 'unreachable' field will be populated with a list of + unreachable locations. BackupPlan + next_page_token (str): + A token which may be sent as + [page_token][google.cloud.backupdr.v1.ListBackupPlansRequest.page_token] + in a subsequent ``ListBackupPlans`` call to retrieve the + next page of results. 
If this field is omitted or empty, + then there are no more results to return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_plans: MutableSequence["BackupPlan"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupPlan", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupPlanRequest(proto.Message): + r"""The request message for getting a ``BackupPlan``. + + Attributes: + name (str): + Required. The resource name of the ``BackupPlan`` to + retrieve. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupPlanRequest(proto.Message): + r"""The request message for deleting a ``BackupPlan``. + + Attributes: + name (str): + Required. The resource name of the ``BackupPlan`` to delete. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py new file mode 100644 index 000000000000..23a4309a3fd8 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py @@ -0,0 +1,454 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupPlanAssociation", + "RuleConfigInfo", + "CreateBackupPlanAssociationRequest", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "GetBackupPlanAssociationRequest", + "DeleteBackupPlanAssociationRequest", + "TriggerBackupRequest", + }, +) + + +class BackupPlanAssociation(proto.Message): + r"""A BackupPlanAssociation represents a single + BackupPlanAssociation which contains details like workload, + backup plan etc + + Attributes: + name (str): + Output only. Identifier. 
The resource name of + BackupPlanAssociation in below format Format : + + projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId} + resource_type (str): + Optional. Resource type of workload on which + backupplan is applied + resource (str): + Required. Immutable. Resource name of + workload on which backupplan is applied + backup_plan (str): + Required. Resource name of backup plan which + needs to be applied on workload. Format: + + projects/{project}/locations/{location}/backupPlans/{backupPlanId} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + state (google.cloud.backupdr_v1.types.BackupPlanAssociation.State): + Output only. The BackupPlanAssociation + resource state. + rules_config_info (MutableSequence[google.cloud.backupdr_v1.types.RuleConfigInfo]): + Output only. The config info related to + backup rules. + data_source (str): + Output only. Output Only. + + Resource name of data source which will be used + as storage location for backups taken. + Format : + + projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource} + """ + + class State(proto.Enum): + r"""Enum for State of BackupPlan Association + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The resource is being created. + ACTIVE (2): + The resource has been created and is fully + usable. + DELETING (3): + The resource is being deleted. + INACTIVE (4): + The resource has been created but is not + usable. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + INACTIVE = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + resource_type: str = proto.Field( + proto.STRING, + number=2, + ) + resource: str = proto.Field( + proto.STRING, + number=3, + ) + backup_plan: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + rules_config_info: MutableSequence["RuleConfigInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="RuleConfigInfo", + ) + data_source: str = proto.Field( + proto.STRING, + number=9, + ) + + +class RuleConfigInfo(proto.Message): + r"""Message for rules config info. + + Attributes: + rule_id (str): + Output only. Output Only. + + Backup Rule id fetched from backup plan. + last_backup_state (google.cloud.backupdr_v1.types.RuleConfigInfo.LastBackupState): + Output only. The last backup state for rule. + last_backup_error (google.rpc.status_pb2.Status): + Output only. Output Only. + + google.rpc.Status object to store the last + backup error. + last_successful_backup_consistency_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The point in time when the last + successful backup was captured from the source. + """ + + class LastBackupState(proto.Enum): + r"""Enum for LastBackupState + + Values: + LAST_BACKUP_STATE_UNSPECIFIED (0): + State not set. + FIRST_BACKUP_PENDING (1): + The first backup is pending. + PERMISSION_DENIED (2): + The most recent backup could not be + run/failed because of the lack of permissions. + SUCCEEDED (3): + The last backup operation succeeded. + FAILED (4): + The last backup operation failed. 
+ """ + LAST_BACKUP_STATE_UNSPECIFIED = 0 + FIRST_BACKUP_PENDING = 1 + PERMISSION_DENIED = 2 + SUCCEEDED = 3 + FAILED = 4 + + rule_id: str = proto.Field( + proto.STRING, + number=1, + ) + last_backup_state: LastBackupState = proto.Field( + proto.ENUM, + number=3, + enum=LastBackupState, + ) + last_backup_error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + last_successful_backup_consistency_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class CreateBackupPlanAssociationRequest(proto.Message): + r"""Request message for creating a backup plan. + + Attributes: + parent (str): + Required. The backup plan association project and location + in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + backup_plan_association_id (str): + Required. The name of the backup plan + association to create. The name must be unique + for the specified project and location. + backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and t he request times out. + If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_association_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_plan_association: "BackupPlanAssociation" = proto.Field( + proto.MESSAGE, + number=3, + message="BackupPlanAssociation", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupPlanAssociationsRequest(proto.Message): + r"""Request message for List BackupPlanAssociation + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations for + all locations, use "-" for the ``{location}`` value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupPlanAssociationsResponse(proto.Message): + r"""Response message for List BackupPlanAssociation + + Attributes: + backup_plan_associations (MutableSequence[google.cloud.backupdr_v1.types.BackupPlanAssociation]): + The list of Backup Plan Associations in the project for the + specified location. + + If the ``{location}`` value in the request is "-", the + response contains a list of instances from all locations. 
In + case any location is unreachable, the response will only + return backup plan associations in reachable locations and + the 'unreachable' field will be populated with a list of + unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_plan_associations: MutableSequence[ + "BackupPlanAssociation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupPlanAssociation", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupPlanAssociationRequest(proto.Message): + r"""Request message for getting a BackupPlanAssociation resource. + + Attributes: + name (str): + Required. Name of the backup plan association resource, in + the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupPlanAssociationRequest(proto.Message): + r"""Request message for deleting a backup plan association. + + Attributes: + name (str): + Required. Name of the backup plan association resource, in + the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class TriggerBackupRequest(proto.Message): + r"""Request message for triggering a backup. + + Attributes: + name (str): + Required. Name of the backup plan association resource, in + the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + rule_id (str): + Required. backup rule_id for which a backup needs to be + triggered. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py new file mode 100644 index 000000000000..ced3cd195702 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py @@ -0,0 +1,2065 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.backupdr_v1.types import backupvault_ba, backupvault_gce + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupConfigState", + "BackupView", + "BackupVaultView", + "BackupVault", + "DataSource", + "BackupConfigInfo", + "GcpBackupConfig", + "BackupApplianceBackupConfig", + "DataSourceGcpResource", + "DataSourceBackupApplianceApplication", + "ServiceLockInfo", + "BackupApplianceLockInfo", + "BackupLock", + "Backup", + "CreateBackupVaultRequest", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GetBackupVaultRequest", + "UpdateBackupVaultRequest", + "DeleteBackupVaultRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "GetDataSourceRequest", + "UpdateDataSourceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "GetBackupRequest", + "UpdateBackupRequest", + "DeleteBackupRequest", + "RestoreBackupRequest", + "RestoreBackupResponse", + "TargetResource", + "GcpResource", + }, +) + + +class BackupConfigState(proto.Enum): + r"""Backup configuration state. Is the resource configured for + backup? + + Values: + BACKUP_CONFIG_STATE_UNSPECIFIED (0): + The possible states of backup configuration. + Status not set. + ACTIVE (1): + The data source is actively protected (i.e. 
+ there is a BackupPlanAssociation or Appliance + SLA pointing to it) + PASSIVE (2): + The data source is no longer protected (but + may have backups under it) + """ + BACKUP_CONFIG_STATE_UNSPECIFIED = 0 + ACTIVE = 1 + PASSIVE = 2 + + +class BackupView(proto.Enum): + r"""BackupView contains enum options for Partial and Full view. + + Values: + BACKUP_VIEW_UNSPECIFIED (0): + If the value is not set, the default 'FULL' + view is used. + BACKUP_VIEW_BASIC (1): + Includes basic data about the Backup, but not + the full contents. + BACKUP_VIEW_FULL (2): + Includes all data about the Backup. + This is the default value (for both ListBackups + and GetBackup). + """ + BACKUP_VIEW_UNSPECIFIED = 0 + BACKUP_VIEW_BASIC = 1 + BACKUP_VIEW_FULL = 2 + + +class BackupVaultView(proto.Enum): + r"""BackupVaultView contains enum options for Partial and Full + view. + + Values: + BACKUP_VAULT_VIEW_UNSPECIFIED (0): + If the value is not set, the default 'FULL' + view is used. + BACKUP_VAULT_VIEW_BASIC (1): + Includes basic data about the Backup Vault, + but not the full contents. + BACKUP_VAULT_VIEW_FULL (2): + Includes all data about the Backup Vault. + This is the default value (for both + ListBackupVaults and GetBackupVault). + """ + BACKUP_VAULT_VIEW_UNSPECIFIED = 0 + BACKUP_VAULT_VIEW_BASIC = 1 + BACKUP_VAULT_VIEW_FULL = 2 + + +class BackupVault(proto.Message): + r"""Message describing a BackupVault object. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. Name of the backup vault to create. + It must have the + format\ ``"projects/{project}/locations/{location}/backupVaults/{backupvault}"``. + ``{backupvault}`` cannot be changed after creation. It must + be between 3-63 characters long and must be unique within + the project and location. + description (str): + Optional. The description of the BackupVault + instance (2048 characters or less). 
+ + This field is a member of `oneof`_ ``_description``. + labels (MutableMapping[str, str]): + Optional. Resource labels to represent user + provided metadata. No labels currently defined: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + + This field is a member of `oneof`_ ``_create_time``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + + This field is a member of `oneof`_ ``_update_time``. + backup_minimum_enforced_retention_duration (google.protobuf.duration_pb2.Duration): + Required. The default and minimum enforced + retention for each backup within the backup + vault. The enforced retention for each backup + can be extended. + + This field is a member of `oneof`_ ``_backup_minimum_enforced_retention_duration``. + deletable (bool): + Output only. Set to true when there are no + backups nested under this resource. + + This field is a member of `oneof`_ ``_deletable``. + etag (str): + Optional. Server specified ETag for the + backup vault resource to prevent simultaneous + updates from overwiting each other. + + This field is a member of `oneof`_ ``_etag``. + state (google.cloud.backupdr_v1.types.BackupVault.State): + Output only. The BackupVault resource + instance state. + effective_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Time after which the BackupVault + resource is locked. + + This field is a member of `oneof`_ ``_effective_time``. + backup_count (int): + Output only. The number of backups in this + backup vault. + service_account (str): + Output only. Service account used by the + BackupVault Service for this BackupVault. The + user should grant this account permissions in + their workload project to enable the service to + run backups and restores there. + total_stored_bytes (int): + Output only. Total size of the storage used + by all backup resources. + uid (str): + Output only. 
Output only + Immutable after resource creation until resource + deletion. + annotations (MutableMapping[str, str]): + Optional. User annotations. See + https://google.aip.dev/128#annotations Stores + small amounts of arbitrary data. + access_restriction (google.cloud.backupdr_v1.types.BackupVault.AccessRestriction): + Optional. Note: This field is added for future use case and + will not be supported in the current release. + + Optional. + + Access restriction for the backup vault. Default value is + WITHIN_ORGANIZATION if not provided during creation. + """ + + class State(proto.Enum): + r"""Holds the state of the backup vault resource. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The backup vault is being created. + ACTIVE (2): + The backup vault has been created and is + fully usable. + DELETING (3): + The backup vault is being deleted. + ERROR (4): + The backup vault is experiencing an issue and + might be unusable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + ERROR = 4 + + class AccessRestriction(proto.Enum): + r"""Holds the access restriction for the backup vault. + + Values: + ACCESS_RESTRICTION_UNSPECIFIED (0): + Access restriction not set. + WITHIN_PROJECT (1): + Access to or from resources outside your + current project will be denied. + WITHIN_ORGANIZATION (2): + Access to or from resources outside your + current organization will be denied. + UNRESTRICTED (3): + No access restriction. 
+ """ + ACCESS_RESTRICTION_UNSPECIFIED = 0 + WITHIN_PROJECT = 1 + WITHIN_ORGANIZATION = 2 + UNRESTRICTED = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message=timestamp_pb2.Timestamp, + ) + backup_minimum_enforced_retention_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=20, + optional=True, + message=duration_pb2.Duration, + ) + deletable: bool = proto.Field( + proto.BOOL, + number=8, + optional=True, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + optional=True, + ) + state: State = proto.Field( + proto.ENUM, + number=10, + enum=State, + ) + effective_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + optional=True, + message=timestamp_pb2.Timestamp, + ) + backup_count: int = proto.Field( + proto.INT64, + number=17, + ) + service_account: str = proto.Field( + proto.STRING, + number=18, + ) + total_stored_bytes: int = proto.Field( + proto.INT64, + number=19, + ) + uid: str = proto.Field( + proto.STRING, + number=21, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=22, + ) + access_restriction: AccessRestriction = proto.Field( + proto.ENUM, + number=24, + enum=AccessRestriction, + ) + + +class DataSource(proto.Message): + r"""Message describing a DataSource object. + Datasource object used to represent Datasource details for both + admin and basic view. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. Name of the datasource to create. + It must have the + format\ ``"projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}"``. + ``{datasource}`` cannot be changed after creation. It must + be between 3-63 characters long and must be unique within + the backup vault. + state (google.cloud.backupdr_v1.types.DataSource.State): + Output only. The DataSource resource instance + state. + labels (MutableMapping[str, str]): + Optional. Resource labels to represent user + provided metadata. No labels currently defined: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + + This field is a member of `oneof`_ ``_create_time``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + + This field is a member of `oneof`_ ``_update_time``. + backup_count (int): + Number of backups in the data source. + + This field is a member of `oneof`_ ``_backup_count``. + etag (str): + Server specified ETag for the + ManagementServer resource to prevent + simultaneous updates from overwiting each other. + + This field is a member of `oneof`_ ``_etag``. + total_stored_bytes (int): + The number of bytes (metadata and data) + stored in this datasource. + + This field is a member of `oneof`_ ``_total_stored_bytes``. + config_state (google.cloud.backupdr_v1.types.BackupConfigState): + Output only. The backup configuration state. + backup_config_info (google.cloud.backupdr_v1.types.BackupConfigInfo): + Output only. Details of how the resource is + configured for backup. 
+ data_source_gcp_resource (google.cloud.backupdr_v1.types.DataSourceGcpResource): + The backed up resource is a Google Cloud + resource. The word 'DataSource' was included in + the names to indicate that this is the + representation of the Google Cloud resource used + within the DataSource object. + + This field is a member of `oneof`_ ``source_resource``. + data_source_backup_appliance_application (google.cloud.backupdr_v1.types.DataSourceBackupApplianceApplication): + The backed up resource is a backup appliance + application. + + This field is a member of `oneof`_ ``source_resource``. + """ + + class State(proto.Enum): + r"""Holds the state of the data source resource. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The data source is being created. + ACTIVE (2): + The data source has been created and is fully + usable. + DELETING (3): + The data source is being deleted. + ERROR (4): + The data source is experiencing an issue and + might be unusable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + ERROR = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=21, + enum=State, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=timestamp_pb2.Timestamp, + ) + backup_count: int = proto.Field( + proto.INT64, + number=7, + optional=True, + ) + etag: str = proto.Field( + proto.STRING, + number=14, + optional=True, + ) + total_stored_bytes: int = proto.Field( + proto.INT64, + number=23, + optional=True, + ) + config_state: "BackupConfigState" = proto.Field( + proto.ENUM, + number=24, + enum="BackupConfigState", + ) + backup_config_info: "BackupConfigInfo" = 
proto.Field( + proto.MESSAGE, + number=25, + message="BackupConfigInfo", + ) + data_source_gcp_resource: "DataSourceGcpResource" = proto.Field( + proto.MESSAGE, + number=26, + oneof="source_resource", + message="DataSourceGcpResource", + ) + data_source_backup_appliance_application: "DataSourceBackupApplianceApplication" = ( + proto.Field( + proto.MESSAGE, + number=27, + oneof="source_resource", + message="DataSourceBackupApplianceApplication", + ) + ) + + +class BackupConfigInfo(proto.Message): + r"""BackupConfigInfo has information about how the resource is + configured for Backup and about the most recent backup to this + vault. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + last_backup_state (google.cloud.backupdr_v1.types.BackupConfigInfo.LastBackupState): + Output only. The status of the last backup to + this BackupVault + last_successful_backup_consistency_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. If the last backup were + successful, this field has the consistency date. + last_backup_error (google.rpc.status_pb2.Status): + Output only. If the last backup failed, this + field has the error message. + gcp_backup_config (google.cloud.backupdr_v1.types.GcpBackupConfig): + Configuration for a Google Cloud resource. + + This field is a member of `oneof`_ ``backup_config``. + backup_appliance_backup_config (google.cloud.backupdr_v1.types.BackupApplianceBackupConfig): + Configuration for an application backed up by + a Backup Appliance. + + This field is a member of `oneof`_ ``backup_config``. 
+ """ + + class LastBackupState(proto.Enum): + r"""LastBackupstate tracks whether the last backup was not yet + started, successful, failed, or could not be run because of the + lack of permissions. + + Values: + LAST_BACKUP_STATE_UNSPECIFIED (0): + Status not set. + FIRST_BACKUP_PENDING (1): + The first backup has not yet completed + SUCCEEDED (2): + The most recent backup was successful + FAILED (3): + The most recent backup failed + PERMISSION_DENIED (4): + The most recent backup could not be + run/failed because of the lack of permissions + """ + LAST_BACKUP_STATE_UNSPECIFIED = 0 + FIRST_BACKUP_PENDING = 1 + SUCCEEDED = 2 + FAILED = 3 + PERMISSION_DENIED = 4 + + last_backup_state: LastBackupState = proto.Field( + proto.ENUM, + number=1, + enum=LastBackupState, + ) + last_successful_backup_consistency_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + last_backup_error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=3, + message=status_pb2.Status, + ) + gcp_backup_config: "GcpBackupConfig" = proto.Field( + proto.MESSAGE, + number=4, + oneof="backup_config", + message="GcpBackupConfig", + ) + backup_appliance_backup_config: "BackupApplianceBackupConfig" = proto.Field( + proto.MESSAGE, + number=5, + oneof="backup_config", + message="BackupApplianceBackupConfig", + ) + + +class GcpBackupConfig(proto.Message): + r"""GcpBackupConfig captures the Backup configuration details for + Google Cloud resources. All Google Cloud resources regardless of + type are protected with backup plan associations. + + Attributes: + backup_plan (str): + The name of the backup plan. + backup_plan_description (str): + The description of the backup plan. + backup_plan_association (str): + The name of the backup plan association. 
+ backup_plan_rules (MutableSequence[str]): + The names of the backup plan rules which + point to this backupvault + """ + + backup_plan: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_description: str = proto.Field( + proto.STRING, + number=2, + ) + backup_plan_association: str = proto.Field( + proto.STRING, + number=3, + ) + backup_plan_rules: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class BackupApplianceBackupConfig(proto.Message): + r"""BackupApplianceBackupConfig captures the backup configuration + for applications that are protected by Backup Appliances. + + Attributes: + backup_appliance_name (str): + The name of the backup appliance. + backup_appliance_id (int): + The ID of the backup appliance. + sla_id (int): + The ID of the SLA of this application. + application_name (str): + The name of the application. + host_name (str): + The name of the host where the application is + running. + slt_name (str): + The name of the SLT associated with the + application. + slp_name (str): + The name of the SLP associated with the + application. + """ + + backup_appliance_name: str = proto.Field( + proto.STRING, + number=1, + ) + backup_appliance_id: int = proto.Field( + proto.INT64, + number=2, + ) + sla_id: int = proto.Field( + proto.INT64, + number=3, + ) + application_name: str = proto.Field( + proto.STRING, + number=4, + ) + host_name: str = proto.Field( + proto.STRING, + number=5, + ) + slt_name: str = proto.Field( + proto.STRING, + number=6, + ) + slp_name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class DataSourceGcpResource(proto.Message): + r"""DataSourceGcpResource is used for protected resources that + are Google Cloud Resources. This name is easeier to understand + than GcpResourceDataSource or GcpDataSourceResource + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcp_resourcename (str): + Output only. 
Full resource pathname URL of + the source Google Cloud resource. + location (str): + Location of the resource: + //"global"/"unspecified". + type_ (str): + The type of the Google Cloud resource. Use + the Unified Resource Type, eg. + compute.googleapis.com/Instance. + compute_instance_datasource_properties (google.cloud.backupdr_v1.types.ComputeInstanceDataSourceProperties): + ComputeInstanceDataSourceProperties has a + subset of Compute Instance properties that are + useful at the Datasource level. + + This field is a member of `oneof`_ ``gcp_resource_properties``. + """ + + gcp_resourcename: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + compute_instance_datasource_properties: backupvault_gce.ComputeInstanceDataSourceProperties = proto.Field( + proto.MESSAGE, + number=4, + oneof="gcp_resource_properties", + message=backupvault_gce.ComputeInstanceDataSourceProperties, + ) + + +class DataSourceBackupApplianceApplication(proto.Message): + r"""BackupApplianceApplication describes a Source Resource when + it is an application backed up by a BackupAppliance. + + Attributes: + application_name (str): + The name of the Application as known to the + Backup Appliance. + backup_appliance (str): + Appliance name. + appliance_id (int): + Appliance Id of the Backup Appliance. + type_ (str): + The type of the application. e.g. VMBackup + application_id (int): + The appid field of the application within the + Backup Appliance. + hostname (str): + Hostname of the host where the application is + running. + host_id (int): + Hostid of the application host. 
+ """ + + application_name: str = proto.Field( + proto.STRING, + number=1, + ) + backup_appliance: str = proto.Field( + proto.STRING, + number=2, + ) + appliance_id: int = proto.Field( + proto.INT64, + number=3, + ) + type_: str = proto.Field( + proto.STRING, + number=4, + ) + application_id: int = proto.Field( + proto.INT64, + number=8, + ) + hostname: str = proto.Field( + proto.STRING, + number=6, + ) + host_id: int = proto.Field( + proto.INT64, + number=7, + ) + + +class ServiceLockInfo(proto.Message): + r"""ServiceLockInfo represents the details of a lock taken by the + service on a Backup resource. + + Attributes: + operation (str): + Output only. The name of the operation that + created this lock. The lock will automatically + be released when the operation completes. + """ + + operation: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BackupApplianceLockInfo(proto.Message): + r"""BackupApplianceLockInfo contains metadata about the + backupappliance that created the lock. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backup_appliance_id (int): + Required. The ID of the backup/recovery + appliance that created this lock. + backup_appliance_name (str): + Required. The name of the backup/recovery + appliance that created this lock. + lock_reason (str): + Required. The reason for the lock: e.g. + MOUNT/RESTORE/BACKUP/etc. The value of this + string is only meaningful to the client and it + is not interpreted by the BackupVault service. + job_name (str): + The job name on the backup/recovery appliance + that created this lock. + + This field is a member of `oneof`_ ``lock_source``. + backup_image (str): + The image name that depends on this Backup. 
+ + This field is a member of `oneof`_ ``lock_source``. + sla_id (int): + The SLA on the backup/recovery appliance that + owns the lock. + + This field is a member of `oneof`_ ``lock_source``. + """ + + backup_appliance_id: int = proto.Field( + proto.INT64, + number=1, + ) + backup_appliance_name: str = proto.Field( + proto.STRING, + number=2, + ) + lock_reason: str = proto.Field( + proto.STRING, + number=5, + ) + job_name: str = proto.Field( + proto.STRING, + number=6, + oneof="lock_source", + ) + backup_image: str = proto.Field( + proto.STRING, + number=7, + oneof="lock_source", + ) + sla_id: int = proto.Field( + proto.INT64, + number=8, + oneof="lock_source", + ) + + +class BackupLock(proto.Message): + r"""BackupLock represents a single lock on a Backup resource. An + unexpired lock on a Backup prevents the Backup from being + deleted. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + lock_until_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The time after which this lock is + not considered valid and will no longer protect + the Backup from deletion. + backup_appliance_lock_info (google.cloud.backupdr_v1.types.BackupApplianceLockInfo): + If the client is a backup and recovery + appliance, this contains metadata about why the + lock exists. + + This field is a member of `oneof`_ ``ClientLockInfo``. + service_lock_info (google.cloud.backupdr_v1.types.ServiceLockInfo): + Output only. Contains metadata about the lock + exist for Google Cloud native backups. + + This field is a member of `oneof`_ ``ClientLockInfo``. 
+ """ + + lock_until_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + backup_appliance_lock_info: "BackupApplianceLockInfo" = proto.Field( + proto.MESSAGE, + number=3, + oneof="ClientLockInfo", + message="BackupApplianceLockInfo", + ) + service_lock_info: "ServiceLockInfo" = proto.Field( + proto.MESSAGE, + number=4, + oneof="ClientLockInfo", + message="ServiceLockInfo", + ) + + +class Backup(proto.Message): + r"""Message describing a Backup object. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. Name of the backup to create. It + must have the + format\ ``"projects//locations//backupVaults//dataSources/{datasource}/backups/{backup}"``. + ``{backup}`` cannot be changed after creation. It must be + between 3-63 characters long and must be unique within the + datasource. + description (str): + Output only. The description of the Backup + instance (2048 characters or less). + + This field is a member of `oneof`_ ``_description``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + + This field is a member of `oneof`_ ``_create_time``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + + This field is a member of `oneof`_ ``_update_time``. + labels (MutableMapping[str, str]): + Optional. Resource labels to represent user + provided metadata. No labels currently defined. + enforced_retention_end_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The backup can not be deleted + before this time. 
+ + This field is a member of `oneof`_ ``_enforced_retention_end_time``. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. When this backup is automatically + expired. + + This field is a member of `oneof`_ ``_expire_time``. + consistency_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The point in time when this + backup was captured from the source. + + This field is a member of `oneof`_ ``_consistency_time``. + etag (str): + Optional. Server specified ETag to prevent + updates from overwriting each other. + + This field is a member of `oneof`_ ``_etag``. + state (google.cloud.backupdr_v1.types.Backup.State): + Output only. The Backup resource instance + state. + service_locks (MutableSequence[google.cloud.backupdr_v1.types.BackupLock]): + Output only. The list of BackupLocks taken by + the service to prevent the deletion of the + backup. + backup_appliance_locks (MutableSequence[google.cloud.backupdr_v1.types.BackupLock]): + Optional. The list of BackupLocks taken by + the accessor Backup Appliance. + compute_instance_backup_properties (google.cloud.backupdr_v1.types.ComputeInstanceBackupProperties): + Output only. Compute Engine specific backup + properties. + + This field is a member of `oneof`_ ``backup_properties``. + backup_appliance_backup_properties (google.cloud.backupdr_v1.types.BackupApplianceBackupProperties): + Output only. Backup Appliance specific backup + properties. + + This field is a member of `oneof`_ ``backup_properties``. + backup_type (google.cloud.backupdr_v1.types.Backup.BackupType): + Output only. Type of the backup, unspecified, + scheduled or ondemand. + gcp_backup_plan_info (google.cloud.backupdr_v1.types.Backup.GCPBackupPlanInfo): + Output only. Configuration for a Google Cloud + resource. + + This field is a member of `oneof`_ ``plan_info``. + resource_size_bytes (int): + Output only. source resource size in bytes at + the time of the backup. 
+ """ + + class State(proto.Enum): + r"""Holds the state of the backup resource. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The backup is being created. + ACTIVE (2): + The backup has been created and is fully + usable. + DELETING (3): + The backup is being deleted. + ERROR (4): + The backup is experiencing an issue and might + be unusable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + ERROR = 4 + + class BackupType(proto.Enum): + r"""Type of the backup, scheduled or ondemand. + + Values: + BACKUP_TYPE_UNSPECIFIED (0): + Backup type is unspecified. + SCHEDULED (1): + Scheduled backup. + ON_DEMAND (2): + On demand backup. + """ + BACKUP_TYPE_UNSPECIFIED = 0 + SCHEDULED = 1 + ON_DEMAND = 2 + + class GCPBackupPlanInfo(proto.Message): + r"""GCPBackupPlanInfo captures the plan configuration details of + Google Cloud resources at the time of backup. + + Attributes: + backup_plan (str): + Resource name of backup plan by which + workload is protected at the time of the backup. 
+ Format: + + projects/{project}/locations/{location}/backupPlans/{backupPlanId} + backup_plan_rule_id (str): + The rule id of the backup plan which + triggered this backup in case of scheduled + backup or used for + """ + + backup_plan: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + enforced_retention_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message=timestamp_pb2.Timestamp, + ) + consistency_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + optional=True, + message=timestamp_pb2.Timestamp, + ) + etag: str = proto.Field( + proto.STRING, + number=11, + optional=True, + ) + state: State = proto.Field( + proto.ENUM, + number=15, + enum=State, + ) + service_locks: MutableSequence["BackupLock"] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message="BackupLock", + ) + backup_appliance_locks: MutableSequence["BackupLock"] = proto.RepeatedField( + proto.MESSAGE, + number=18, + message="BackupLock", + ) + compute_instance_backup_properties: backupvault_gce.ComputeInstanceBackupProperties = proto.Field( + proto.MESSAGE, + number=19, + oneof="backup_properties", + message=backupvault_gce.ComputeInstanceBackupProperties, + ) + 
backup_appliance_backup_properties: backupvault_ba.BackupApplianceBackupProperties = proto.Field( + proto.MESSAGE, + number=21, + oneof="backup_properties", + message=backupvault_ba.BackupApplianceBackupProperties, + ) + backup_type: BackupType = proto.Field( + proto.ENUM, + number=20, + enum=BackupType, + ) + gcp_backup_plan_info: GCPBackupPlanInfo = proto.Field( + proto.MESSAGE, + number=22, + oneof="plan_info", + message=GCPBackupPlanInfo, + ) + resource_size_bytes: int = proto.Field( + proto.INT64, + number=23, + ) + + +class CreateBackupVaultRequest(proto.Message): + r"""Message for creating a BackupVault. + + Attributes: + parent (str): + Required. Value for parent. + backup_vault_id (str): + Required. ID of the requesting object If auto-generating ID + server-side, remove this field and backup_vault_id from the + method_signature of Create RPC + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is 'false'. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_vault_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_vault: "BackupVault" = proto.Field( + proto.MESSAGE, + number=3, + message="BackupVault", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class ListBackupVaultsRequest(proto.Message): + r"""Request message for listing backupvault stores. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve backupvault stores for + all locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + view (google.cloud.backupdr_v1.types.BackupVaultView): + Optional. Reserved for future use to provide + a BASIC & FULL view of Backup Vault. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + view: "BackupVaultView" = proto.Field( + proto.ENUM, + number=6, + enum="BackupVaultView", + ) + + +class ListBackupVaultsResponse(proto.Message): + r"""Response message for listing BackupVaults. 
+ + Attributes: + backup_vaults (MutableSequence[google.cloud.backupdr_v1.types.BackupVault]): + The list of BackupVault instances in the + project for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return + backup vaults in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_vaults: MutableSequence["BackupVault"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupVault", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class FetchUsableBackupVaultsRequest(proto.Message): + r"""Request message for fetching usable BackupVaults. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve backupvault stores for + all locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class FetchUsableBackupVaultsResponse(proto.Message): + r"""Response message for fetching usable BackupVaults. + + Attributes: + backup_vaults (MutableSequence[google.cloud.backupdr_v1.types.BackupVault]): + The list of BackupVault instances in the + project for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return + backup vaults in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_vaults: MutableSequence["BackupVault"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupVault", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupVaultRequest(proto.Message): + r"""Request message for getting a BackupVault. + + Attributes: + name (str): + Required. Name of the backupvault store resource name, in + the format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}' + view (google.cloud.backupdr_v1.types.BackupVaultView): + Optional. 
Reserved for future use to provide + a BASIC & FULL view of Backup Vault + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "BackupVaultView" = proto.Field( + proto.ENUM, + number=2, + enum="BackupVaultView", + ) + + +class UpdateBackupVaultRequest(proto.Message): + r"""Request message for updating a BackupVault. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the BackupVault resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then the request will fail. + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is 'false'. + force (bool): + Optional. If set to true, will not check plan + duration against backup vault enforcement + duration. 
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup_vault: "BackupVault" = proto.Field( + proto.MESSAGE, + number=2, + message="BackupVault", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + force: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteBackupVaultRequest(proto.Message): + r"""Message for deleting a BackupVault. + + Attributes: + name (str): + Required. Name of the resource. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + force (bool): + Optional. If set to true, any data source + from this backup vault will also be deleted. + etag (str): + The current etag of the backup vault. + If an etag is provided and does not match the + current etag of the connection, deletion will be + blocked. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is 'false'. + allow_missing (bool): + Optional. If true and the BackupVault is not + found, the request will succeed but no action + will be taken. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class ListDataSourcesRequest(proto.Message): + r"""Request message for listing DataSources. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + data sources information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve data sources for all + locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataSourcesResponse(proto.Message): + r"""Response message for listing DataSources. + + Attributes: + data_sources (MutableSequence[google.cloud.backupdr_v1.types.DataSource]): + The list of DataSource instances in the + project for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. 
In case any location is + unreachable, the response will only return data + sources in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + data_sources: MutableSequence["DataSource"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataSource", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetDataSourceRequest(proto.Message): + r"""Request message for getting a DataSource instance. + + Attributes: + name (str): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSource/{resource_name}' + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDataSourceRequest(proto.Message): + r"""Request message for updating a data source instance. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the DataSource resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then the request will fail. + data_source (google.cloud.backupdr_v1.types.DataSource): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. 
The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + allow_missing (bool): + Optional. Enable upsert. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + data_source: "DataSource" = proto.Field( + proto.MESSAGE, + number=2, + message="DataSource", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class ListBackupsRequest(proto.Message): + r"""Request message for listing Backups. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backup information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve data sources for all + locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + view (google.cloud.backupdr_v1.types.BackupView): + Optional. Reserved for future use to provide + a BASIC & FULL view of Backup resource. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + view: "BackupView" = proto.Field( + proto.ENUM, + number=6, + enum="BackupView", + ) + + +class ListBackupsResponse(proto.Message): + r"""Response message for listing Backups. + + Attributes: + backups (MutableSequence[google.cloud.backupdr_v1.types.Backup]): + The list of Backup instances in the project + for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return data + sources in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backups: MutableSequence["Backup"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Backup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupRequest(proto.Message): + r"""Request message for getting a Backup. + + Attributes: + name (str): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}' + view (google.cloud.backupdr_v1.types.BackupView): + Optional. Reserved for future use to provide + a BASIC & FULL view of Backup resource. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "BackupView" = proto.Field( + proto.ENUM, + number=2, + enum="BackupView", + ) + + +class UpdateBackupRequest(proto.Message): + r"""Request message for updating a Backup. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The fields + specified in the update_mask are relative to the resource, + not the full request. A field will be overwritten if it is + in the mask. If the user does not provide a mask then the + request will fail. + backup (google.cloud.backupdr_v1.types.Backup): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup: "Backup" = proto.Field( + proto.MESSAGE, + number=2, + message="Backup", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteBackupRequest(proto.Message): + r"""Message for deleting a Backup. + + Attributes: + name (str): + Required. Name of the resource. 
+ request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RestoreBackupRequest(proto.Message): + r"""Request message for restoring from a Backup. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The resource name of the Backup instance, in the + format + 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + compute_instance_target_environment (google.cloud.backupdr_v1.types.ComputeInstanceTargetEnvironment): + Compute Engine target environment to be used + during restore. + + This field is a member of `oneof`_ ``target_environment``. + compute_instance_restore_properties (google.cloud.backupdr_v1.types.ComputeInstanceRestoreProperties): + Compute Engine instance properties to be + overridden during restore. + + This field is a member of `oneof`_ ``instance_properties``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + compute_instance_target_environment: backupvault_gce.ComputeInstanceTargetEnvironment = proto.Field( + proto.MESSAGE, + number=3, + oneof="target_environment", + message=backupvault_gce.ComputeInstanceTargetEnvironment, + ) + compute_instance_restore_properties: backupvault_gce.ComputeInstanceRestoreProperties = proto.Field( + proto.MESSAGE, + number=4, + oneof="instance_properties", + message=backupvault_gce.ComputeInstanceRestoreProperties, + ) + + +class RestoreBackupResponse(proto.Message): + r"""Response message for restoring from a Backup. + + Attributes: + target_resource (google.cloud.backupdr_v1.types.TargetResource): + Details of the target resource + created/modified as part of restore. + """ + + target_resource: "TargetResource" = proto.Field( + proto.MESSAGE, + number=1, + message="TargetResource", + ) + + +class TargetResource(proto.Message): + r"""Details of the target resource created/modified as part of + restore. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcp_resource (google.cloud.backupdr_v1.types.GcpResource): + Details of the native Google Cloud resource + created as part of restore. 
+ + This field is a member of `oneof`_ ``target_resource_info``. + """ + + gcp_resource: "GcpResource" = proto.Field( + proto.MESSAGE, + number=1, + oneof="target_resource_info", + message="GcpResource", + ) + + +class GcpResource(proto.Message): + r"""Minimum details to identify a Google Cloud resource + + Attributes: + gcp_resourcename (str): + Name of the Google Cloud resource. + location (str): + Location of the resource: + //"global"/"unspecified". + type_ (str): + Type of the resource. Use the Unified + Resource Type, eg. + compute.googleapis.com/Instance. + """ + + gcp_resourcename: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py new file mode 100644 index 000000000000..131f54b56abe --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupApplianceBackupProperties", + }, +) + + +class BackupApplianceBackupProperties(proto.Message): + r"""BackupApplianceBackupProperties represents BackupDR backup + appliance's properties. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + generation_id (int): + Output only. The numeric generation ID of the + backup (monotonically increasing). + + This field is a member of `oneof`_ ``_generation_id``. + finalize_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when this backup object + was finalized (if none, backup is not + finalized). + + This field is a member of `oneof`_ ``_finalize_time``. + recovery_range_start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The earliest timestamp of data + available in this Backup. + + This field is a member of `oneof`_ ``_recovery_range_start_time``. + recovery_range_end_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The latest timestamp of data + available in this Backup. + + This field is a member of `oneof`_ ``_recovery_range_end_time``. 
+ """ + + generation_id: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + finalize_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=timestamp_pb2.Timestamp, + ) + recovery_range_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=timestamp_pb2.Timestamp, + ) + recovery_range_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py new file mode 100644 index 000000000000..9e3e98632644 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py @@ -0,0 +1,1991 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "KeyRevocationActionType", + "ComputeInstanceBackupProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ComputeInstanceDataSourceProperties", + "AdvancedMachineFeatures", + "ConfidentialInstanceConfig", + "DisplayDevice", + "AcceleratorConfig", + "CustomerEncryptionKey", + "Entry", + "Metadata", + "NetworkInterface", + "NetworkPerformanceConfig", + "AccessConfig", + "AliasIpRange", + "InstanceParams", + "AllocationAffinity", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "Tags", + "AttachedDisk", + "GuestOsFeature", + }, +) + + +class KeyRevocationActionType(proto.Enum): + r"""Specifies whether the virtual machine instance will be shut + down on key revocation. It is currently used in instance, + instance properties and GMI protos + + Values: + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED (0): + Default value. This value is unused. + NONE (1): + Indicates user chose no operation. + STOP (2): + Indicates user chose to opt for VM shutdown + on key revocation. + """ + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED = 0 + NONE = 1 + STOP = 2 + + +class ComputeInstanceBackupProperties(proto.Message): + r"""ComputeInstanceBackupProperties represents Compute Engine + instance backup properties. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + An optional text description for the + instances that are created from these + properties. + + This field is a member of `oneof`_ ``_description``. + tags (google.cloud.backupdr_v1.types.Tags): + A list of tags to apply to the instances that + are created from these properties. The tags + identify valid sources or targets for network + firewalls. 
The setTags method can modify this + list of tags. Each tag within the list must + comply with RFC1035 + (https://www.ietf.org/rfc/rfc1035.txt). + + This field is a member of `oneof`_ ``_tags``. + machine_type (str): + The machine type to use for instances that + are created from these properties. + + This field is a member of `oneof`_ ``_machine_type``. + can_ip_forward (bool): + Enables instances created based on these properties to send + packets with source IP addresses other than their own and + receive packets with destination IP addresses other than + their own. If these instances will be used as an IP gateway + or it will be set as the next-hop in a Route resource, + specify ``true``. If unsure, leave this set to ``false``. + See the + https://cloud.google.com/vpc/docs/using-routes#canipforward + documentation for more information. + + This field is a member of `oneof`_ ``_can_ip_forward``. + network_interface (MutableSequence[google.cloud.backupdr_v1.types.NetworkInterface]): + An array of network access configurations for + this interface. + disk (MutableSequence[google.cloud.backupdr_v1.types.AttachedDisk]): + An array of disks that are associated with + the instances that are created from these + properties. + metadata (google.cloud.backupdr_v1.types.Metadata): + The metadata key/value pairs to assign to + instances that are created from these + properties. These pairs can consist of custom + metadata or predefined keys. See + https://cloud.google.com/compute/docs/metadata/overview + for more information. + + This field is a member of `oneof`_ ``_metadata``. + service_account (MutableSequence[google.cloud.backupdr_v1.types.ServiceAccount]): + A list of service accounts with specified + scopes. Access tokens for these service accounts + are available to the instances that are created + from these properties. Use metadata queries to + obtain the access tokens for these instances. 
+ scheduling (google.cloud.backupdr_v1.types.Scheduling): + Specifies the scheduling options for the + instances that are created from these + properties. + + This field is a member of `oneof`_ ``_scheduling``. + guest_accelerator (MutableSequence[google.cloud.backupdr_v1.types.AcceleratorConfig]): + A list of guest accelerator cards' type and + count to use for instances created from these + properties. + min_cpu_platform (str): + Minimum cpu/platform to be used by instances. The instance + may be scheduled on the specified or newer cpu/platform. + Applicable values are the friendly names of CPU platforms, + such as ``minCpuPlatform: Intel Haswell`` or + ``minCpuPlatform: Intel Sandy Bridge``. For more + information, read + https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform. + + This field is a member of `oneof`_ ``_min_cpu_platform``. + key_revocation_action_type (google.cloud.backupdr_v1.types.KeyRevocationActionType): + KeyRevocationActionType of the instance. + Supported options are "STOP" and "NONE". The + default value is "NONE" if it is not specified. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. + source_instance (str): + The source instance used to create this + backup. This can be a partial or full URL to the + resource. For example, the following are valid + values: + + -https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance + -projects/project/zones/zone/instances/instance + + This field is a member of `oneof`_ ``_source_instance``. + labels (MutableMapping[str, str]): + Labels to apply to instances that are created + from these properties. 
+ """ + + description: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + tags: "Tags" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="Tags", + ) + machine_type: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + can_ip_forward: bool = proto.Field( + proto.BOOL, + number=4, + optional=True, + ) + network_interface: MutableSequence["NetworkInterface"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="NetworkInterface", + ) + disk: MutableSequence["AttachedDisk"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="AttachedDisk", + ) + metadata: "Metadata" = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message="Metadata", + ) + service_account: MutableSequence["ServiceAccount"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="ServiceAccount", + ) + scheduling: "Scheduling" = proto.Field( + proto.MESSAGE, + number=9, + optional=True, + message="Scheduling", + ) + guest_accelerator: MutableSequence["AcceleratorConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="AcceleratorConfig", + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=11, + optional=True, + ) + key_revocation_action_type: "KeyRevocationActionType" = proto.Field( + proto.ENUM, + number=12, + optional=True, + enum="KeyRevocationActionType", + ) + source_instance: str = proto.Field( + proto.STRING, + number=13, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=14, + ) + + +class ComputeInstanceRestoreProperties(proto.Message): + r"""ComputeInstanceRestoreProperties represents Compute Engine + instance properties to be overridden during restore. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. Name of the compute instance. + + This field is a member of `oneof`_ ``_name``. 
+ advanced_machine_features (google.cloud.backupdr_v1.types.AdvancedMachineFeatures): + Optional. Controls for advanced + machine-related behavior features. + + This field is a member of `oneof`_ ``_advanced_machine_features``. + can_ip_forward (bool): + Optional. Allows this instance to send and + receive packets with non-matching destination or + source IPs. + + This field is a member of `oneof`_ ``_can_ip_forward``. + confidential_instance_config (google.cloud.backupdr_v1.types.ConfidentialInstanceConfig): + Optional. Controls Confidential compute + options on the instance + + This field is a member of `oneof`_ ``_confidential_instance_config``. + deletion_protection (bool): + Optional. Whether the resource should be + protected against deletion. + + This field is a member of `oneof`_ ``_deletion_protection``. + description (str): + Optional. An optional description of this + resource. Provide this property when you create + the resource. + + This field is a member of `oneof`_ ``_description``. + disks (MutableSequence[google.cloud.backupdr_v1.types.AttachedDisk]): + Optional. Array of disks associated with this + instance. Persistent disks must be created + before you can assign them. + display_device (google.cloud.backupdr_v1.types.DisplayDevice): + Optional. Enables display device for the + instance. + + This field is a member of `oneof`_ ``_display_device``. + guest_accelerators (MutableSequence[google.cloud.backupdr_v1.types.AcceleratorConfig]): + Optional. A list of the type and count of + accelerator cards attached to the instance. + hostname (str): + Optional. Specifies the hostname of the instance. The + specified hostname must be RFC1035 compliant. If hostname is + not specified, the default hostname is + [INSTANCE_NAME].c.[PROJECT_ID].internal when using the + global DNS, and + [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using + zonal DNS. + + This field is a member of `oneof`_ ``_hostname``. 
+ instance_encryption_key (google.cloud.backupdr_v1.types.CustomerEncryptionKey): + Optional. Encrypts suspended data for an + instance with a customer-managed encryption key. + + This field is a member of `oneof`_ ``_instance_encryption_key``. + key_revocation_action_type (google.cloud.backupdr_v1.types.KeyRevocationActionType): + Optional. KeyRevocationActionType of the + instance. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. + labels (MutableMapping[str, str]): + Optional. Labels to apply to this instance. + machine_type (str): + Optional. Full or partial URL of the machine + type resource to use for this instance. + + This field is a member of `oneof`_ ``_machine_type``. + metadata (google.cloud.backupdr_v1.types.Metadata): + Optional. This includes custom metadata and + predefined keys. + + This field is a member of `oneof`_ ``_metadata``. + min_cpu_platform (str): + Optional. Minimum CPU platform to use for + this instance. + + This field is a member of `oneof`_ ``_min_cpu_platform``. + network_interfaces (MutableSequence[google.cloud.backupdr_v1.types.NetworkInterface]): + Optional. An array of network configurations + for this instance. These specify how interfaces + are configured to interact with other network + services, such as connecting to the internet. + Multiple interfaces are supported per instance. + network_performance_config (google.cloud.backupdr_v1.types.NetworkPerformanceConfig): + Optional. Configure network performance such + as egress bandwidth tier. + + This field is a member of `oneof`_ ``_network_performance_config``. + params (google.cloud.backupdr_v1.types.InstanceParams): + Input only. Additional params passed with the + request, but not persisted as part of resource + payload. + + This field is a member of `oneof`_ ``_params``. + private_ipv6_google_access (google.cloud.backupdr_v1.types.ComputeInstanceRestoreProperties.InstancePrivateIpv6GoogleAccess): + Optional. 
The private IPv6 google access type for the VM. If + not specified, use INHERIT_FROM_SUBNETWORK as default. + + This field is a member of `oneof`_ ``_private_ipv6_google_access``. + allocation_affinity (google.cloud.backupdr_v1.types.AllocationAffinity): + Optional. Specifies the reservations that + this instance can consume from. + + This field is a member of `oneof`_ ``_allocation_affinity``. + resource_policies (MutableSequence[str]): + Optional. Resource policies applied to this + instance. + scheduling (google.cloud.backupdr_v1.types.Scheduling): + Optional. Sets the scheduling options for + this instance. + + This field is a member of `oneof`_ ``_scheduling``. + service_accounts (MutableSequence[google.cloud.backupdr_v1.types.ServiceAccount]): + Optional. A list of service accounts, with + their specified scopes, authorized for this + instance. Only one service account per VM + instance is supported. + tags (google.cloud.backupdr_v1.types.Tags): + Optional. Tags to apply to this instance. + Tags are used to identify valid sources or + targets for network firewalls and are specified + by the client during instance creation. + + This field is a member of `oneof`_ ``_tags``. + """ + + class InstancePrivateIpv6GoogleAccess(proto.Enum): + r"""The private IPv6 google access type for the VMs. + + Values: + INSTANCE_PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED (0): + Default value. This value is unused. + INHERIT_FROM_SUBNETWORK (1): + Each network interface inherits + PrivateIpv6GoogleAccess from its subnetwork. + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE (2): + Outbound private IPv6 access from VMs in this + subnet to Google services. If specified, the + subnetwork who is attached to the instance's + default network interface will be assigned an + internal IPv6 prefix if it doesn't have before. + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE (3): + Bidirectional private IPv6 access to/from + Google services. 
If specified, the subnetwork + who is attached to the instance's default + network interface will be assigned an internal + IPv6 prefix if it doesn't have before. + """ + INSTANCE_PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED = 0 + INHERIT_FROM_SUBNETWORK = 1 + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE = 2 + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + advanced_machine_features: "AdvancedMachineFeatures" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="AdvancedMachineFeatures", + ) + can_ip_forward: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + confidential_instance_config: "ConfidentialInstanceConfig" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="ConfidentialInstanceConfig", + ) + deletion_protection: bool = proto.Field( + proto.BOOL, + number=5, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + disks: MutableSequence["AttachedDisk"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="AttachedDisk", + ) + display_device: "DisplayDevice" = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message="DisplayDevice", + ) + guest_accelerators: MutableSequence["AcceleratorConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="AcceleratorConfig", + ) + hostname: str = proto.Field( + proto.STRING, + number=10, + optional=True, + ) + instance_encryption_key: "CustomerEncryptionKey" = proto.Field( + proto.MESSAGE, + number=11, + optional=True, + message="CustomerEncryptionKey", + ) + key_revocation_action_type: "KeyRevocationActionType" = proto.Field( + proto.ENUM, + number=12, + optional=True, + enum="KeyRevocationActionType", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + machine_type: str = proto.Field( + proto.STRING, + number=14, + optional=True, + ) + metadata: "Metadata" = 
proto.Field( + proto.MESSAGE, + number=15, + optional=True, + message="Metadata", + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=16, + optional=True, + ) + network_interfaces: MutableSequence["NetworkInterface"] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message="NetworkInterface", + ) + network_performance_config: "NetworkPerformanceConfig" = proto.Field( + proto.MESSAGE, + number=18, + optional=True, + message="NetworkPerformanceConfig", + ) + params: "InstanceParams" = proto.Field( + proto.MESSAGE, + number=19, + optional=True, + message="InstanceParams", + ) + private_ipv6_google_access: InstancePrivateIpv6GoogleAccess = proto.Field( + proto.ENUM, + number=20, + optional=True, + enum=InstancePrivateIpv6GoogleAccess, + ) + allocation_affinity: "AllocationAffinity" = proto.Field( + proto.MESSAGE, + number=21, + optional=True, + message="AllocationAffinity", + ) + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22, + ) + scheduling: "Scheduling" = proto.Field( + proto.MESSAGE, + number=23, + optional=True, + message="Scheduling", + ) + service_accounts: MutableSequence["ServiceAccount"] = proto.RepeatedField( + proto.MESSAGE, + number=24, + message="ServiceAccount", + ) + tags: "Tags" = proto.Field( + proto.MESSAGE, + number=26, + optional=True, + message="Tags", + ) + + +class ComputeInstanceTargetEnvironment(proto.Message): + r"""ComputeInstanceTargetEnvironment represents Compute Engine + target environment to be used during restore. + + Attributes: + project (str): + Required. Target project for the Compute + Engine instance. + zone (str): + Required. The zone of the Compute Engine + instance. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ComputeInstanceDataSourceProperties(proto.Message): + r"""ComputeInstanceDataSourceProperties represents the properties + of a ComputeEngine resource that are stored in the DataSource. + + Attributes: + name (str): + Name of the compute instance backed up by the + datasource. + description (str): + The description of the Compute Engine + instance. + machine_type (str): + The machine type of the instance. + total_disk_count (int): + The total number of disks attached to the + Instance. + total_disk_size_gb (int): + The sum of all the disk sizes. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + machine_type: str = proto.Field( + proto.STRING, + number=3, + ) + total_disk_count: int = proto.Field( + proto.INT64, + number=4, + ) + total_disk_size_gb: int = proto.Field( + proto.INT64, + number=5, + ) + + +class AdvancedMachineFeatures(proto.Message): + r"""Specifies options for controlling advanced machine features. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_nested_virtualization (bool): + Optional. Whether to enable nested + virtualization or not (default is false). + + This field is a member of `oneof`_ ``_enable_nested_virtualization``. + threads_per_core (int): + Optional. The number of threads per physical + core. To disable simultaneous multithreading + (SMT) set this to 1. If unset, the maximum + number of threads supported per core by the + underlying processor is assumed. + + This field is a member of `oneof`_ ``_threads_per_core``. + visible_core_count (int): + Optional. The number of physical cores to + expose to an instance. Multiply by the number of + threads per core to compute the total number of + virtual CPUs to expose to the instance. 
If + unset, the number of cores is inferred from the + instance's nominal CPU count and the underlying + platform's SMT width. + + This field is a member of `oneof`_ ``_visible_core_count``. + enable_uefi_networking (bool): + Optional. Whether to enable UEFI networking + for instance creation. + + This field is a member of `oneof`_ ``_enable_uefi_networking``. + """ + + enable_nested_virtualization: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + threads_per_core: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + visible_core_count: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + enable_uefi_networking: bool = proto.Field( + proto.BOOL, + number=4, + optional=True, + ) + + +class ConfidentialInstanceConfig(proto.Message): + r"""A set of Confidential Instance options. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_confidential_compute (bool): + Optional. Defines whether the instance should + have confidential compute enabled. + + This field is a member of `oneof`_ ``_enable_confidential_compute``. + """ + + enable_confidential_compute: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + + +class DisplayDevice(proto.Message): + r"""A set of Display Device options + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_display (bool): + Optional. Enables display for the Compute + Engine VM + + This field is a member of `oneof`_ ``_enable_display``. + """ + + enable_display: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + + +class AcceleratorConfig(proto.Message): + r"""A specification of the type and number of accelerator cards + attached to the instance. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + accelerator_type (str): + Optional. Full or partial URL of the + accelerator type resource to attach to this + instance. + + This field is a member of `oneof`_ ``_accelerator_type``. + accelerator_count (int): + Optional. The number of the guest accelerator + cards exposed to this instance. + + This field is a member of `oneof`_ ``_accelerator_count``. + """ + + accelerator_type: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + accelerator_count: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + +class CustomerEncryptionKey(proto.Message): + r"""A customer-supplied encryption key. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + raw_key (str): + Optional. Specifies a 256-bit + customer-supplied encryption key. + + This field is a member of `oneof`_ ``key``. + rsa_encrypted_key (str): + Optional. RSA-wrapped 2048-bit + customer-supplied encryption key to either + encrypt or decrypt this resource. + + This field is a member of `oneof`_ ``key``. + kms_key_name (str): + Optional. The name of the encryption key that + is stored in Google Cloud KMS. + + This field is a member of `oneof`_ ``key``. + kms_key_service_account (str): + Optional. The service account being used for + the encryption request for the given KMS key. If + absent, the Compute Engine default service + account is used. + + This field is a member of `oneof`_ ``_kms_key_service_account``. 
+ """ + + raw_key: str = proto.Field( + proto.STRING, + number=1, + oneof="key", + ) + rsa_encrypted_key: str = proto.Field( + proto.STRING, + number=2, + oneof="key", + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=3, + oneof="key", + ) + kms_key_service_account: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + + +class Entry(proto.Message): + r"""A key/value pair to be used for storing metadata. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + key (str): + Optional. Key for the metadata entry. + + This field is a member of `oneof`_ ``_key``. + value (str): + Optional. Value for the metadata entry. These + are free-form strings, and only have meaning as + interpreted by the image running in the + instance. The only restriction placed on values + is that their size must be less than or equal to + 262144 bytes (256 KiB). + + This field is a member of `oneof`_ ``_value``. + """ + + key: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + value: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class Metadata(proto.Message): + r"""A metadata key/value entry. + + Attributes: + items (MutableSequence[google.cloud.backupdr_v1.types.Entry]): + Optional. Array of key/value pairs. The total + size of all keys and values must be less than + 512 KB. + """ + + items: MutableSequence["Entry"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Entry", + ) + + +class NetworkInterface(proto.Message): + r"""A network interface resource attached to an instance. + s + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network (str): + Optional. URL of the VPC network resource for + this instance. + + This field is a member of `oneof`_ ``_network``. + subnetwork (str): + Optional. The URL of the Subnetwork resource + for this instance. 
+ + This field is a member of `oneof`_ ``_subnetwork``. + ip_address (str): + Optional. An IPv4 internal IP address to + assign to the instance for this network + interface. If not specified by the user, an + unused internal IP is assigned by the system. + + This field is a member of `oneof`_ ``_ip_address``. + ipv6_address (str): + Optional. An IPv6 internal network address + for this network interface. To use a static + internal IP address, it must be unused and in + the same region as the instance's zone. If not + specified, Google Cloud will automatically + assign an internal IPv6 address from the + instance's subnetwork. + + This field is a member of `oneof`_ ``_ipv6_address``. + internal_ipv6_prefix_length (int): + Optional. The prefix length of the primary + internal IPv6 range. + + This field is a member of `oneof`_ ``_internal_ipv6_prefix_length``. + name (str): + Output only. [Output Only] The name of the network + interface, which is generated by the server. + + This field is a member of `oneof`_ ``_name``. + access_configs (MutableSequence[google.cloud.backupdr_v1.types.AccessConfig]): + Optional. An array of configurations for this interface. + Currently, only one access config,ONE_TO_ONE_NAT is + supported. If there are no accessConfigs specified, then + this instance will have no external internet access. + ipv6_access_configs (MutableSequence[google.cloud.backupdr_v1.types.AccessConfig]): + Optional. An array of IPv6 access configurations for this + interface. Currently, only one IPv6 access config, + DIRECT_IPV6, is supported. If there is no ipv6AccessConfig + specified, then this instance will have no external IPv6 + Internet access. + alias_ip_ranges (MutableSequence[google.cloud.backupdr_v1.types.AliasIpRange]): + Optional. An array of alias IP ranges for + this network interface. You can only specify + this field for network interfaces in VPC + networks. 
+ stack_type (google.cloud.backupdr_v1.types.NetworkInterface.StackType): + The stack type for this network interface. + + This field is a member of `oneof`_ ``_stack_type``. + ipv6_access_type (google.cloud.backupdr_v1.types.NetworkInterface.Ipv6AccessType): + Optional. [Output Only] One of EXTERNAL, INTERNAL to + indicate whether the IP can be accessed from the Internet. + This field is always inherited from its subnetwork. + + This field is a member of `oneof`_ ``_ipv6_access_type``. + queue_count (int): + Optional. The networking queue count that's + specified by users for the network interface. + Both Rx and Tx queues will be set to this + number. It'll be empty if not specified by the + users. + + This field is a member of `oneof`_ ``_queue_count``. + nic_type (google.cloud.backupdr_v1.types.NetworkInterface.NicType): + Optional. The type of vNIC to be used on this + interface. This may be gVNIC or VirtioNet. + + This field is a member of `oneof`_ ``_nic_type``. + network_attachment (str): + Optional. The URL of the network attachment that this + interface should connect to in the following format: + projects/{project_number}/regions/{region_name}/networkAttachments/{network_attachment_name}. + + This field is a member of `oneof`_ ``_network_attachment``. + """ + + class StackType(proto.Enum): + r"""Stack type for this network interface. + + Values: + STACK_TYPE_UNSPECIFIED (0): + Default should be STACK_TYPE_UNSPECIFIED. + IPV4_ONLY (1): + The network interface will be assigned IPv4 + address. + IPV4_IPV6 (2): + The network interface can have both IPv4 and + IPv6 addresses. + """ + STACK_TYPE_UNSPECIFIED = 0 + IPV4_ONLY = 1 + IPV4_IPV6 = 2 + + class Ipv6AccessType(proto.Enum): + r"""IPv6 access type for this network interface. + + Values: + UNSPECIFIED_IPV6_ACCESS_TYPE (0): + IPv6 access type not set. Means this network + interface hasn't been turned on IPv6 yet. + INTERNAL (1): + This network interface can have internal + IPv6. 
+ EXTERNAL (2): + This network interface can have external + IPv6. + """ + UNSPECIFIED_IPV6_ACCESS_TYPE = 0 + INTERNAL = 1 + EXTERNAL = 2 + + class NicType(proto.Enum): + r"""Nic type for this network interface. + + Values: + NIC_TYPE_UNSPECIFIED (0): + Default should be NIC_TYPE_UNSPECIFIED. + VIRTIO_NET (1): + VIRTIO + GVNIC (2): + GVNIC + """ + NIC_TYPE_UNSPECIFIED = 0 + VIRTIO_NET = 1 + GVNIC = 2 + + network: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + ip_address: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + ipv6_address: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + internal_ipv6_prefix_length: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + access_configs: MutableSequence["AccessConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="AccessConfig", + ) + ipv6_access_configs: MutableSequence["AccessConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="AccessConfig", + ) + alias_ip_ranges: MutableSequence["AliasIpRange"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="AliasIpRange", + ) + stack_type: StackType = proto.Field( + proto.ENUM, + number=10, + optional=True, + enum=StackType, + ) + ipv6_access_type: Ipv6AccessType = proto.Field( + proto.ENUM, + number=11, + optional=True, + enum=Ipv6AccessType, + ) + queue_count: int = proto.Field( + proto.INT32, + number=12, + optional=True, + ) + nic_type: NicType = proto.Field( + proto.ENUM, + number=13, + optional=True, + enum=NicType, + ) + network_attachment: str = proto.Field( + proto.STRING, + number=14, + optional=True, + ) + + +class NetworkPerformanceConfig(proto.Message): + r"""Network performance configuration. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + total_egress_bandwidth_tier (google.cloud.backupdr_v1.types.NetworkPerformanceConfig.Tier): + Optional. The tier of the total egress + bandwidth. + + This field is a member of `oneof`_ ``_total_egress_bandwidth_tier``. + """ + + class Tier(proto.Enum): + r"""Network performance tier. + + Values: + TIER_UNSPECIFIED (0): + This value is unused. + DEFAULT (1): + Default network performance config. + TIER_1 (2): + Tier 1 network performance config. + """ + TIER_UNSPECIFIED = 0 + DEFAULT = 1 + TIER_1 = 2 + + total_egress_bandwidth_tier: Tier = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=Tier, + ) + + +class AccessConfig(proto.Message): + r"""An access configuration attached to an instance's network + interface. Only one access config per instance is supported. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.backupdr_v1.types.AccessConfig.AccessType): + Optional. In accessConfigs (IPv4), the default and only + option is ONE_TO_ONE_NAT. In ipv6AccessConfigs, the default + and only option is DIRECT_IPV6. + + This field is a member of `oneof`_ ``_type``. + name (str): + Optional. The name of this access + configuration. + + This field is a member of `oneof`_ ``_name``. + external_ip (str): + Optional. The external IP address of this + access configuration. + + This field is a member of `oneof`_ ``_external_ip``. + external_ipv6 (str): + Optional. The external IPv6 address of this + access configuration. + + This field is a member of `oneof`_ ``_external_ipv6``. + external_ipv6_prefix_length (int): + Optional. The prefix length of the external + IPv6 range. + + This field is a member of `oneof`_ ``_external_ipv6_prefix_length``. + set_public_ptr (bool): + Optional. 
Specifies whether a public DNS + 'PTR' record should be created to map the + external IP address of the instance to a DNS + domain name. + + This field is a member of `oneof`_ ``_set_public_ptr``. + public_ptr_domain_name (str): + Optional. The DNS domain name for the public + PTR record. + + This field is a member of `oneof`_ ``_public_ptr_domain_name``. + network_tier (google.cloud.backupdr_v1.types.AccessConfig.NetworkTier): + Optional. This signifies the networking tier + used for configuring this access + + This field is a member of `oneof`_ ``_network_tier``. + """ + + class AccessType(proto.Enum): + r"""The type of configuration. + + Values: + ACCESS_TYPE_UNSPECIFIED (0): + Default value. This value is unused. + ONE_TO_ONE_NAT (1): + ONE_TO_ONE_NAT + DIRECT_IPV6 (2): + Direct IPv6 access. + """ + ACCESS_TYPE_UNSPECIFIED = 0 + ONE_TO_ONE_NAT = 1 + DIRECT_IPV6 = 2 + + class NetworkTier(proto.Enum): + r"""Network tier property used by addresses, instances and + forwarding rules. + + Values: + NETWORK_TIER_UNSPECIFIED (0): + Default value. This value is unused. + PREMIUM (1): + High quality, Google-grade network tier, + support for all networking products. + STANDARD (2): + Public internet quality, only limited support + for other networking products. 
+ """ + NETWORK_TIER_UNSPECIFIED = 0 + PREMIUM = 1 + STANDARD = 2 + + type_: AccessType = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=AccessType, + ) + name: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + external_ip: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + external_ipv6: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + external_ipv6_prefix_length: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + set_public_ptr: bool = proto.Field( + proto.BOOL, + number=6, + optional=True, + ) + public_ptr_domain_name: str = proto.Field( + proto.STRING, + number=7, + optional=True, + ) + network_tier: NetworkTier = proto.Field( + proto.ENUM, + number=8, + optional=True, + enum=NetworkTier, + ) + + +class AliasIpRange(proto.Message): + r"""An alias IP range attached to an instance's network + interface. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_cidr_range (str): + Optional. The IP alias ranges to allocate for + this interface. + + This field is a member of `oneof`_ ``_ip_cidr_range``. + subnetwork_range_name (str): + Optional. The name of a subnetwork secondary + IP range from which to allocate an IP alias + range. If not specified, the primary range of + the subnetwork is used. + + This field is a member of `oneof`_ ``_subnetwork_range_name``. + """ + + ip_cidr_range: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + subnetwork_range_name: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class InstanceParams(proto.Message): + r"""Additional instance params. + + Attributes: + resource_manager_tags (MutableMapping[str, str]): + Optional. Resource manager tags to be bound + to the instance. 
+ """ + + resource_manager_tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + + +class AllocationAffinity(proto.Message): + r"""Specifies the reservations that this instance can consume + from. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + consume_allocation_type (google.cloud.backupdr_v1.types.AllocationAffinity.Type): + Optional. Specifies the type of reservation + from which this instance can consume + + This field is a member of `oneof`_ ``_consume_allocation_type``. + key (str): + Optional. Corresponds to the label key of a + reservation resource. + + This field is a member of `oneof`_ ``_key``. + values (MutableSequence[str]): + Optional. Corresponds to the label values of + a reservation resource. + """ + + class Type(proto.Enum): + r"""Indicates whether to consume from a reservation or not. + + Values: + TYPE_UNSPECIFIED (0): + Default value. This value is unused. + NO_RESERVATION (1): + Do not consume from any allocated capacity. + ANY_RESERVATION (2): + Consume any allocation available. + SPECIFIC_RESERVATION (3): + Must consume from a specific reservation. + Must specify key value fields for specifying the + reservations. + """ + TYPE_UNSPECIFIED = 0 + NO_RESERVATION = 1 + ANY_RESERVATION = 2 + SPECIFIC_RESERVATION = 3 + + consume_allocation_type: Type = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=Type, + ) + key: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class Scheduling(proto.Message): + r"""Sets the scheduling options for an Instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + on_host_maintenance (google.cloud.backupdr_v1.types.Scheduling.OnHostMaintenance): + Optional. 
Defines the maintenance behavior + for this instance. + + This field is a member of `oneof`_ ``_on_host_maintenance``. + automatic_restart (bool): + Optional. Specifies whether the instance + should be automatically restarted if it is + terminated by Compute Engine (not terminated by + a user). + + This field is a member of `oneof`_ ``_automatic_restart``. + preemptible (bool): + Optional. Defines whether the instance is + preemptible. + + This field is a member of `oneof`_ ``_preemptible``. + node_affinities (MutableSequence[google.cloud.backupdr_v1.types.Scheduling.NodeAffinity]): + Optional. A set of node affinity and + anti-affinity configurations. Overrides + reservationAffinity. + min_node_cpus (int): + Optional. The minimum number of virtual CPUs + this instance will consume when running on a + sole-tenant node. + + This field is a member of `oneof`_ ``_min_node_cpus``. + provisioning_model (google.cloud.backupdr_v1.types.Scheduling.ProvisioningModel): + Optional. Specifies the provisioning model of + the instance. + + This field is a member of `oneof`_ ``_provisioning_model``. + instance_termination_action (google.cloud.backupdr_v1.types.Scheduling.InstanceTerminationAction): + Optional. Specifies the termination action + for the instance. + + This field is a member of `oneof`_ ``_instance_termination_action``. + local_ssd_recovery_timeout (google.cloud.backupdr_v1.types.SchedulingDuration): + Optional. Specifies the maximum amount of + time a Local Ssd Vm should wait while recovery + of the Local Ssd state is attempted. Its value + should be in between 0 and 168 hours with hour + granularity and the default value being 1 hour. + + This field is a member of `oneof`_ ``_local_ssd_recovery_timeout``. + """ + + class OnHostMaintenance(proto.Enum): + r"""Defines the maintenance behavior for this instance= + + Values: + ON_HOST_MAINTENANCE_UNSPECIFIED (0): + Default value. This value is unused. 
+ TERMINATE (1): + Tells Compute Engine to terminate and + (optionally) restart the instance away from the + maintenance activity. + MIGRATE (1000): + Default, Allows Compute Engine to + automatically migrate instances out of the way + of maintenance events. + """ + ON_HOST_MAINTENANCE_UNSPECIFIED = 0 + TERMINATE = 1 + MIGRATE = 1000 + + class ProvisioningModel(proto.Enum): + r"""Defines the provisioning model for an instance. + + Values: + PROVISIONING_MODEL_UNSPECIFIED (0): + Default value. This value is not used. + STANDARD (1): + Standard provisioning with user controlled + runtime, no discounts. + SPOT (2): + Heavily discounted, no guaranteed runtime. + """ + PROVISIONING_MODEL_UNSPECIFIED = 0 + STANDARD = 1 + SPOT = 2 + + class InstanceTerminationAction(proto.Enum): + r"""Defines the supported termination actions for an instance. + + Values: + INSTANCE_TERMINATION_ACTION_UNSPECIFIED (0): + Default value. This value is unused. + DELETE (1): + Delete the VM. + STOP (2): + Stop the VM without storing in-memory + content. default action. + """ + INSTANCE_TERMINATION_ACTION_UNSPECIFIED = 0 + DELETE = 1 + STOP = 2 + + class NodeAffinity(proto.Message): + r"""Node Affinity: the configuration of desired nodes onto which + this Instance could be scheduled. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + key (str): + Optional. Corresponds to the label key of + Node resource. + + This field is a member of `oneof`_ ``_key``. + operator (google.cloud.backupdr_v1.types.Scheduling.NodeAffinity.Operator): + Optional. Defines the operation of node + selection. + + This field is a member of `oneof`_ ``_operator``. + values (MutableSequence[str]): + Optional. Corresponds to the label values of + Node resource. + """ + + class Operator(proto.Enum): + r"""Defines the type of node selections. + + Values: + OPERATOR_UNSPECIFIED (0): + Default value. This value is unused. 
+ IN (1): + Requires Compute Engine to seek for matched + nodes. + NOT_IN (2): + Requires Compute Engine to avoid certain + nodes. + """ + OPERATOR_UNSPECIFIED = 0 + IN = 1 + NOT_IN = 2 + + key: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + operator: "Scheduling.NodeAffinity.Operator" = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum="Scheduling.NodeAffinity.Operator", + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + on_host_maintenance: OnHostMaintenance = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=OnHostMaintenance, + ) + automatic_restart: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + preemptible: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + node_affinities: MutableSequence[NodeAffinity] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=NodeAffinity, + ) + min_node_cpus: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + provisioning_model: ProvisioningModel = proto.Field( + proto.ENUM, + number=6, + optional=True, + enum=ProvisioningModel, + ) + instance_termination_action: InstanceTerminationAction = proto.Field( + proto.ENUM, + number=7, + optional=True, + enum=InstanceTerminationAction, + ) + local_ssd_recovery_timeout: "SchedulingDuration" = proto.Field( + proto.MESSAGE, + number=10, + optional=True, + message="SchedulingDuration", + ) + + +class SchedulingDuration(proto.Message): + r"""A SchedulingDuration represents a fixed-length span of time + represented as a count of seconds and fractions of seconds at + nanosecond resolution. It is independent of any calendar and + concepts like "day" or "month". Range is approximately 10,000 + years. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + seconds (int): + Optional. Span of time at a resolution of a + second. 
+ + This field is a member of `oneof`_ ``_seconds``. + nanos (int): + Optional. Span of time that's a fraction of a + second at nanosecond resolution. + + This field is a member of `oneof`_ ``_nanos``. + """ + + seconds: int = proto.Field( + proto.INT64, + number=1, + optional=True, + ) + nanos: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + +class ServiceAccount(proto.Message): + r"""A service account. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + email (str): + Optional. Email address of the service + account. + + This field is a member of `oneof`_ ``_email``. + scopes (MutableSequence[str]): + Optional. The list of scopes to be made + available for this service account. + """ + + email: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class Tags(proto.Message): + r"""A set of instance tags. + + Attributes: + items (MutableSequence[str]): + Optional. An array of tags. Each tag must be + 1-63 characters long, and comply with RFC1035. + """ + + items: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class AttachedDisk(proto.Message): + r"""An instance-attached disk resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + initialize_params (google.cloud.backupdr_v1.types.AttachedDisk.InitializeParams): + Optional. Specifies the parameters to + initialize this disk. + + This field is a member of `oneof`_ ``_initialize_params``. + device_name (str): + Optional. This is used as an identifier for the disks. This + is the unique name has to provided to modify disk parameters + like disk_name and replica_zones (in case of RePDs) + + This field is a member of `oneof`_ ``_device_name``. + kind (str): + Optional. Type of the resource. 
+ + This field is a member of `oneof`_ ``_kind``. + disk_type_deprecated (google.cloud.backupdr_v1.types.AttachedDisk.DiskType): + Specifies the type of the disk. + + This field is a member of `oneof`_ ``_disk_type_deprecated``. + mode (google.cloud.backupdr_v1.types.AttachedDisk.DiskMode): + Optional. The mode in which to attach this + disk. + + This field is a member of `oneof`_ ``_mode``. + source (str): + Optional. Specifies a valid partial or full + URL to an existing Persistent Disk resource. + + This field is a member of `oneof`_ ``_source``. + index (int): + Optional. A zero-based index to this disk, + where 0 is reserved for the boot disk. + + This field is a member of `oneof`_ ``_index``. + boot (bool): + Optional. Indicates that this is a boot disk. + The virtual machine will use the first partition + of the disk for its root filesystem. + + This field is a member of `oneof`_ ``_boot``. + auto_delete (bool): + Optional. Specifies whether the disk will be + auto-deleted when the instance is deleted (but + not when the disk is detached from the + instance). + + This field is a member of `oneof`_ ``_auto_delete``. + license_ (MutableSequence[str]): + Optional. Any valid publicly visible + licenses. + disk_interface (google.cloud.backupdr_v1.types.AttachedDisk.DiskInterface): + Optional. Specifies the disk interface to use + for attaching this disk. + + This field is a member of `oneof`_ ``_disk_interface``. + guest_os_feature (MutableSequence[google.cloud.backupdr_v1.types.GuestOsFeature]): + Optional. A list of features to enable on the + guest operating system. Applicable only for + bootable images. + disk_encryption_key (google.cloud.backupdr_v1.types.CustomerEncryptionKey): + Optional. Encrypts or decrypts a disk using a + customer-supplied encryption key. + + This field is a member of `oneof`_ ``_disk_encryption_key``. + disk_size_gb (int): + Optional. The size of the disk in GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. 
+ saved_state (google.cloud.backupdr_v1.types.AttachedDisk.DiskSavedState): + Optional. Output only. The state of the disk. + + This field is a member of `oneof`_ ``_saved_state``. + disk_type (str): + Optional. Output only. The URI of the disk + type resource. For example: + projects/project/zones/zone/diskTypes/pd-standard + or pd-ssd + + This field is a member of `oneof`_ ``_disk_type``. + type_ (google.cloud.backupdr_v1.types.AttachedDisk.DiskType): + Optional. Specifies the type of the disk. + + This field is a member of `oneof`_ ``_type``. + """ + + class DiskType(proto.Enum): + r"""List of the Disk Types. + + Values: + DISK_TYPE_UNSPECIFIED (0): + Default value, which is unused. + SCRATCH (1): + A scratch disk type. + PERSISTENT (2): + A persistent disk type. + """ + DISK_TYPE_UNSPECIFIED = 0 + SCRATCH = 1 + PERSISTENT = 2 + + class DiskMode(proto.Enum): + r"""List of the Disk Modes. + + Values: + DISK_MODE_UNSPECIFIED (0): + Default value, which is unused. + READ_WRITE (1): + Attaches this disk in read-write mode. Only + one virtual machine at a time can be attached to + a disk in read-write mode. + READ_ONLY (2): + Attaches this disk in read-only mode. + Multiple virtual machines can use a disk in + read-only mode at a time. + LOCKED (3): + The disk is locked for administrative + reasons. Nobody else can use the disk. This mode + is used (for example) when taking a snapshot of + a disk to prevent mounting the disk while it is + being snapshotted. + """ + DISK_MODE_UNSPECIFIED = 0 + READ_WRITE = 1 + READ_ONLY = 2 + LOCKED = 3 + + class DiskInterface(proto.Enum): + r"""List of the Disk Interfaces. + + Values: + DISK_INTERFACE_UNSPECIFIED (0): + Default value, which is unused. + SCSI (1): + SCSI Disk Interface. + NVME (2): + NVME Disk Interface. + NVDIMM (3): + NVDIMM Disk Interface. + ISCSI (4): + ISCSI Disk Interface. 
+ """ + DISK_INTERFACE_UNSPECIFIED = 0 + SCSI = 1 + NVME = 2 + NVDIMM = 3 + ISCSI = 4 + + class DiskSavedState(proto.Enum): + r"""List of the states of the Disk. + + Values: + DISK_SAVED_STATE_UNSPECIFIED (0): + Default Disk state has not been preserved. + PRESERVED (1): + Disk state has been preserved. + """ + DISK_SAVED_STATE_UNSPECIFIED = 0 + PRESERVED = 1 + + class InitializeParams(proto.Message): + r"""Specifies the parameters to initialize this disk. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_name (str): + Optional. Specifies the disk name. If not + specified, the default is to use the name of the + instance. + + This field is a member of `oneof`_ ``_disk_name``. + replica_zones (MutableSequence[str]): + Optional. URL of the zone where the disk + should be created. Required for each regional + disk associated with the instance. + """ + + disk_name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + replica_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + initialize_params: InitializeParams = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message=InitializeParams, + ) + device_name: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + disk_type_deprecated: DiskType = proto.Field( + proto.ENUM, + number=6, + optional=True, + enum=DiskType, + ) + mode: DiskMode = proto.Field( + proto.ENUM, + number=7, + optional=True, + enum=DiskMode, + ) + source: str = proto.Field( + proto.STRING, + number=8, + optional=True, + ) + index: int = proto.Field( + proto.INT64, + number=9, + optional=True, + ) + boot: bool = proto.Field( + proto.BOOL, + number=10, + optional=True, + ) + auto_delete: bool = proto.Field( + proto.BOOL, + number=11, + optional=True, + ) + license_: MutableSequence[str] = proto.RepeatedField( + 
proto.STRING, + number=12, + ) + disk_interface: DiskInterface = proto.Field( + proto.ENUM, + number=13, + optional=True, + enum=DiskInterface, + ) + guest_os_feature: MutableSequence["GuestOsFeature"] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="GuestOsFeature", + ) + disk_encryption_key: "CustomerEncryptionKey" = proto.Field( + proto.MESSAGE, + number=15, + optional=True, + message="CustomerEncryptionKey", + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=16, + optional=True, + ) + saved_state: DiskSavedState = proto.Field( + proto.ENUM, + number=17, + optional=True, + enum=DiskSavedState, + ) + disk_type: str = proto.Field( + proto.STRING, + number=18, + optional=True, + ) + type_: DiskType = proto.Field( + proto.ENUM, + number=19, + optional=True, + enum=DiskType, + ) + + +class GuestOsFeature(proto.Message): + r"""Feature type of the Guest OS. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.backupdr_v1.types.GuestOsFeature.FeatureType): + The ID of a supported feature. + + This field is a member of `oneof`_ ``_type``. + """ + + class FeatureType(proto.Enum): + r"""List of the Feature Types. + + Values: + FEATURE_TYPE_UNSPECIFIED (0): + Default value, which is unused. + VIRTIO_SCSI_MULTIQUEUE (1): + VIRTIO_SCSI_MULTIQUEUE feature type. + WINDOWS (2): + WINDOWS feature type. + MULTI_IP_SUBNET (3): + MULTI_IP_SUBNET feature type. + UEFI_COMPATIBLE (4): + UEFI_COMPATIBLE feature type. + SECURE_BOOT (5): + SECURE_BOOT feature type. + GVNIC (6): + GVNIC feature type. + SEV_CAPABLE (7): + SEV_CAPABLE feature type. + BARE_METAL_LINUX_COMPATIBLE (8): + BARE_METAL_LINUX_COMPATIBLE feature type. + SUSPEND_RESUME_COMPATIBLE (9): + SUSPEND_RESUME_COMPATIBLE feature type. + SEV_LIVE_MIGRATABLE (10): + SEV_LIVE_MIGRATABLE feature type. + SEV_SNP_CAPABLE (11): + SEV_SNP_CAPABLE feature type. + TDX_CAPABLE (12): + TDX_CAPABLE feature type. 
+ IDPF (13): + IDPF feature type. + SEV_LIVE_MIGRATABLE_V2 (14): + SEV_LIVE_MIGRATABLE_V2 feature type. + """ + FEATURE_TYPE_UNSPECIFIED = 0 + VIRTIO_SCSI_MULTIQUEUE = 1 + WINDOWS = 2 + MULTI_IP_SUBNET = 3 + UEFI_COMPATIBLE = 4 + SECURE_BOOT = 5 + GVNIC = 6 + SEV_CAPABLE = 7 + BARE_METAL_LINUX_COMPATIBLE = 8 + SUSPEND_RESUME_COMPATIBLE = 9 + SEV_LIVE_MIGRATABLE = 10 + SEV_SNP_CAPABLE = 11 + TDX_CAPABLE = 12 + IDPF = 13 + SEV_LIVE_MIGRATABLE_V2 = 14 + + type_: FeatureType = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=FeatureType, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py new file mode 100644 index 000000000000..25dbf9cca081 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py new file mode 100644 index 000000000000..fc82ca77f706 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py new file mode 100644 index 000000000000..ff546daa2ac6 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the 
response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py new file mode 100644 index 000000000000..5c648a085be1 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py new file mode 100644 index 000000000000..1acf666c5d38 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py new file mode 100644 index 000000000000..5a1abda3a275 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py new file mode 100644 index 000000000000..346d24a9543e --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py new file mode 100644 index 000000000000..21af239763d3 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py new file mode 100644 index 000000000000..18c4ca0cae3b --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py new file mode 100644 index 000000000000..3423852e66cd --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py new file mode 100644 index 000000000000..aaabe6e55265 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py new file mode 100644 index 000000000000..dee368f1cd32 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py new file mode 100644 index 000000000000..a70379011f44 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_delete_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py new file mode 100644 index 000000000000..2b824ef4088e --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py new file mode 100644 index 000000000000..ab7dc9c365b4 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchUsableBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py new file mode 100644 index 000000000000..0e4abb2342d5 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchUsableBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py new file mode 100644 index 000000000000..27f69f503b1b --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py new file mode 100644 index 000000000000..666e503e039c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py new file mode 100644 index 000000000000..f16d4b5dcdc6 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py new file mode 100644 index 000000000000..bbca5985c4d3 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py new file mode 100644 index 000000000000..3e6f35ccdc90 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py new file mode 100644 index 000000000000..064cbac8920e --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py new file mode 100644 index 000000000000..95d30ed5bf46 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_vault(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py new file mode 100644 index 000000000000..814ccccaf4a0 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_vault(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py new file mode 100644 index 000000000000..864ee90db114 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetDataSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_source(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetDataSource_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py new file mode 100644 index 000000000000..95f18218de42 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetDataSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_source(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetDataSource_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py new file mode 100644 index 000000000000..e6cfd3cc039c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlanAssociations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py new file mode 100644 index 000000000000..39b135ce9944 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google 
LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlanAssociations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py new file mode 100644 index 000000000000..f09593b5796b --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlans +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlans_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_backup_plans(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plans(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlans_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py new file mode 100644 index 000000000000..ab2cab9b1701 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlans +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlans_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backup_plans(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plans(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlans_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py new file mode 100644 index 000000000000..675c345b810c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupVaults_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupVaults_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py new file mode 100644 index 000000000000..27b1faa5debb --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupVaults_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupVaults_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py new file mode 100644 index 000000000000..da6366f3b095 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_backups(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackups_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py new file mode 100644 index 000000000000..18387f7371a5 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backups(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackups_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py new file mode 100644 index 000000000000..f5cb4d5a4477 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataSources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListDataSources_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_data_sources(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListDataSources_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py new file mode 100644 index 000000000000..36680bf32e15 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataSources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListDataSources_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_data_sources(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListDataSources_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py new file mode 100644 index 000000000000..9bdfab3c21bc --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_RestoreBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_restore_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" + + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", + ) + + # Make the request + operation = client.restore_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_RestoreBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py new file mode 100644 index 
000000000000..6b503fb4a546 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_RestoreBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_restore_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" + + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", + ) + + # Make the request + operation = client.restore_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_RestoreBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py new file mode 100644 index 000000000000..6c6c641d54ee --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TriggerBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_TriggerBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_TriggerBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py new file mode 100644 index 000000000000..359727f2dd1c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TriggerBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_TriggerBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_TriggerBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py new file mode 100644 index 000000000000..a1a2fcc0ce51 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_update_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py new file mode 100644 index 000000000000..9ea7e26404d0 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_update_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py new file mode 100644 index 000000000000..386f2ca872d3 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py new file mode 100644 index 000000000000..ab3690e1df33 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py new file mode 100644 index 000000000000..986de214c53d --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateDataSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateDataSource_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py new file mode 100644 index 000000000000..d20aa5d93848 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateDataSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateDataSource_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json index 2671bc2d913d..dc01534e8660 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json +++ b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json @@ -8,9 +8,540 @@ ], "language": "PYTHON", "name": "google-cloud-backupdr", - "version": "0.1.3" + "version": "0.1.4" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"backup_plan_association", + "type": "google.cloud.backupdr_v1.types.BackupPlanAssociation" + }, + { + "name": "backup_plan_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup_plan_association" + }, + "description": "Sample for CreateBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan_association", + "type": 
"google.cloud.backupdr_v1.types.BackupPlanAssociation" + }, + { + "name": "backup_plan_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup_plan_association" + }, + "description": "Sample for CreateBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan", + "type": "google.cloud.backupdr_v1.types.BackupPlan" + }, + { + "name": "backup_plan_id", + "type": "str" + }, + { + "name": "retry", + 
"type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup_plan" + }, + "description": "Sample for CreateBackupPlan", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlan_async", + "segments": [ + { + "end": 67, + "start": 27, + "type": "FULL" + }, + { + "end": 67, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 57, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 58, + "type": "REQUEST_EXECUTION" + }, + { + "end": 68, + "start": 65, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan", + "type": "google.cloud.backupdr_v1.types.BackupPlan" + }, + { + "name": "backup_plan_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + 
"shortName": "create_backup_plan" + }, + "description": "Sample for CreateBackupPlan", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlan_sync", + "segments": [ + { + "end": 67, + "start": 27, + "type": "FULL" + }, + { + "end": 67, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 57, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 58, + "type": "REQUEST_EXECUTION" + }, + { + "end": 68, + "start": 65, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupVaultRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "backup_vault_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup_vault" + }, + "description": "Sample for CreateBackupVault", + "file": "backupdr_v1_generated_backup_dr_create_backup_vault_async.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupVault_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_vault_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupVaultRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "backup_vault_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup_vault" + }, + "description": "Sample for CreateBackupVault", + "file": "backupdr_v1_generated_backup_dr_create_backup_vault_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupVault_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, 
+ { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_vault_sync.py" + }, { "canonical": true, "clientMethod": { @@ -21,28 +552,3260 @@ }, "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_management_server", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "management_server", + "type": "google.cloud.backupdr_v1.types.ManagementServer" + }, + { + "name": "management_server_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_management_server" + }, + "description": "Sample for CreateManagementServer", + "file": "backupdr_v1_generated_backup_dr_create_management_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 
47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_management_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "management_server", + "type": "google.cloud.backupdr_v1.types.ManagementServer" + }, + { + "name": "management_server_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_management_server" + }, + "description": "Sample for CreateManagementServer", + "file": "backupdr_v1_generated_backup_dr_create_management_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"backupdr_v1_generated_backup_dr_create_management_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_plan_association" + }, + "description": "Sample for DeleteBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + 
"fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_plan_association" + }, + "description": "Sample for DeleteBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" 
+ }, + "shortName": "DeleteBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_plan" + }, + "description": "Sample for DeleteBackupPlan", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlan_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_plan" + }, + "description": "Sample for DeleteBackupPlan", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_vault" + }, + "description": "Sample for DeleteBackupVault", + "file": "backupdr_v1_generated_backup_dr_delete_backup_vault_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"backupdr_v1_generated_BackupDR_DeleteBackupVault_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_vault_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_vault" + }, + "description": "Sample for DeleteBackupVault", + "file": "backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupVault_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "backupdr_v1_generated_backup_dr_delete_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": 
"google.cloud.backupdr_v1.BackupDRClient.delete_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "backupdr_v1_generated_backup_dr_delete_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_management_server" + }, + "description": "Sample for DeleteManagementServer", + "file": "backupdr_v1_generated_backup_dr_delete_management_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_management_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation.Operation", + "shortName": "delete_management_server" + }, + "description": "Sample for DeleteManagementServer", + "file": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.fetch_usable_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "FetchUsableBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsAsyncPager", + "shortName": "fetch_usable_backup_vaults" + }, + "description": "Sample for FetchUsableBackupVaults", + "file": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.fetch_usable_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "FetchUsableBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsPager", + "shortName": "fetch_usable_backup_vaults" + }, + "description": "Sample for FetchUsableBackupVaults", + "file": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", + "shortName": "get_backup_plan_association" + }, + "description": "Sample for GetBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } 
+ ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", + "shortName": "get_backup_plan_association" + }, + "description": "Sample for GetBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": 
"google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlan", + "shortName": "get_backup_plan" + }, + "description": "Sample for GetBackupPlan", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlan_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" + }, + { + 
"name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlan", + "shortName": "get_backup_plan" + }, + "description": "Sample for GetBackupPlan", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlan_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupVault", + "shortName": "get_backup_vault" + }, + "description": "Sample for 
GetBackupVault", + "file": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupVault_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupVault", + "shortName": "get_backup_vault" + }, + "description": "Sample for GetBackupVault", + "file": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupVault_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "backupdr_v1_generated_backup_dr_get_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "backupdr_v1_generated_backup_dr_get_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_data_source", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSource", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetDataSource" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.backupdr_v1.types.GetDataSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.DataSource", + "shortName": "get_data_source" + }, + "description": "Sample for GetDataSource", + "file": "backupdr_v1_generated_backup_dr_get_data_source_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_data_source_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_data_source", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSource", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetDataSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.DataSource", + "shortName": "get_data_source" + }, + 
"description": "Sample for GetDataSource", + "file": "backupdr_v1_generated_backup_dr_get_data_source_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_data_source_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.ManagementServer", + "shortName": "get_management_server" + }, + "description": "Sample for GetManagementServer", + "file": "backupdr_v1_generated_backup_dr_get_management_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + 
"end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_management_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.ManagementServer", + "shortName": "get_management_server" + }, + "description": "Sample for GetManagementServer", + "file": "backupdr_v1_generated_backup_dr_get_management_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"backupdr_v1_generated_backup_dr_get_management_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plan_associations", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlanAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager", + "shortName": "list_backup_plan_associations" + }, + "description": "Sample for ListBackupPlanAssociations", + "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": 
"BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plan_associations", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlanAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager", + "shortName": "list_backup_plan_associations" + }, + "description": "Sample for ListBackupPlanAssociations", + "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plans", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", + "service": { + "fullName": 
"google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlans" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansAsyncPager", + "shortName": "list_backup_plans" + }, + "description": "Sample for ListBackupPlans", + "file": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plans", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlans" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansPager", + "shortName": "list_backup_plans" + }, + "description": "Sample for ListBackupPlans", + "file": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsAsyncPager", + "shortName": "list_backup_vaults" + }, + "description": "Sample for ListBackupVaults", + "file": 
"backupdr_v1_generated_backup_dr_list_backup_vaults_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsPager", + "shortName": "list_backup_vaults" + }, + "description": "Sample for ListBackupVaults", + "file": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 
38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backups", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsAsyncPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "backupdr_v1_generated_backup_dr_list_backups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backups_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backups", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "backupdr_v1_generated_backup_dr_list_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_data_sources", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": 
"BackupDR" + }, + "shortName": "ListDataSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListDataSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesAsyncPager", + "shortName": "list_data_sources" + }, + "description": "Sample for ListDataSources", + "file": "backupdr_v1_generated_backup_dr_list_data_sources_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_data_sources_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_data_sources", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListDataSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListDataSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesPager", + "shortName": "list_data_sources" + }, + "description": "Sample for ListDataSources", + "file": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_management_servers", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListManagementServers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersAsyncPager", + "shortName": "list_management_servers" + }, + "description": "Sample for ListManagementServers", + "file": 
"backupdr_v1_generated_backup_dr_list_management_servers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_management_servers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_management_servers", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListManagementServers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersPager", + "shortName": "list_management_servers" + }, + "description": "Sample for ListManagementServers", + "file": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.restore_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "RestoreBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_backup" + }, + "description": "Sample for RestoreBackup", + "file": "backupdr_v1_generated_backup_dr_restore_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"backupdr_v1_generated_backup_dr_restore_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.restore_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "CreateManagementServer" + "shortName": "RestoreBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "management_server", - "type": "google.cloud.backupdr_v1.types.ManagementServer" + "name": "retry", + "type": "google.api_core.retry.Retry" }, { - "name": "management_server_id", + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_backup" + }, + "description": "Sample for RestoreBackup", + "file": "backupdr_v1_generated_backup_dr_restore_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_restore_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.trigger_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.TriggerBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "TriggerBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "rule_id", "type": "str" }, { @@ -59,13 +3822,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_management_server" + "shortName": "trigger_backup" }, - "description": "Sample for CreateManagementServer", - "file": "backupdr_v1_generated_backup_dr_create_management_server_async.py", + "description": "Sample for TriggerBackup", + "file": "backupdr_v1_generated_backup_dr_trigger_backup_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_async", + "regionTag": "backupdr_v1_generated_BackupDR_TriggerBackup_async", "segments": [ { "end": 56, @@ -98,7 +3861,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_create_management_server_async.py" + "title": "backupdr_v1_generated_backup_dr_trigger_backup_async.py" }, { "canonical": true, @@ -107,30 +3870,26 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.trigger_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.TriggerBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "CreateManagementServer" + 
"shortName": "TriggerBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "management_server", - "type": "google.cloud.backupdr_v1.types.ManagementServer" - }, - { - "name": "management_server_id", + "name": "rule_id", "type": "str" }, { @@ -147,13 +3906,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_management_server" + "shortName": "trigger_backup" }, - "description": "Sample for CreateManagementServer", - "file": "backupdr_v1_generated_backup_dr_create_management_server_sync.py", + "description": "Sample for TriggerBackup", + "file": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_sync", + "regionTag": "backupdr_v1_generated_BackupDR_TriggerBackup_sync", "segments": [ { "end": 56, @@ -186,7 +3945,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_create_management_server_sync.py" + "title": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py" }, { "canonical": true, @@ -196,23 +3955,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_backup_vault", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackupVault", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "DeleteManagementServer" + "shortName": "UpdateBackupVault" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupVaultRequest" }, { - "name": "name", - "type": "str" + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -228,21 +3991,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_management_server" + "shortName": "update_backup_vault" }, - "description": "Sample for DeleteManagementServer", - "file": "backupdr_v1_generated_backup_dr_delete_management_server_async.py", + "description": "Sample for UpdateBackupVault", + "file": "backupdr_v1_generated_backup_dr_update_backup_vault_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_async", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupVault_async", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -252,22 +4015,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_delete_management_server_async.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_vault_async.py" }, { "canonical": true, @@ -276,23 +4039,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_backup_vault", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "fullName": 
"google.cloud.backupdr.v1.BackupDR.UpdateBackupVault", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "DeleteManagementServer" + "shortName": "UpdateBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupVaultRequest" }, { - "name": "name", - "type": "str" + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -308,21 +4075,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_management_server" + "shortName": "update_backup_vault" }, - "description": "Sample for DeleteManagementServer", - "file": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py", + "description": "Sample for UpdateBackupVault", + "file": "backupdr_v1_generated_backup_dr_update_backup_vault_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_sync", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupVault_sync", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -332,22 +4099,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_vault_sync.py" }, { "canonical": true, @@ -357,23 +4124,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - 
"fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetManagementServer" + "shortName": "UpdateBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupRequest" }, { - "name": "name", - "type": "str" + "name": "backup", + "type": "google.cloud.backupdr_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -388,22 +4159,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.types.ManagementServer", - "shortName": "get_management_server" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_backup" }, - "description": "Sample for GetManagementServer", - "file": "backupdr_v1_generated_backup_dr_get_management_server_async.py", + "description": "Sample for UpdateBackup", + "file": "backupdr_v1_generated_backup_dr_update_backup_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_async", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackup_async", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -413,22 +4184,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } 
], - "title": "backupdr_v1_generated_backup_dr_get_management_server_async.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_async.py" }, { "canonical": true, @@ -437,23 +4208,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetManagementServer" + "shortName": "UpdateBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupRequest" }, { - "name": "name", - "type": "str" + "name": "backup", + "type": "google.cloud.backupdr_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -468,22 +4243,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.types.ManagementServer", - "shortName": "get_management_server" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_backup" }, - "description": "Sample for GetManagementServer", - "file": "backupdr_v1_generated_backup_dr_get_management_server_sync.py", + "description": "Sample for UpdateBackup", + "file": "backupdr_v1_generated_backup_dr_update_backup_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_sync", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackup_sync", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -493,22 +4268,22 @@ "type": "CLIENT_INITIALIZATION" 
}, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_management_server_sync.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_sync.py" }, { "canonical": true, @@ -518,23 +4293,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_management_servers", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_data_source", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateDataSource", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListManagementServers" + "shortName": "UpdateDataSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + "type": "google.cloud.backupdr_v1.types.UpdateDataSourceRequest" }, { - "name": "parent", - "type": "str" + "name": "data_source", + "type": "google.cloud.backupdr_v1.types.DataSource" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -549,22 +4328,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersAsyncPager", - "shortName": "list_management_servers" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_data_source" }, - "description": "Sample for ListManagementServers", - "file": "backupdr_v1_generated_backup_dr_list_management_servers_async.py", + "description": "Sample for UpdateDataSource", + "file": "backupdr_v1_generated_backup_dr_update_data_source_async.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_async", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateDataSource_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -574,22 +4353,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_management_servers_async.py" + "title": "backupdr_v1_generated_backup_dr_update_data_source_async.py" }, { "canonical": true, @@ -598,23 +4377,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_management_servers", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_data_source", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateDataSource", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListManagementServers" + "shortName": "UpdateDataSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + "type": "google.cloud.backupdr_v1.types.UpdateDataSourceRequest" }, { - "name": "parent", - "type": "str" + "name": "data_source", + "type": "google.cloud.backupdr_v1.types.DataSource" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -629,22 +4412,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersPager", - "shortName": 
"list_management_servers" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_data_source" }, - "description": "Sample for ListManagementServers", - "file": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py", + "description": "Sample for UpdateDataSource", + "file": "backupdr_v1_generated_backup_dr_update_data_source_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_sync", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateDataSource_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -654,22 +4437,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py" + "title": "backupdr_v1_generated_backup_dr_update_data_source_sync.py" } ] } diff --git a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py index b65698148046..c0dd15568f46 100644 --- a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py +++ b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py @@ -39,10 +39,33 @@ def partition( class backupdrCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_backup_plan': ('parent', 'backup_plan_id', 'backup_plan', 'request_id', ), + 'create_backup_plan_association': ('parent', 'backup_plan_association_id', 'backup_plan_association', 'request_id', ), + 'create_backup_vault': ('parent', 'backup_vault_id', 'backup_vault', 'request_id', 
'validate_only', ), 'create_management_server': ('parent', 'management_server_id', 'management_server', 'request_id', ), + 'delete_backup': ('name', 'request_id', ), + 'delete_backup_plan': ('name', 'request_id', ), + 'delete_backup_plan_association': ('name', 'request_id', ), + 'delete_backup_vault': ('name', 'request_id', 'force', 'etag', 'validate_only', 'allow_missing', ), 'delete_management_server': ('name', 'request_id', ), + 'fetch_usable_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'get_backup': ('name', 'view', ), + 'get_backup_plan': ('name', ), + 'get_backup_plan_association': ('name', ), + 'get_backup_vault': ('name', 'view', ), + 'get_data_source': ('name', ), 'get_management_server': ('name', ), + 'list_backup_plan_associations': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_backup_plans': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), + 'list_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), + 'list_data_sources': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_management_servers': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'restore_backup': ('name', 'request_id', 'compute_instance_target_environment', 'compute_instance_restore_properties', ), + 'trigger_backup': ('name', 'rule_id', 'request_id', ), + 'update_backup': ('update_mask', 'backup', 'request_id', ), + 'update_backup_vault': ('update_mask', 'backup_vault', 'request_id', 'validate_only', 'force', ), + 'update_data_source': ('update_mask', 'data_source', 'request_id', 'allow_missing', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py index 
f7e48fb284b5..ee098d5a5646 100644 --- a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py +++ b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py @@ -48,10 +48,16 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import month_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -66,7 +72,14 @@ pagers, transports, ) -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, + backupvault_ba, + backupvault_gce, +) def client_cert_source_callback(): @@ -1264,22 +1277,23 @@ async def test_list_management_servers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_management_servers - ] = mock_object + ] = mock_rpc request = {} await client.list_management_servers(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_management_servers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1876,22 +1890,23 @@ async def test_get_management_server_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_management_server - ] = mock_object + ] = mock_rpc request = {} await client.get_management_server(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_management_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2223,8 +2238,9 @@ def test_create_management_server_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_management_server(request) @@ -2280,26 +2296,28 @@ async def test_create_management_server_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_management_server - ] = mock_object + ] = mock_rpc request = {} await client.create_management_server(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_management_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2632,8 +2650,9 @@ def test_delete_management_server_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_management_server(request) @@ -2689,26 +2708,28 @@ async def test_delete_management_server_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_management_server - ] = mock_object + ] = mock_rpc request = {} await client.delete_management_server(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_management_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2903,52 +2924,98 @@ async def test_delete_management_server_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - backupdr.ListManagementServersRequest, + backupvault.CreateBackupVaultRequest, dict, ], ) -def test_list_management_servers_rest(request_type): +def test_create_backup_vault(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.CreateBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) + client.create_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.CreateBackupVaultRequest() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_management_servers(request) +def test_create_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListManagementServersPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) -def test_list_management_servers_rest_use_cached_wrapped_rpc(): +def test_create_backup_vault_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -2957,8 +3024,7 @@ def test_list_management_servers_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_management_servers - in client._transport._wrapped_methods + client._transport.create_backup_vault in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -2967,126 +3033,18960 @@ def test_list_management_servers_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_management_servers + client._transport.create_backup_vault ] = mock_rpc - request = {} - client.list_management_servers(request) + client.create_backup_vault(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_management_servers(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_management_servers_rest_required_fields( - request_type=backupdr.ListManagementServersRequest, -): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_create_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.CreateBackupVaultRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_management_servers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_create_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["parent"] = "parent_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_management_servers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", + # Ensure method has been cached + assert ( + client._client._transport.create_backup_vault + in client._client._transport._wrapped_methods ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup_vault + ] = mock_rpc - client = BackupDRClient( + request = {} + await client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.CreateBackupVaultRequest +): + client = BackupDRAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_vault(request) - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.CreateBackupVaultRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) - response = client.list_management_servers(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_create_backup_vault_async_from_dict(): + await test_create_backup_vault_async(request_type=dict) -def test_list_management_servers_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_create_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_management_servers._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.CreateBackupVaultRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupvault.CreateBackupVaultRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_vault( + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].backup_vault_id + mock_val = "backup_vault_id_value" + assert arg == mock_val + + +def test_create_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_vault( + backupvault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup_vault( + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].backup_vault_id + mock_val = "backup_vault_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_vault( + backupvault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupVaultsRequest, + dict, + ], +) +def test_list_backup_vaults(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_vaults_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupVaultsRequest() + + +def test_list_backup_vaults_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.ListBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_vaults(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_backup_vaults_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_vaults in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_vaults + ] = mock_rpc + request = {} + client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_vaults_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupVaultsRequest() + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_vaults + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backup_vaults + ] = mock_rpc + + request = {} + await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async( + transport: str = "grpc_asyncio", request_type=backupvault.ListBackupVaultsRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupVaultsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_from_dict(): + await test_list_backup_vaults_async(request_type=dict) + + +def test_list_backup_vaults_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupvault.ListBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value = backupvault.ListBackupVaultsResponse() + client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_vaults_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse() + ) + await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_vaults_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupVaultsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_vaults_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_vaults( + backupvault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_vaults_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupVaultsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_vaults_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_vaults( + backupvault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_list_backup_vaults_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_vaults(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in 
results) + + +def test_list_backup_vaults_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_vaults(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_vaults( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_vaults(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.FetchUsableBackupVaultsRequest, + dict, + ], +) +def test_fetch_usable_backup_vaults(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.FetchUsableBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchUsableBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_fetch_usable_backup_vaults_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_usable_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.FetchUsableBackupVaultsRequest() + + +def test_fetch_usable_backup_vaults_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.FetchUsableBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_usable_backup_vaults(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.FetchUsableBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_fetch_usable_backup_vaults_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_usable_backup_vaults + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_usable_backup_vaults + ] = mock_rpc + request = {} + client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_usable_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.fetch_usable_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.FetchUsableBackupVaultsRequest() + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.fetch_usable_backup_vaults + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.fetch_usable_backup_vaults + ] = mock_rpc + + request = {} + await client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.fetch_usable_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async( + transport: str = "grpc_asyncio", + request_type=backupvault.FetchUsableBackupVaultsRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.FetchUsableBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchUsableBackupVaultsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_from_dict(): + await test_fetch_usable_backup_vaults_async(request_type=dict) + + +def test_fetch_usable_backup_vaults_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.FetchUsableBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value = backupvault.FetchUsableBackupVaultsResponse() + client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.FetchUsableBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse() + ) + await client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_fetch_usable_backup_vaults_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.FetchUsableBackupVaultsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_usable_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_fetch_usable_backup_vaults_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_usable_backup_vaults( + backupvault.FetchUsableBackupVaultsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.FetchUsableBackupVaultsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_usable_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_usable_backup_vaults( + backupvault.FetchUsableBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_fetch_usable_backup_vaults_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.fetch_usable_backup_vaults( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in results) + + +def test_fetch_usable_backup_vaults_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_usable_backup_vaults(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_usable_backup_vaults( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_usable_backup_vaults(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupVaultRequest, + dict, + ], +) +def test_get_backup_vault(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + response = client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.BackupVault) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.deletable is True + assert response.etag == "etag_value" + assert response.state == backupvault.BackupVault.State.CREATING + assert response.backup_count == 1278 + assert response.service_account == "service_account_value" + assert response.total_stored_bytes == 1946 + assert response.uid == "uid_value" + assert ( + response.access_restriction + == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT + ) + + +def test_get_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupVaultRequest() + + +def test_get_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.GetBackupVaultRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupVaultRequest( + name="name_value", + ) + + +def test_get_backup_vault_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_vault in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_backup_vault + ] = mock_rpc + request = {} + client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + ) + response = await client.get_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_get_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) 
+ + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_vault + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_vault + ] = mock_rpc + + request = {} + await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.GetBackupVaultRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + ) + response = await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.BackupVault) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.deletable is True + assert response.etag == "etag_value" + assert response.state == backupvault.BackupVault.State.CREATING + assert response.backup_count == 1278 + assert response.service_account == "service_account_value" + assert response.total_stored_bytes == 1946 + assert response.uid == "uid_value" + assert ( + response.access_restriction + == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT + ) + + +@pytest.mark.asyncio +async def test_get_backup_vault_async_from_dict(): + await test_get_backup_vault_async(request_type=dict) + + +def test_get_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value = backupvault.BackupVault() + client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault() + ) + await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.BackupVault() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_vault( + backupvault.GetBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.BackupVault() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_backup_vault( + backupvault.GetBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupVaultRequest, + dict, + ], +) +def test_update_backup_vault(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupVaultRequest() + + +def test_update_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.UpdateBackupVaultRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupVaultRequest() + + +def test_update_backup_vault_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_backup_vault + ] = mock_rpc + request = {} + client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_update_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_backup_vault + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_backup_vault + ] = mock_rpc + + request = {} + await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.UpdateBackupVaultRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_backup_vault_async_from_dict(): + await test_update_backup_vault_async(request_type=dict) + + +def test_update_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupVaultRequest() + + request.backup_vault.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_vault.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupVaultRequest() + + request.backup_vault.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_vault.name=name_value", + ) in kw["metadata"] + + +def test_update_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup_vault( + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_vault( + backupvault.UpdateBackupVaultRequest(), + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_backup_vault( + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_backup_vault( + backupvault.UpdateBackupVaultRequest(), + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupVaultRequest, + dict, + ], +) +def test_delete_backup_vault(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupVaultRequest() + + +def test_delete_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.DeleteBackupVaultRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupVaultRequest( + name="name_value", + etag="etag_value", + ) + + +def test_delete_backup_vault_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_vault + ] = mock_rpc + request = {} + client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_vault + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_vault + ] = mock_rpc + + request = {} + await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.DeleteBackupVaultRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async_from_dict(): + await test_delete_backup_vault_async(request_type=dict) + + +def test_delete_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_vault( + backupvault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup_vault( + backupvault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListDataSourcesRequest, + dict, + ], +) +def test_list_data_sources(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.ListDataSourcesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_data_sources_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_data_sources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListDataSourcesRequest() + + +def test_list_data_sources_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.ListDataSourcesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_data_sources(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListDataSourcesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_data_sources_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_sources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_sources + ] = mock_rpc + request = {} + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_sources_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_data_sources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListDataSourcesRequest() + + +@pytest.mark.asyncio +async def test_list_data_sources_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_data_sources + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_data_sources + ] = mock_rpc + + request = {} + await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_sources_async( + transport: str = "grpc_asyncio", request_type=backupvault.ListDataSourcesRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.ListDataSourcesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_data_sources_async_from_dict(): + await test_list_data_sources_async(request_type=dict) + + +def test_list_data_sources_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupvault.ListDataSourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value = backupvault.ListDataSourcesResponse() + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_data_sources_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListDataSourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse() + ) + await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_data_sources_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListDataSourcesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_data_sources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_data_sources_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + backupvault.ListDataSourcesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListDataSourcesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_data_sources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_data_sources( + backupvault.ListDataSourcesRequest(), + parent="parent_value", + ) + + +def test_list_data_sources_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_sources(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.DataSource) for i in results) + + +def 
test_list_data_sources_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_sources(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_sources( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.DataSource) for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_sources(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetDataSourceRequest, + dict, + ], +) +def test_get_data_source(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + response = client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.GetDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.DataSource) + assert response.name == "name_value" + assert response.state == backupvault.DataSource.State.CREATING + assert response.backup_count == 1278 + assert response.etag == "etag_value" + assert response.total_stored_bytes == 1946 + assert response.config_state == backupvault.BackupConfigState.ACTIVE + + +def test_get_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetDataSourceRequest() + + +def test_get_data_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.GetDataSourceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_data_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetDataSourceRequest( + name="name_value", + ) + + +def test_get_data_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc + request = {} + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_source_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + ) + response = await client.get_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetDataSourceRequest() + + +@pytest.mark.asyncio +async def test_get_data_source_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_data_source + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_source + ] = mock_rpc + + request = {} + await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_source_async( + transport: str = "grpc_asyncio", request_type=backupvault.GetDataSourceRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + ) + response = await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.GetDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.DataSource) + assert response.name == "name_value" + assert response.state == backupvault.DataSource.State.CREATING + assert response.backup_count == 1278 + assert response.etag == "etag_value" + assert response.total_stored_bytes == 1946 + assert response.config_state == backupvault.BackupConfigState.ACTIVE + + +@pytest.mark.asyncio +async def test_get_data_source_async_from_dict(): + await test_get_data_source_async(request_type=dict) + + +def test_get_data_source_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value = backupvault.DataSource() + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_data_source_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource() + ) + await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_data_source_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.DataSource() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_data_source_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_source( + backupvault.GetDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.DataSource() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_data_source( + backupvault.GetDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateDataSourceRequest, + dict, + ], +) +def test_update_data_source(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateDataSourceRequest() + + +def test_update_data_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.UpdateDataSourceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_data_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateDataSourceRequest() + + +def test_update_data_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_source + ] = mock_rpc + request = {} + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_source_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateDataSourceRequest() + + +@pytest.mark.asyncio +async def test_update_data_source_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_data_source + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_data_source + ] = mock_rpc + + request = {} + await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_source_async( + transport: str = "grpc_asyncio", request_type=backupvault.UpdateDataSourceRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_data_source_async_from_dict(): + await test_update_data_source_async(request_type=dict) + + +def test_update_data_source_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateDataSourceRequest() + + request.data_source.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_source.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_data_source_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateDataSourceRequest() + + request.data_source.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_source.name=name_value", + ) in kw["metadata"] + + +def test_update_data_source_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_data_source( + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].data_source + mock_val = backupvault.DataSource(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_data_source_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_data_source_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_data_source( + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].data_source + mock_val = backupvault.DataSource(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_data_source_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupsRequest, + dict, + ], +) +def test_list_backups(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupsRequest() + + +def test_list_backups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.ListBackupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_backups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_backups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backups_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupsRequest() + + +@pytest.mark.asyncio +async def test_list_backups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backups + ] = mock_rpc + + request = {} + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backups_async( + transport: str = "grpc_asyncio", request_type=backupvault.ListBackupsRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backups_async_from_dict(): + await test_list_backups_async(request_type=dict) + + +def test_list_backups_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = backupvault.ListBackupsResponse() + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backups_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse() + ) + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backups_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backups_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backups_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backups_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backups(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.Backup) for i in results) + + +def test_list_backups_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backups_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.Backup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backups_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupRequest, + dict, + ], +) +def test_get_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + + +def test_get_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupRequest() + + +def test_get_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.GetBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupRequest( + name="name_value", + ) + + +def test_get_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + ) + response = await client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupRequest() + + +@pytest.mark.asyncio +async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup + ] = mock_rpc + + request = {} + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.GetBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + ) + response = await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + + +@pytest.mark.asyncio +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) + + +def test_get_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = backupvault.Backup() + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.Backup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupvault.Backup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupRequest, + dict, + ], +) +def test_update_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_update_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupRequest() + + +def test_update_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.UpdateBackupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupRequest() + + +def test_update_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupRequest() + + +@pytest.mark.asyncio +async def test_update_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_backup + ] = mock_rpc + + request = {} + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.UpdateBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) + + +def test_update_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup.name=name_value", + ) in kw["metadata"] + + +def test_update_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup + mock_val = backupvault.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_backup( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backup + mock_val = backupvault.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupRequest() + + +def test_delete_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.DeleteBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupRequest( + name="name_value", + ) + + +def test_delete_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup + ] = mock_rpc + + request = {} + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.DeleteBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) + + +def test_delete_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + backupvault.DeleteBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup( + backupvault.DeleteBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.RestoreBackupRequest, + dict, + ], +) +def test_restore_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.RestoreBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restore_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.restore_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.RestoreBackupRequest() + + +def test_restore_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.RestoreBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.restore_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.RestoreBackupRequest( + name="name_value", + ) + + +def test_restore_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.restore_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc + request = {} + client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_restore_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.RestoreBackupRequest() + + +@pytest.mark.asyncio +async def test_restore_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.restore_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.restore_backup + ] = mock_rpc + + request = {} + await client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.restore_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_restore_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.RestoreBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.RestoreBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_backup_async_from_dict(): + await test_restore_backup_async(request_type=dict) + + +def test_restore_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.RestoreBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restore_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.RestoreBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_restore_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.restore_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_restore_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_restore_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.restore_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_restore_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.CreateBackupPlanRequest, + dict, + ], +) +def test_create_backup_plan(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.CreateBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_plan_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.CreateBackupPlanRequest() + + +def test_create_backup_plan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_backup_plan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + ) + + +def test_create_backup_plan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan + ] = mock_rpc + request = {} + client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.CreateBackupPlanRequest() + + +@pytest.mark.asyncio +async def test_create_backup_plan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_backup_plan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup_plan + ] = mock_rpc + + request = {} + await client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.CreateBackupPlanRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.CreateBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_plan_async_from_dict(): + await test_create_backup_plan_async(request_type=dict) + + +def test_create_backup_plan_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.CreateBackupPlanRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_plan_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.CreateBackupPlanRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_plan_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_plan( + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan + mock_val = backupplan.BackupPlan(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_id + mock_val = "backup_plan_id_value" + assert arg == mock_val + + +def test_create_backup_plan_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_plan_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_backup_plan( + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan + mock_val = backupplan.BackupPlan(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_id + mock_val = "backup_plan_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_plan_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.GetBackupPlanRequest, + dict, + ], +) +def test_get_backup_plan(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + response = client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.GetBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + + +def test_get_backup_plan_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.GetBackupPlanRequest() + + +def test_get_backup_plan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.GetBackupPlanRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_plan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.GetBackupPlanRequest( + name="name_value", + ) + + +def test_get_backup_plan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_plan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc + request = {} + client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + ) + response = await client.get_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.GetBackupPlanRequest() + + +@pytest.mark.asyncio +async def test_get_backup_plan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_plan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped 
function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_plan + ] = mock_rpc + + request = {} + await client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.GetBackupPlanRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + ) + response = await client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.GetBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + + +@pytest.mark.asyncio +async def test_get_backup_plan_async_from_dict(): + await test_get_backup_plan_async(request_type=dict) + + +def test_get_backup_plan_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.GetBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value = backupplan.BackupPlan() + client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_plan_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.GetBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan() + ) + await client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_plan_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.BackupPlan() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_plan_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_plan_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.BackupPlan() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_plan_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.ListBackupPlansRequest, + dict, + ], +) +def test_list_backup_plans(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.ListBackupPlansRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlansPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_plans_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plans() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest() + + +def test_list_backup_plans_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = backupplan.ListBackupPlansRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plans(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_backup_plans_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backup_plans in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_plans + ] = mock_rpc + request = {} + client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backup_plans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plans_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plans() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest() + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_plans + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + 
client._client._transport.list_backup_plans + ] = mock_rpc + + request = {} + await client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_backup_plans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plans_async( + transport: str = "grpc_asyncio", request_type=backupplan.ListBackupPlansRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.ListBackupPlansRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupPlansAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_from_dict(): + await test_list_backup_plans_async(request_type=dict) + + +def test_list_backup_plans_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.ListBackupPlansRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value = backupplan.ListBackupPlansResponse() + client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_plans_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.ListBackupPlansRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse() + ) + await client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_plans_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.ListBackupPlansResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_plans( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_plans_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_plans_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplan.ListBackupPlansResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_plans( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_plans_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", + ) + + +def test_list_backup_plans_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_plans(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in results) + + +def test_list_backup_plans_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_plans(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_plans( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_plans(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.DeleteBackupPlanRequest, + dict, + ], +) +def test_delete_backup_plan(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.DeleteBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_plan_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.DeleteBackupPlanRequest() + + +def test_delete_backup_plan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.DeleteBackupPlanRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_backup_plan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.DeleteBackupPlanRequest( + name="name_value", + ) + + +def test_delete_backup_plan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_plan + ] = mock_rpc + request = {} + client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.DeleteBackupPlanRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_plan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_plan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_plan + ] = mock_rpc + + request = {} + await client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.DeleteBackupPlanRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.DeleteBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_async_from_dict(): + await test_delete_backup_plan_async(request_type=dict) + + +def test_delete_backup_plan_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.DeleteBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_plan_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.DeleteBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_plan_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_plan_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_plan_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.CreateBackupPlanAssociationRequest, + dict, + ], +) +def test_create_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.CreateBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_plan_association_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest() + + +def test_create_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + ) + + +def test_create_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan_association + ] = mock_rpc + request = {} + client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest() + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_backup_plan_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup_plan_association + ] = mock_rpc + + request = {} + await client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.CreateBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_async_from_dict(): + await test_create_backup_plan_association_async(request_type=dict) + + +def test_create_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupplanassociation.CreateBackupPlanAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.CreateBackupPlanAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_plan_association( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_association_id + mock_val = "backup_plan_association_id_value" + assert arg == mock_val + + +def test_create_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup_plan_association( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_association_id + mock_val = "backup_plan_association_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.GetBackupPlanAssociationRequest, + dict, + ], +) +def test_get_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + response = client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.GetBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + + +def test_get_backup_plan_association_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest() + + +def test_get_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest( + name="name_value", + ) + + +def test_get_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_plan_association + ] = mock_rpc + request = {} + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + ) + response = await client.get_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest() + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_plan_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_plan_association + ] = mock_rpc + + request = {} + await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.GetBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + ) + response = await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.GetBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async_from_dict(): + await test_get_backup_plan_association_async(request_type=dict) + + +def test_get_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.GetBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value = backupplanassociation.BackupPlanAssociation() + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupplanassociation.GetBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation() + ) + await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.BackupPlanAssociation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.BackupPlanAssociation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.ListBackupPlanAssociationsRequest, + dict, + ], +) +def test_list_backup_plan_associations(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.ListBackupPlanAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlanAssociationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_plan_associations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plan_associations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest() + + +def test_list_backup_plan_associations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.ListBackupPlanAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_backup_plan_associations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_backup_plan_associations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_plan_associations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_plan_associations + ] = mock_rpc + request = {} + client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_plan_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plan_associations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest() + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_plan_associations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backup_plan_associations + ] = mock_rpc + + request = {} + await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_backup_plan_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.ListBackupPlanAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupPlanAssociationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_from_dict(): + await test_list_backup_plan_associations_async(request_type=dict) + + +def test_list_backup_plan_associations_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.ListBackupPlanAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.ListBackupPlanAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) + await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_plan_associations_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_plan_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_plan_associations_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_plan_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", + ) + + +def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_plan_associations( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results + ) + + +def test_list_backup_plan_associations_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_plan_associations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_plan_associations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_plan_associations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.DeleteBackupPlanAssociationRequest, + dict, + ], +) +def test_delete_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_plan_association_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest() + + +def test_delete_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + +def test_delete_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_plan_association + ] = mock_rpc + request = {} + client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_plan_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = 
mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_plan_association + ] = mock_rpc + + request = {} + await client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async_from_dict(): + await test_delete_backup_plan_association_async(request_type=dict) + + +def test_delete_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.TriggerBackupRequest, + dict, + ], +) +def test_trigger_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.TriggerBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_trigger_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.trigger_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.TriggerBackupRequest() + + +def test_trigger_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.trigger_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + +def test_trigger_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.trigger_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc + request = {} + client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.trigger_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_trigger_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.trigger_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.TriggerBackupRequest() + + +@pytest.mark.asyncio +async def test_trigger_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.trigger_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.trigger_backup + ] = mock_rpc + + request = {} + await client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.trigger_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_trigger_backup_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.TriggerBackupRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.TriggerBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_trigger_backup_async_from_dict(): + await test_trigger_backup_async(request_type=dict) + + +def test_trigger_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.TriggerBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_trigger_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.TriggerBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_trigger_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.trigger_backup( + name="name_value", + rule_id="rule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val + + +def test_trigger_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), + name="name_value", + rule_id="rule_id_value", + ) + + +@pytest.mark.asyncio +async def test_trigger_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.trigger_backup( + name="name_value", + rule_id="rule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_trigger_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), + name="name_value", + rule_id="rule_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.ListManagementServersRequest, + dict, + ], +) +def test_list_management_servers_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ListManagementServersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_management_servers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListManagementServersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_management_servers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_management_servers + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_management_servers + ] = mock_rpc + + request = {} + client.list_management_servers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_management_servers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_management_servers_rest_required_fields( + request_type=backupdr.ListManagementServersRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_management_servers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_management_servers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupdr.ListManagementServersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_management_servers(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_management_servers_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_management_servers._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_management_servers_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_management_servers" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_management_servers" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupdr.ListManagementServersRequest.pb( + backupdr.ListManagementServersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupdr.ListManagementServersResponse.to_json( + backupdr.ListManagementServersResponse() + ) + + request = backupdr.ListManagementServersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupdr.ListManagementServersResponse() + + client.list_management_servers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_management_servers_rest_bad_request( + transport: str = "rest", request_type=backupdr.ListManagementServersRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_management_servers(request) + + +def test_list_management_servers_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ListManagementServersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_management_servers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/managementServers" + % client.transport._host, + args[1], + ) + + +def test_list_management_servers_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_management_servers( + backupdr.ListManagementServersRequest(), + parent="parent_value", + ) + + +def test_list_management_servers_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + backupdr.ManagementServer(), + backupdr.ManagementServer(), + ], + next_page_token="abc", + ), + backupdr.ListManagementServersResponse( + management_servers=[], + next_page_token="def", + ), + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + ], + next_page_token="ghi", + ), + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + backupdr.ManagementServer(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupdr.ListManagementServersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_management_servers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupdr.ManagementServer) for i in results) + + pages = list(client.list_management_servers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.GetManagementServerRequest, + dict, + ], +) +def test_get_management_server_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + 
request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ManagementServer( + name="name_value", + description="description_value", + type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, + state=backupdr.ManagementServer.InstanceState.CREATING, + etag="etag_value", + oauth2_client_id="oauth2_client_id_value", + ba_proxy_uri=["ba_proxy_uri_value"], + satisfies_pzi=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ManagementServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_management_server(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupdr.ManagementServer) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.type_ == backupdr.ManagementServer.InstanceType.BACKUP_RESTORE + assert response.state == backupdr.ManagementServer.InstanceState.CREATING + assert response.etag == "etag_value" + assert response.oauth2_client_id == "oauth2_client_id_value" + assert response.ba_proxy_uri == ["ba_proxy_uri_value"] + assert response.satisfies_pzi is True + + +def test_get_management_server_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_management_server + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_management_server + ] = mock_rpc + + request = {} + client.get_management_server(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_management_server(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_management_server_rest_required_fields( + request_type=backupdr.GetManagementServerRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupdr.ManagementServer() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupdr.ManagementServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_management_server(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_management_server_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_management_server._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
backupdr.GetManagementServerRequest.pb( + backupdr.GetManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupdr.ManagementServer.to_json( + backupdr.ManagementServer() + ) + + request = backupdr.GetManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupdr.ManagementServer() + + client.get_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_management_server_rest_bad_request( + transport: str = "rest", request_type=backupdr.GetManagementServerRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_management_server(request) + + +def test_get_management_server_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ManagementServer() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ManagementServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_management_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/managementServers/*}" + % client.transport._host, + args[1], + ) + + +def test_get_management_server_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_management_server( + backupdr.GetManagementServerRequest(), + name="name_value", + ) + + +def test_get_management_server_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.CreateManagementServerRequest, + dict, + ], +) +def test_create_management_server_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["management_server"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "type_": 1, + "management_uri": {"web_ui": "web_ui_value", "api": "api_value"}, + "workforce_identity_based_management_uri": { + "first_party_management_uri": "first_party_management_uri_value", + "third_party_management_uri": "third_party_management_uri_value", + }, + "state": 1, + "networks": [{"network": "network_value", "peering_mode": 1}], + "etag": "etag_value", + "oauth2_client_id": "oauth2_client_id_value", + "workforce_identity_based_oauth2_client_id": { + "first_party_oauth2_client_id": "first_party_oauth2_client_id_value", + "third_party_oauth2_client_id": "third_party_oauth2_client_id_value", + }, + "ba_proxy_uri": ["ba_proxy_uri_value1", "ba_proxy_uri_value2"], + "satisfies_pzs": {"value": True}, + "satisfies_pzi": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupdr.CreateManagementServerRequest.meta.fields["management_server"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["management_server"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version 
of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["management_server"][field])): + del request_init["management_server"][field][i][subfield] + else: + del request_init["management_server"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_management_server(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_management_server_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_management_server + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_management_server + ] = mock_rpc + + request = {} + client.create_management_server(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_management_server(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_management_server_rest_required_fields( + request_type=backupdr.CreateManagementServerRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["management_server_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "managementServerId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "managementServerId" in jsonified_request + assert ( + jsonified_request["managementServerId"] == request_init["management_server_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["managementServerId"] = "management_server_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_management_server._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "management_server_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "managementServerId" in jsonified_request + assert jsonified_request["managementServerId"] == "management_server_id_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_management_server(request) + + expected_params = [ + ( + "managementServerId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_management_server_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_management_server._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "managementServerId", + "requestId", + ) + ) + & set( + ( + "parent", + "managementServerId", + "managementServer", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = backupdr.CreateManagementServerRequest.pb( + backupdr.CreateManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupdr.CreateManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_management_server_rest_bad_request( + transport: str = "rest", request_type=backupdr.CreateManagementServerRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_management_server(request) + + +def test_create_management_server_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + management_server=backupdr.ManagementServer(name="name_value"), + management_server_id="management_server_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_management_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/managementServers" + % client.transport._host, + args[1], + ) + + +def test_create_management_server_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_management_server( + backupdr.CreateManagementServerRequest(), + parent="parent_value", + management_server=backupdr.ManagementServer(name="name_value"), + management_server_id="management_server_id_value", + ) + + +def test_create_management_server_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.DeleteManagementServerRequest, + dict, + ], +) +def test_delete_management_server_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_management_server(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_management_server_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_management_server + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_management_server + ] = mock_rpc + + request = {} + client.delete_management_server(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_management_server(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_management_server_rest_required_fields( + request_type=backupdr.DeleteManagementServerRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_management_server._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_management_server(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_management_server_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_management_server._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + 
transports.BackupDRRestInterceptor, "post_delete_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupdr.DeleteManagementServerRequest.pb( + backupdr.DeleteManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupdr.DeleteManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_management_server_rest_bad_request( + transport: str = "rest", request_type=backupdr.DeleteManagementServerRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_management_server(request) + + +def test_delete_management_server_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_management_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/managementServers/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_management_server_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_management_server( + backupdr.DeleteManagementServerRequest(), + name="name_value", + ) + + +def test_delete_management_server_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.CreateBackupVaultRequest, + dict, + ], +) +def test_create_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_vault"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_minimum_enforced_retention_duration": {"seconds": 751, "nanos": 543}, + "deletable": True, + "etag": "etag_value", + "state": 1, + "effective_time": {}, + "backup_count": 1278, + "service_account": "service_account_value", + "total_stored_bytes": 1946, + "uid": "uid_value", + "annotations": {}, + "access_restriction": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.CreateBackupVaultRequest.meta.fields["backup_vault"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_vault"][field])): + del request_init["backup_vault"][field][i][subfield] + else: + del 
request_init["backup_vault"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_backup_vault(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_vault + ] = mock_rpc + + request = {} + client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_vault_rest_required_fields( + request_type=backupvault.CreateBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_vault_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "backupVaultId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] == request_init["backup_vault_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupVaultId"] = "backup_vault_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "backup_vault_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] == "backup_vault_id_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_backup_vault(request) + + expected_params = [ + ( + "backupVaultId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "backupVaultId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "backupVaultId", + "backupVault", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
backupvault.CreateBackupVaultRequest.pb( + backupvault.CreateBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.CreateBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.CreateBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup_vault(request) + + +def test_create_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupVaults" + % client.transport._host, + args[1], + ) + + +def test_create_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_vault( + backupvault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +def test_create_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupVaultsRequest, + dict, + ], +) +def test_list_backup_vaults_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backup_vaults(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_vaults_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_vaults in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_vaults + ] = mock_rpc + + request = {} + client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_vaults_rest_required_fields( + request_type=backupvault.ListBackupVaultsRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_vaults._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_vaults._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupVaultsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backup_vaults(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backup_vaults_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backup_vaults._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backup_vaults_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backup_vaults" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_backup_vaults" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.ListBackupVaultsRequest.pb( + backupvault.ListBackupVaultsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.ListBackupVaultsResponse.to_json( + backupvault.ListBackupVaultsResponse() + ) + + request = backupvault.ListBackupVaultsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.ListBackupVaultsResponse() + + client.list_backup_vaults( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backup_vaults_rest_bad_request( + transport: str = "rest", request_type=backupvault.ListBackupVaultsRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backup_vaults(request) + + +def test_list_backup_vaults_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupVaultsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backup_vaults(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupVaults" + % client.transport._host, + args[1], + ) + + +def test_list_backup_vaults_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_vaults( + backupvault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_list_backup_vaults_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupvault.ListBackupVaultsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = 
response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backup_vaults(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in results) + + pages = list(client.list_backup_vaults(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.FetchUsableBackupVaultsRequest, + dict, + ], +) +def test_fetch_usable_backup_vaults_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_usable_backup_vaults(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchUsableBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_fetch_usable_backup_vaults_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_usable_backup_vaults + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_usable_backup_vaults + ] = mock_rpc + + request = {} + client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.fetch_usable_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_fetch_usable_backup_vaults_rest_required_fields( + request_type=backupvault.FetchUsableBackupVaultsRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.FetchUsableBackupVaultsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_usable_backup_vaults(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_usable_backup_vaults_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_usable_backup_vaults._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_usable_backup_vaults_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) 
as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_fetch_usable_backup_vaults" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_fetch_usable_backup_vaults" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.FetchUsableBackupVaultsRequest.pb( + backupvault.FetchUsableBackupVaultsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.FetchUsableBackupVaultsResponse.to_json( + backupvault.FetchUsableBackupVaultsResponse() + ) + + request = backupvault.FetchUsableBackupVaultsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.FetchUsableBackupVaultsResponse() + + client.fetch_usable_backup_vaults( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_usable_backup_vaults_rest_bad_request( + transport: str = "rest", request_type=backupvault.FetchUsableBackupVaultsRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_usable_backup_vaults(request) + + +def test_fetch_usable_backup_vaults_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.FetchUsableBackupVaultsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_usable_backup_vaults(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupVaults:fetchUsable" + % client.transport._host, + args[1], + ) + + +def test_fetch_usable_backup_vaults_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_usable_backup_vaults( + backupvault.FetchUsableBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_fetch_usable_backup_vaults_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupvault.FetchUsableBackupVaultsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.fetch_usable_backup_vaults(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in results) + + pages = list(client.fetch_usable_backup_vaults(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupVaultRequest, + dict, + ], +) +def test_get_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.BackupVault.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup_vault(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.BackupVault) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.deletable is True + assert response.etag == "etag_value" + assert response.state == backupvault.BackupVault.State.CREATING + assert response.backup_count == 1278 + assert response.service_account == "service_account_value" + assert response.total_stored_bytes == 1946 + assert response.uid == "uid_value" + assert ( + response.access_restriction + == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT + ) + + +def test_get_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_vault in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_vault + ] = mock_rpc + + request = {} + client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_vault_rest_required_fields( + request_type=backupvault.GetBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.BackupVault() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.BackupVault.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup_vault(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
backupvault.GetBackupVaultRequest.pb( + backupvault.GetBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.BackupVault.to_json( + backupvault.BackupVault() + ) + + request = backupvault.GetBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.BackupVault() + + client.get_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.GetBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup_vault(request) + + +def test_get_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupvault.BackupVault() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.BackupVault.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup_vault( + backupvault.GetBackupVaultRequest(), + name="name_value", + ) + + +def test_get_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupVaultRequest, + dict, + ], +) +def test_update_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } + request_init["backup_vault"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_minimum_enforced_retention_duration": {"seconds": 751, "nanos": 543}, + "deletable": True, + "etag": "etag_value", + "state": 1, + "effective_time": {}, + "backup_count": 1278, + "service_account": "service_account_value", + "total_stored_bytes": 1946, + "uid": "uid_value", + "annotations": {}, + "access_restriction": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.UpdateBackupVaultRequest.meta.fields["backup_vault"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_vault"][field])): + del request_init["backup_vault"][field][i][subfield] + else: + del 
request_init["backup_vault"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_backup_vault(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_backup_vault + ] = mock_rpc + + request = {} + client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backup_vault_rest_required_fields( + request_type=backupvault.UpdateBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_backup_vault(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "backupVault", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_update_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.UpdateBackupVaultRequest.pb( + backupvault.UpdateBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.UpdateBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.UpdateBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_backup_vault(request) + + +def test_update_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_vault( + backupvault.UpdateBackupVaultRequest(), + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupVaultRequest, + dict, + ], +) +def test_delete_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup_vault(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_vault + ] = mock_rpc + + request = {} + client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_vault_rest_required_fields( + request_type=backupvault.DeleteBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "force", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_backup_vault(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "force", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" 
+ ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.DeleteBackupVaultRequest.pb( + backupvault.DeleteBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.DeleteBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.DeleteBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup_vault(request) + + +def test_delete_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup_vault( + backupvault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +def test_delete_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListDataSourcesRequest, + dict, + ], +) +def test_list_data_sources_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_data_sources(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_data_sources_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_sources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_sources + ] = mock_rpc + + request = {} + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_data_sources_rest_required_fields( + request_type=backupvault.ListDataSourcesRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.ListDataSourcesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_data_sources(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_data_sources_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_data_sources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_sources_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_data_sources" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_data_sources" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.ListDataSourcesRequest.pb( + backupvault.ListDataSourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.ListDataSourcesResponse.to_json( + backupvault.ListDataSourcesResponse() + ) + + request = backupvault.ListDataSourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.ListDataSourcesResponse() + + client.list_data_sources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_data_sources_rest_bad_request( + transport: str = "rest", request_type=backupvault.ListDataSourcesRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_data_sources(request) + + +def test_list_data_sources_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListDataSourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_data_sources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/backupVaults/*}/dataSources" + % client.transport._host, + args[1], + ) + + +def test_list_data_sources_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + backupvault.ListDataSourcesRequest(), + parent="parent_value", + ) + + +def test_list_data_sources_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupvault.ListDataSourcesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + pager = client.list_data_sources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.DataSource) for i in results) + + pages = list(client.list_data_sources(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetDataSourceRequest, + dict, + ], +) +def test_get_data_source_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + 
# Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_data_source(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.DataSource) + assert response.name == "name_value" + assert response.state == backupvault.DataSource.State.CREATING + assert response.backup_count == 1278 + assert response.etag == "etag_value" + assert response.total_stored_bytes == 1946 + assert response.config_state == backupvault.BackupConfigState.ACTIVE + + +def test_get_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # 
operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc + + request = {} + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_source_rest_required_fields( + request_type=backupvault.GetDataSourceRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_source_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_data_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_source_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, 
"post_get_data_source" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.GetDataSourceRequest.pb( + backupvault.GetDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.DataSource.to_json( + backupvault.DataSource() + ) + + request = backupvault.GetDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.DataSource() + + client.get_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_data_source_rest_bad_request( + transport: str = "rest", request_type=backupvault.GetDataSourceRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_data_source(request) + + +def test_get_data_source_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_get_data_source_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_source( + backupvault.GetDataSourceRequest(), + name="name_value", + ) + + +def test_get_data_source_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateDataSourceRequest, + dict, + ], +) +def test_update_data_source_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } + request_init["data_source"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4", + "state": 1, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_count": 1278, + "etag": "etag_value", + "total_stored_bytes": 1946, + "config_state": 1, + "backup_config_info": { + "last_backup_state": 1, + "last_successful_backup_consistency_time": {}, + "last_backup_error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "gcp_backup_config": { + "backup_plan": "backup_plan_value", + "backup_plan_description": "backup_plan_description_value", + "backup_plan_association": 
"backup_plan_association_value", + "backup_plan_rules": [ + "backup_plan_rules_value1", + "backup_plan_rules_value2", + ], + }, + "backup_appliance_backup_config": { + "backup_appliance_name": "backup_appliance_name_value", + "backup_appliance_id": 1966, + "sla_id": 620, + "application_name": "application_name_value", + "host_name": "host_name_value", + "slt_name": "slt_name_value", + "slp_name": "slp_name_value", + }, + }, + "data_source_gcp_resource": { + "gcp_resourcename": "gcp_resourcename_value", + "location": "location_value", + "type_": "type__value", + "compute_instance_datasource_properties": { + "name": "name_value", + "description": "description_value", + "machine_type": "machine_type_value", + "total_disk_count": 1718, + "total_disk_size_gb": 1904, + }, + }, + "data_source_backup_appliance_application": { + "application_name": "application_name_value", + "backup_appliance": "backup_appliance_value", + "appliance_id": 1241, + "type_": "type__value", + "application_id": 1472, + "hostname": "hostname_value", + "host_id": 746, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.UpdateDataSourceRequest.meta.fields["data_source"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_source"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_source"][field])): + del request_init["data_source"][field][i][subfield] + else: + del 
request_init["data_source"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_data_source(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_source + ] = mock_rpc + + request = {} + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_source_rest_required_fields( + request_type=backupvault.UpdateDataSourceRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_source._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_data_source_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_data_source._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "dataSource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_data_source_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_data_source" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_update_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.UpdateDataSourceRequest.pb( + backupvault.UpdateDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.UpdateDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_data_source_rest_bad_request( + transport: str = "rest", request_type=backupvault.UpdateDataSourceRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_data_source(request) + + +def test_update_data_source_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_update_data_source_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_data_source_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupsRequest, + dict, + ], +) +def test_list_backups_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backups(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsRequest): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backups_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backups_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) 
as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backups" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_backups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.ListBackupsRequest.pb(backupvault.ListBackupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.ListBackupsResponse.to_json( + backupvault.ListBackupsResponse() + ) + + request = backupvault.ListBackupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.ListBackupsResponse() + + client.list_backups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backups_rest_bad_request( + transport: str = "rest", request_type=backupvault.ListBackupsRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backups(request) + + +def test_list_backups_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups" + % client.transport._host, + args[1], + ) + + +def test_list_backups_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(backupvault.ListBackupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + 
+ sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + pager = client.list_backups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.Backup) for i in results) + + pages = list(client.list_backups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupRequest, + dict, + ], +) +def test_get_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + + +def test_get_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_rest_required_fields(request_type=backupvault.GetBackupRequest): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.Backup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.GetBackupRequest.pb(backupvault.GetBackupRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.Backup.to_json(backupvault.Backup()) + + request = backupvault.GetBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.Backup() + + client.get_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.GetBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup(request) + + +def test_get_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupvault.Backup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", + ) + + +def test_get_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupRequest, + dict, + ], +) +def test_update_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + } + request_init["backup"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "enforced_retention_end_time": {}, + "expire_time": {}, + "consistency_time": {}, + "etag": "etag_value", + "state": 1, + "service_locks": [ + { + "lock_until_time": {}, + "backup_appliance_lock_info": { + "backup_appliance_id": 1966, + "backup_appliance_name": "backup_appliance_name_value", + "lock_reason": "lock_reason_value", + "job_name": "job_name_value", + "backup_image": "backup_image_value", + "sla_id": 620, + }, + "service_lock_info": {"operation": "operation_value"}, + } + ], + "backup_appliance_locks": {}, + "compute_instance_backup_properties": { + "description": "description_value", + "tags": {"items": ["items_value1", "items_value2"]}, + "machine_type": "machine_type_value", + "can_ip_forward": True, + "network_interface": [ + { + "network": "network_value", + "subnetwork": "subnetwork_value", + "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", + "internal_ipv6_prefix_length": 2831, + "name": "name_value", + "access_configs": [ + { + "type_": 1, + "name": "name_value", + 
"external_ip": "external_ip_value", + "external_ipv6": "external_ipv6_value", + "external_ipv6_prefix_length": 2837, + "set_public_ptr": True, + "public_ptr_domain_name": "public_ptr_domain_name_value", + "network_tier": 1, + } + ], + "ipv6_access_configs": {}, + "alias_ip_ranges": [ + { + "ip_cidr_range": "ip_cidr_range_value", + "subnetwork_range_name": "subnetwork_range_name_value", + } + ], + "stack_type": 1, + "ipv6_access_type": 1, + "queue_count": 1197, + "nic_type": 1, + "network_attachment": "network_attachment_value", + } + ], + "disk": [ + { + "initialize_params": { + "disk_name": "disk_name_value", + "replica_zones": [ + "replica_zones_value1", + "replica_zones_value2", + ], + }, + "device_name": "device_name_value", + "kind": "kind_value", + "disk_type_deprecated": 1, + "mode": 1, + "source": "source_value", + "index": 536, + "boot": True, + "auto_delete": True, + "license_": ["license__value1", "license__value2"], + "disk_interface": 1, + "guest_os_feature": [{"type_": 1}], + "disk_encryption_key": { + "raw_key": "raw_key_value", + "rsa_encrypted_key": "rsa_encrypted_key_value", + "kms_key_name": "kms_key_name_value", + "kms_key_service_account": "kms_key_service_account_value", + }, + "disk_size_gb": 1261, + "saved_state": 1, + "disk_type": "disk_type_value", + "type_": 1, + } + ], + "metadata": {"items": [{"key": "key_value", "value": "value_value"}]}, + "service_account": [ + {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} + ], + "scheduling": { + "on_host_maintenance": 1, + "automatic_restart": True, + "preemptible": True, + "node_affinities": [ + { + "key": "key_value", + "operator": 1, + "values": ["values_value1", "values_value2"], + } + ], + "min_node_cpus": 1379, + "provisioning_model": 1, + "instance_termination_action": 1, + "local_ssd_recovery_timeout": {"seconds": 751, "nanos": 543}, + }, + "guest_accelerator": [ + { + "accelerator_type": "accelerator_type_value", + "accelerator_count": 1805, + } + ], + 
"min_cpu_platform": "min_cpu_platform_value", + "key_revocation_action_type": 1, + "source_instance": "source_instance_value", + "labels": {}, + }, + "backup_appliance_backup_properties": { + "generation_id": 1368, + "finalize_time": {}, + "recovery_range_start_time": {}, + "recovery_range_end_time": {}, + }, + "backup_type": 1, + "gcp_backup_plan_info": { + "backup_plan": "backup_plan_value", + "backup_plan_rule_id": "backup_plan_rule_id_value", + }, + "resource_size_bytes": 2056, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.UpdateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del 
request_init["backup"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_backup(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backup_rest_required_fields( + request_type=backupvault.UpdateBackupRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "backup", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, 
"_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_backup" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_update_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.UpdateBackupRequest.pb( + backupvault.UpdateBackupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.UpdateBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.UpdateBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_backup(request) + + +def test_update_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + } + + # get truthy value for each flattened field + mock_args = dict( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup.name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_rest_required_fields( + request_type=backupvault.DeleteBackupRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_backup" + ) 
as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.DeleteBackupRequest.pb( + backupvault.DeleteBackupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.DeleteBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.DeleteBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup(request) + + +def test_delete_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup( + backupvault.DeleteBackupRequest(), + name="name_value", + ) + + +def test_delete_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.RestoreBackupRequest, + dict, + ], +) +def test_restore_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restore_backup(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_restore_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.restore_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc + + request = {} + client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_restore_backup_rest_required_fields( + request_type=backupvault.RestoreBackupRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.restore_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restore_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restore_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restore_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + 
transports.BackupDRRestInterceptor, "post_restore_backup" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_restore_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.RestoreBackupRequest.pb( + backupvault.RestoreBackupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.RestoreBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.restore_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restore_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.RestoreBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restore_backup(request) + + +def test_restore_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.restore_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}:restore" + % client.transport._host, + args[1], + ) + + +def test_restore_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", + ) + + +def test_restore_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.CreateBackupPlanRequest, + dict, + ], +) +def test_create_backup_plan_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_plan"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_rules": [ + { + "rule_id": "rule_id_value", + "backup_retention_days": 2237, + "standard_schedule": { + "recurrence_type": 1, + "hourly_frequency": 1748, + "days_of_week": [1], + "days_of_month": [1387, 1388], + "week_day_of_month": {"week_of_month": 1, "day_of_week": 1}, + "months": [1], + "backup_window": { + "start_hour_of_day": 1820, + "end_hour_of_day": 1573, + }, + "time_zone": "time_zone_value", + }, + } + ], + "state": 1, + "resource_type": "resource_type_value", + "etag": "etag_value", + "backup_vault": "backup_vault_value", + "backup_vault_service_account": "backup_vault_service_account_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupplan.CreateBackupPlanRequest.meta.fields["backup_plan"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_plan"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_plan"][field])): + del 
request_init["backup_plan"][field][i][subfield] + else: + del request_init["backup_plan"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_backup_plan(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan + ] = mock_rpc + + request = {} + client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_plan_rest_required_fields( + request_type=backupplan.CreateBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_plan_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "backupPlanId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupPlanId" in jsonified_request + assert jsonified_request["backupPlanId"] == request_init["backup_plan_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupPlanId"] = "backup_plan_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "backup_plan_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupPlanId" in jsonified_request + assert jsonified_request["backupPlanId"] == "backup_plan_id_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_backup_plan(request) + + expected_params = [ + ( + "backupPlanId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_plan_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup_plan._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "backupPlanId", + "requestId", + ) + ) + & set( + ( + "parent", + "backupPlanId", + "backupPlan", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_plan_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_plan" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_backup_plan" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplan.CreateBackupPlanRequest.pb( + 
backupplan.CreateBackupPlanRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupplan.CreateBackupPlanRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_backup_plan( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_plan_rest_bad_request( + transport: str = "rest", request_type=backupplan.CreateBackupPlanRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup_plan(request) + + +def test_create_backup_plan_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup_plan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlans" + % client.transport._host, + args[1], + ) + + +def test_create_backup_plan_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + +def test_create_backup_plan_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.GetBackupPlanRequest, + dict, + ], +) +def test_get_backup_plan_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.BackupPlan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup_plan(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + + +def test_get_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_plan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc + + request = {} + client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_plan_rest_required_fields( + request_type=backupplan.GetBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplan.BackupPlan() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplan.BackupPlan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup_plan(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_plan_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup_plan._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_plan_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup_plan" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_backup_plan" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplan.GetBackupPlanRequest.pb( + 
backupplan.GetBackupPlanRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupplan.BackupPlan.to_json( + backupplan.BackupPlan() + ) + + request = backupplan.GetBackupPlanRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupplan.BackupPlan() + + client.get_backup_plan( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_plan_rest_bad_request( + transport: str = "rest", request_type=backupplan.GetBackupPlanRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup_plan(request) + + +def test_get_backup_plan_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupplan.BackupPlan() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlans/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.BackupPlan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_backup_plan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPlans/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_plan_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", + ) + + +def test_get_backup_plan_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.ListBackupPlansRequest, + dict, + ], +) +def test_list_backup_plans_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlansResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backup_plans(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupPlansPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_plans_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backup_plans in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_plans + ] = mock_rpc + + request = {} + client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backup_plans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_plans_rest_required_fields( + request_type=backupplan.ListBackupPlansRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_plans._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_plans._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplan.ListBackupPlansResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlansResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backup_plans(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backup_plans_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backup_plans._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", "pageToken", ) ) - & set(("parent",)) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backup_plans_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backup_plans" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_backup_plans" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplan.ListBackupPlansRequest.pb( + backupplan.ListBackupPlansRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupplan.ListBackupPlansResponse.to_json( + backupplan.ListBackupPlansResponse() + ) + + request = backupplan.ListBackupPlansRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupplan.ListBackupPlansResponse() + + client.list_backup_plans( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backup_plans_rest_bad_request( + transport: str = "rest", request_type=backupplan.ListBackupPlansRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backup_plans(request) + + +def test_list_backup_plans_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupplan.ListBackupPlansResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlansResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backup_plans(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlans" + % client.transport._host, + args[1], + ) + + +def test_list_backup_plans_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", + ) + + +def test_list_backup_plans_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupplan.ListBackupPlansResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + 
return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backup_plans(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in results) + + pages = list(client.list_backup_plans(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.DeleteBackupPlanRequest, + dict, + ], +) +def test_delete_backup_plan_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup_plan(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_plan + ] = mock_rpc + + request = {} + client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_plan_rest_required_fields( + request_type=backupplan.DeleteBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_plan._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_backup_plan(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_plan_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup_plan._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_plan_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, 
"post_delete_backup_plan" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_backup_plan" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplan.DeleteBackupPlanRequest.pb( + backupplan.DeleteBackupPlanRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupplan.DeleteBackupPlanRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup_plan( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_plan_rest_bad_request( + transport: str = "rest", request_type=backupplan.DeleteBackupPlanRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup_plan(request) + + +def test_delete_backup_plan_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlans/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup_plan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPlans/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_plan_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", + ) + + +def test_delete_backup_plan_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.CreateBackupPlanAssociationRequest, + dict, + ], +) +def test_create_backup_plan_association_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_plan_association"] = { + "name": "name_value", + "resource_type": "resource_type_value", + "resource": "resource_value", + "backup_plan": "backup_plan_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "rules_config_info": [ + { + "rule_id": "rule_id_value", + "last_backup_state": 1, + "last_backup_error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "last_successful_backup_consistency_time": {}, + } + ], + "data_source": "data_source_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupplanassociation.CreateBackupPlanAssociationRequest.meta.fields[ + "backup_plan_association" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backup_plan_association" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_plan_association"][field])): + del 
request_init["backup_plan_association"][field][i][subfield] + else: + del request_init["backup_plan_association"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_backup_plan_association(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_backup_plan_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan_association + ] = mock_rpc + + request = {} + client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_plan_association_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "backupPlanAssociationId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupPlanAssociationId" in jsonified_request + assert ( + jsonified_request["backupPlanAssociationId"] + == request_init["backup_plan_association_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupPlanAssociationId"] = "backup_plan_association_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "backup_plan_association_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupPlanAssociationId" in jsonified_request + assert ( + jsonified_request["backupPlanAssociationId"] + == "backup_plan_association_id_value" + ) + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_backup_plan_association(request) + + expected_params = [ + ( + "backupPlanAssociationId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_plan_association_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup_plan_association._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "backupPlanAssociationId", + "requestId", + ) + ) + & set( + ( + "parent", + "backupPlanAssociationId", + "backupPlanAssociation", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_plan_association_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_plan_association" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_backup_plan_association" + ) as pre: 
+ pre.assert_not_called() + post.assert_not_called() + pb_message = backupplanassociation.CreateBackupPlanAssociationRequest.pb( + backupplanassociation.CreateBackupPlanAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupplanassociation.CreateBackupPlanAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_backup_plan_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_plan_association_rest_bad_request( + transport: str = "rest", + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup_plan_association(request) + + +def test_create_backup_plan_association_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup_plan_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" + % client.transport._host, + args[1], + ) + + +def test_create_backup_plan_association_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + +def test_create_backup_plan_association_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.GetBackupPlanAssociationRequest, + dict, + ], +) +def test_get_backup_plan_association_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup_plan_association(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + + +def test_get_backup_plan_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute 
client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_plan_association + ] = mock_rpc + + request = {} + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.GetBackupPlanAssociationRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplanassociation.BackupPlanAssociation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup_plan_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_plan_association_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials ) + unset_fields = transport.get_backup_plan_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_management_servers_rest_interceptors(null_interceptor): +def test_get_backup_plan_association_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -3097,14 +21997,14 @@ def test_list_management_servers_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, 
"transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_management_servers" + transports.BackupDRRestInterceptor, "post_get_backup_plan_association" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_list_management_servers" + transports.BackupDRRestInterceptor, "pre_get_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.ListManagementServersRequest.pb( - backupdr.ListManagementServersRequest() + pb_message = backupplanassociation.GetBackupPlanAssociationRequest.pb( + backupplanassociation.GetBackupPlanAssociationRequest() ) transcode.return_value = { "method": "post", @@ -3116,19 +22016,19 @@ def test_list_management_servers_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = backupdr.ListManagementServersResponse.to_json( - backupdr.ListManagementServersResponse() + req.return_value._content = backupplanassociation.BackupPlanAssociation.to_json( + backupplanassociation.BackupPlanAssociation() ) - request = backupdr.ListManagementServersRequest() + request = backupplanassociation.GetBackupPlanAssociationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupdr.ListManagementServersResponse() + post.return_value = backupplanassociation.BackupPlanAssociation() - client.list_management_servers( + client.get_backup_plan_association( request, metadata=[ ("key", "val"), @@ -3140,8 +22040,9 @@ def test_list_management_servers_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_management_servers_rest_bad_request( - transport: str = "rest", request_type=backupdr.ListManagementServersRequest +def test_get_backup_plan_association_rest_bad_request( + transport: str = "rest", + 
request_type=backupplanassociation.GetBackupPlanAssociationRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3149,7 +22050,9 @@ def test_list_management_servers_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3161,10 +22064,10 @@ def test_list_management_servers_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_management_servers(request) + client.get_backup_plan_association(request) -def test_list_management_servers_rest_flattened(): +def test_get_backup_plan_association_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3173,14 +22076,16 @@ def test_list_management_servers_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupdr.ListManagementServersResponse() + return_value = backupplanassociation.BackupPlanAssociation() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -3188,159 +22093,90 @@ def test_list_management_servers_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_management_servers(**mock_args) + client.get_backup_plan_association(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/managementServers" + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" % client.transport._host, args[1], ) -def test_list_management_servers_rest_flattened_error(transport: str = "rest"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_management_servers( - backupdr.ListManagementServersRequest(), - parent="parent_value", - ) - - -def test_list_management_servers_rest_pager(transport: str = "rest"): +def test_get_backup_plan_association_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - next_page_token="abc", - ), - backupdr.ListManagementServersResponse( - management_servers=[], - next_page_token="def", - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - ], - next_page_token="ghi", - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - backupdr.ListManagementServersResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_management_servers(request=sample_request) + # Attempting to call a method with both a 
request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupdr.ManagementServer) for i in results) - pages = list(client.list_management_servers(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_get_backup_plan_association_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - backupdr.GetManagementServerRequest, + backupplanassociation.ListBackupPlanAssociationsRequest, dict, ], ) -def test_get_management_server_rest(request_type): +def test_list_backup_plan_associations_rest(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupdr.ManagementServer( - name="name_value", - description="description_value", - type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, - state=backupdr.ManagementServer.InstanceState.CREATING, - etag="etag_value", - oauth2_client_id="oauth2_client_id_value", - ba_proxy_uri=["ba_proxy_uri_value"], - satisfies_pzi=True, + return_value = backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_management_server(request) + response = client.list_backup_plan_associations(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, backupdr.ManagementServer) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.type_ == backupdr.ManagementServer.InstanceType.BACKUP_RESTORE - assert response.state == backupdr.ManagementServer.InstanceState.CREATING - assert response.etag == "etag_value" - assert response.oauth2_client_id == "oauth2_client_id_value" - assert response.ba_proxy_uri == ["ba_proxy_uri_value"] - assert response.satisfies_pzi is True + assert isinstance(response, pagers.ListBackupPlanAssociationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_management_server_rest_use_cached_wrapped_rpc(): +def test_list_backup_plan_associations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3355,7 +22191,7 @@ def test_get_management_server_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_management_server + client._transport.list_backup_plan_associations in client._transport._wrapped_methods ) @@ -3365,29 +22201,29 @@ def test_get_management_server_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_management_server + client._transport.list_backup_plan_associations ] = mock_rpc request = {} - client.get_management_server(request) + client.list_backup_plan_associations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_management_server(request) + client.list_backup_plan_associations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_management_server_rest_required_fields( - request_type=backupdr.GetManagementServerRequest, +def test_list_backup_plan_associations_rest_required_fields( + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3398,21 +22234,29 @@ def test_get_management_server_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_management_server._get_unset_required_fields(jsonified_request) + ).list_backup_plan_associations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_management_server._get_unset_required_fields(jsonified_request) + ).list_backup_plan_associations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3421,7 +22265,7 @@ def test_get_management_server_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backupdr.ManagementServer() + return_value = backupplanassociation.ListBackupPlanAssociationsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3442,30 +22286,43 @@ def test_get_management_server_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_management_server(request) + response = client.list_backup_plan_associations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_management_server_rest_unset_required_fields(): +def test_list_backup_plan_associations_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_management_server._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & 
set(("name",))) + unset_fields = transport.list_backup_plan_associations._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_management_server_rest_interceptors(null_interceptor): +def test_list_backup_plan_associations_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -3476,14 +22333,14 @@ def test_get_management_server_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_management_server" + transports.BackupDRRestInterceptor, "post_list_backup_plan_associations" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_get_management_server" + transports.BackupDRRestInterceptor, "pre_list_backup_plan_associations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.GetManagementServerRequest.pb( - backupdr.GetManagementServerRequest() + pb_message = backupplanassociation.ListBackupPlanAssociationsRequest.pb( + backupplanassociation.ListBackupPlanAssociationsRequest() ) transcode.return_value = { "method": "post", @@ -3495,19 +22352,21 @@ def test_get_management_server_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = backupdr.ManagementServer.to_json( - backupdr.ManagementServer() + req.return_value._content = ( + backupplanassociation.ListBackupPlanAssociationsResponse.to_json( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) ) - request = backupdr.GetManagementServerRequest() + request = 
backupplanassociation.ListBackupPlanAssociationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupdr.ManagementServer() + post.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - client.get_management_server( + client.list_backup_plan_associations( request, metadata=[ ("key", "val"), @@ -3519,8 +22378,9 @@ def test_get_management_server_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_management_server_rest_bad_request( - transport: str = "rest", request_type=backupdr.GetManagementServerRequest +def test_list_backup_plan_associations_rest_bad_request( + transport: str = "rest", + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3528,9 +22388,7 @@ def test_get_management_server_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3542,10 +22400,10 @@ def test_get_management_server_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_management_server(request) + client.list_backup_plan_associations(request) -def test_get_management_server_rest_flattened(): +def test_list_backup_plan_associations_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3554,16 +22412,14 @@ def test_get_management_server_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupdr.ManagementServer() + return_value = backupplanassociation.ListBackupPlanAssociationsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -3571,25 +22427,27 @@ def test_get_management_server_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_management_server(**mock_args) + client.list_backup_plan_associations(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/managementServers/*}" + "%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" % client.transport._host, args[1], ) -def test_get_management_server_rest_flattened_error(transport: str = "rest"): +def test_list_backup_plan_associations_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3598,124 +22456,95 @@ def test_get_management_server_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_management_server( - backupdr.GetManagementServerRequest(), - name="name_value", + client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", ) -def test_get_management_server_rest_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - backupdr.CreateManagementServerRequest, - dict, - ], -) -def test_create_management_server_rest(request_type): +def test_list_backup_plan_associations_rest_pager(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["management_server"] = { - "name": "name_value", - "description": "description_value", - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "type_": 1, - "management_uri": {"web_ui": "web_ui_value", "api": "api_value"}, - "workforce_identity_based_management_uri": { - "first_party_management_uri": "first_party_management_uri_value", - 
"third_party_management_uri": "third_party_management_uri_value", - }, - "state": 1, - "networks": [{"network": "network_value", "peering_mode": 1}], - "etag": "etag_value", - "oauth2_client_id": "oauth2_client_id_value", - "workforce_identity_based_oauth2_client_id": { - "first_party_oauth2_client_id": "first_party_oauth2_client_id_value", - "third_party_oauth2_client_id": "third_party_oauth2_client_id_value", - }, - "ba_proxy_uri": ["ba_proxy_uri_value1", "ba_proxy_uri_value2"], - "satisfies_pzs": {"value": True}, - "satisfies_pzi": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupdr.CreateManagementServerRequest.meta.fields["management_server"] + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + ) + # Two responses for two calls + response = response + response - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + # Wrap the values into proper Response objs + response = tuple( + backupplanassociation.ListBackupPlanAssociationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + sample_request = {"parent": "projects/sample1/locations/sample2"} - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + pager = client.list_backup_plan_associations(request=sample_request) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results + ) - subfields_not_in_runtime = [] + pages = list(client.list_backup_plan_associations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["management_server"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, 
dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.DeleteBackupPlanAssociationRequest, + dict, + ], +) +def test_delete_backup_plan_association_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["management_server"][field])): - del request_init["management_server"][field][i][subfield] - else: - del request_init["management_server"][field][subfield] + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3730,13 +22559,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_management_server(request) + response = client.delete_backup_plan_association(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_create_management_server_rest_use_cached_wrapped_rpc(): +def test_delete_backup_plan_association_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3751,7 +22580,7 @@ def test_create_management_server_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_management_server + client._transport.delete_backup_plan_association in client._transport._wrapped_methods ) @@ -3761,11 +22590,11 @@ def test_create_management_server_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_management_server + client._transport.delete_backup_plan_association ] = mock_rpc request = {} - client.create_management_server(request) + client.delete_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -3774,21 +22603,20 @@ def test_create_management_server_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_management_server(request) + client.delete_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_management_server_rest_required_fields( - request_type=backupdr.CreateManagementServerRequest, +def test_delete_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["parent"] = "" - request_init["management_server_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3796,39 +22624,26 @@ def test_create_management_server_rest_required_fields( ) # verify fields with default values are dropped - assert "managementServerId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_management_server._get_unset_required_fields(jsonified_request) + ).delete_backup_plan_association._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "managementServerId" in jsonified_request - assert ( - jsonified_request["managementServerId"] == request_init["management_server_id"] - ) - jsonified_request["parent"] = "parent_value" - jsonified_request["managementServerId"] = "management_server_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_management_server._get_unset_required_fields(jsonified_request) + 
).delete_backup_plan_association._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "management_server_id", - "request_id", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "managementServerId" in jsonified_request - assert jsonified_request["managementServerId"] == "management_server_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3849,10 +22664,9 @@ def test_create_management_server_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -3862,44 +22676,26 @@ def test_create_management_server_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_management_server(request) + response = client.delete_backup_plan_association(request) - expected_params = [ - ( - "managementServerId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_management_server_rest_unset_required_fields(): +def test_delete_backup_plan_association_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_management_server._get_unset_required_fields({}) - assert 
set(unset_fields) == ( - set( - ( - "managementServerId", - "requestId", - ) - ) - & set( - ( - "parent", - "managementServerId", - "managementServer", - ) - ) + unset_fields = transport.delete_backup_plan_association._get_unset_required_fields( + {} ) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_management_server_rest_interceptors(null_interceptor): +def test_delete_backup_plan_association_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -3912,14 +22708,14 @@ def test_create_management_server_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_create_management_server" + transports.BackupDRRestInterceptor, "post_delete_backup_plan_association" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_create_management_server" + transports.BackupDRRestInterceptor, "pre_delete_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.CreateManagementServerRequest.pb( - backupdr.CreateManagementServerRequest() + pb_message = backupplanassociation.DeleteBackupPlanAssociationRequest.pb( + backupplanassociation.DeleteBackupPlanAssociationRequest() ) transcode.return_value = { "method": "post", @@ -3935,7 +22731,7 @@ def test_create_management_server_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = backupdr.CreateManagementServerRequest() + request = backupplanassociation.DeleteBackupPlanAssociationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -3943,7 +22739,7 @@ def test_create_management_server_rest_interceptors(null_interceptor): pre.return_value = request, 
metadata post.return_value = operations_pb2.Operation() - client.create_management_server( + client.delete_backup_plan_association( request, metadata=[ ("key", "val"), @@ -3955,8 +22751,9 @@ def test_create_management_server_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_management_server_rest_bad_request( - transport: str = "rest", request_type=backupdr.CreateManagementServerRequest +def test_delete_backup_plan_association_rest_bad_request( + transport: str = "rest", + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3964,7 +22761,9 @@ def test_create_management_server_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3976,10 +22775,10 @@ def test_create_management_server_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_management_server(request) + client.delete_backup_plan_association(request) -def test_create_management_server_rest_flattened(): +def test_delete_backup_plan_association_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3991,13 +22790,13 @@ def test_create_management_server_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - management_server=backupdr.ManagementServer(name="name_value"), - management_server_id="management_server_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -4008,20 +22807,20 @@ def test_create_management_server_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_management_server(**mock_args) + client.delete_backup_plan_association(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/managementServers" + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" % client.transport._host, args[1], ) -def test_create_management_server_rest_flattened_error(transport: str = "rest"): +def test_delete_backup_plan_association_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4030,15 +22829,13 @@ def test_create_management_server_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_management_server( - backupdr.CreateManagementServerRequest(), - parent="parent_value", - management_server=backupdr.ManagementServer(name="name_value"), - management_server_id="management_server_id_value", + client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", ) -def test_create_management_server_rest_error(): +def test_delete_backup_plan_association_rest_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4047,11 +22844,11 @@ def test_create_management_server_rest_error(): @pytest.mark.parametrize( "request_type", [ - backupdr.DeleteManagementServerRequest, + backupplanassociation.TriggerBackupRequest, dict, ], ) -def test_delete_management_server_rest(request_type): +def test_trigger_backup_rest(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4059,7 +22856,7 @@ def test_delete_management_server_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": 
"projects/sample1/locations/sample2/backupPlanAssociations/sample3" } request = request_type(**request_init) @@ -4075,13 +22872,13 @@ def test_delete_management_server_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_management_server(request) + response = client.trigger_backup(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_management_server_rest_use_cached_wrapped_rpc(): +def test_trigger_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4095,22 +22892,17 @@ def test_delete_management_server_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_management_server - in client._transport._wrapped_methods - ) + assert client._transport.trigger_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_management_server - ] = mock_rpc + client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc request = {} - client.delete_management_server(request) + client.trigger_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -4119,20 +22911,21 @@ def test_delete_management_server_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_management_server(request) + client.trigger_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_management_server_rest_required_fields( - request_type=backupdr.DeleteManagementServerRequest, +def test_trigger_backup_rest_required_fields( + request_type=backupplanassociation.TriggerBackupRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} request_init["name"] = "" + request_init["rule_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4143,23 +22936,24 @@ def test_delete_management_server_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_management_server._get_unset_required_fields(jsonified_request) + ).trigger_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" + jsonified_request["ruleId"] = "rule_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_management_server._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + ).trigger_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" + assert "ruleId" in jsonified_request + assert jsonified_request["ruleId"] == "rule_id_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4180,9 +22974,10 @@ def test_delete_management_server_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -4192,24 +22987,32 @@ def test_delete_management_server_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_management_server(request) + response = client.trigger_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_management_server_rest_unset_required_fields(): +def test_trigger_backup_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_management_server._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.trigger_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "ruleId", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_management_server_rest_interceptors(null_interceptor): +def test_trigger_backup_rest_interceptors(null_interceptor): transport = 
transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -4222,14 +23025,14 @@ def test_delete_management_server_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_delete_management_server" + transports.BackupDRRestInterceptor, "post_trigger_backup" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_delete_management_server" + transports.BackupDRRestInterceptor, "pre_trigger_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.DeleteManagementServerRequest.pb( - backupdr.DeleteManagementServerRequest() + pb_message = backupplanassociation.TriggerBackupRequest.pb( + backupplanassociation.TriggerBackupRequest() ) transcode.return_value = { "method": "post", @@ -4245,7 +23048,7 @@ def test_delete_management_server_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = backupdr.DeleteManagementServerRequest() + request = backupplanassociation.TriggerBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4253,7 +23056,7 @@ def test_delete_management_server_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_management_server( + client.trigger_backup( request, metadata=[ ("key", "val"), @@ -4265,8 +23068,8 @@ def test_delete_management_server_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_management_server_rest_bad_request( - transport: str = "rest", request_type=backupdr.DeleteManagementServerRequest +def test_trigger_backup_rest_bad_request( + transport: str = "rest", request_type=backupplanassociation.TriggerBackupRequest ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ 
-4275,7 +23078,7 @@ def test_delete_management_server_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" } request = request_type(**request_init) @@ -4288,10 +23091,10 @@ def test_delete_management_server_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_management_server(request) + client.trigger_backup(request) -def test_delete_management_server_rest_flattened(): +def test_trigger_backup_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4304,12 +23107,13 @@ def test_delete_management_server_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" } # get truthy value for each flattened field mock_args = dict( name="name_value", + rule_id="rule_id_value", ) mock_args.update(sample_request) @@ -4320,20 +23124,20 @@ def test_delete_management_server_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_management_server(**mock_args) + client.trigger_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/managementServers/*}" + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}:triggerBackup" % client.transport._host, args[1], ) -def test_delete_management_server_rest_flattened_error(transport: str = "rest"): +def test_trigger_backup_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4342,13 +23146,14 @@ def test_delete_management_server_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_management_server( - backupdr.DeleteManagementServerRequest(), + client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), name="name_value", + rule_id="rule_id_value", ) -def test_delete_management_server_rest_error(): +def test_trigger_backup_rest_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4497,6 +23302,29 @@ def test_backup_dr_base_transport(): "get_management_server", "create_management_server", "delete_management_server", + "create_backup_vault", + "list_backup_vaults", + "fetch_usable_backup_vaults", + "get_backup_vault", + "update_backup_vault", + "delete_backup_vault", + "list_data_sources", + "get_data_source", + "update_data_source", + "list_backups", + "get_backup", + "update_backup", + "delete_backup", + "restore_backup", + "create_backup_plan", + "get_backup_plan", + "list_backup_plans", + "delete_backup_plan", + "create_backup_plan_association", + "get_backup_plan_association", + "list_backup_plan_associations", + "delete_backup_plan_association", + "trigger_backup", "set_iam_policy", "get_iam_policy", "test_iam_permissions", @@ -4795,6 +23623,75 @@ def test_backup_dr_client_transport_session_collision(transport_name): 
session1 = client1.transport.delete_management_server._session session2 = client2.transport.delete_management_server._session assert session1 != session2 + session1 = client1.transport.create_backup_vault._session + session2 = client2.transport.create_backup_vault._session + assert session1 != session2 + session1 = client1.transport.list_backup_vaults._session + session2 = client2.transport.list_backup_vaults._session + assert session1 != session2 + session1 = client1.transport.fetch_usable_backup_vaults._session + session2 = client2.transport.fetch_usable_backup_vaults._session + assert session1 != session2 + session1 = client1.transport.get_backup_vault._session + session2 = client2.transport.get_backup_vault._session + assert session1 != session2 + session1 = client1.transport.update_backup_vault._session + session2 = client2.transport.update_backup_vault._session + assert session1 != session2 + session1 = client1.transport.delete_backup_vault._session + session2 = client2.transport.delete_backup_vault._session + assert session1 != session2 + session1 = client1.transport.list_data_sources._session + session2 = client2.transport.list_data_sources._session + assert session1 != session2 + session1 = client1.transport.get_data_source._session + session2 = client2.transport.get_data_source._session + assert session1 != session2 + session1 = client1.transport.update_data_source._session + session2 = client2.transport.update_data_source._session + assert session1 != session2 + session1 = client1.transport.list_backups._session + session2 = client2.transport.list_backups._session + assert session1 != session2 + session1 = client1.transport.get_backup._session + session2 = client2.transport.get_backup._session + assert session1 != session2 + session1 = client1.transport.update_backup._session + session2 = client2.transport.update_backup._session + assert session1 != session2 + session1 = client1.transport.delete_backup._session + session2 = 
client2.transport.delete_backup._session + assert session1 != session2 + session1 = client1.transport.restore_backup._session + session2 = client2.transport.restore_backup._session + assert session1 != session2 + session1 = client1.transport.create_backup_plan._session + session2 = client2.transport.create_backup_plan._session + assert session1 != session2 + session1 = client1.transport.get_backup_plan._session + session2 = client2.transport.get_backup_plan._session + assert session1 != session2 + session1 = client1.transport.list_backup_plans._session + session2 = client2.transport.list_backup_plans._session + assert session1 != session2 + session1 = client1.transport.delete_backup_plan._session + session2 = client2.transport.delete_backup_plan._session + assert session1 != session2 + session1 = client1.transport.create_backup_plan_association._session + session2 = client2.transport.create_backup_plan_association._session + assert session1 != session2 + session1 = client1.transport.get_backup_plan_association._session + session2 = client2.transport.get_backup_plan_association._session + assert session1 != session2 + session1 = client1.transport.list_backup_plan_associations._session + session2 = client2.transport.list_backup_plan_associations._session + assert session1 != session2 + session1 = client1.transport.delete_backup_plan_association._session + session2 = client2.transport.delete_backup_plan_association._session + assert session1 != session2 + session1 = client1.transport.trigger_backup._session + session2 = client2.transport.trigger_backup._session + assert session1 != session2 def test_backup_dr_grpc_transport_channel(): @@ -4949,6 +23846,153 @@ def test_backup_dr_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client +def test_backup_path(): + project = "squid" + location = "clam" + backupvault = "whelk" + datasource = "octopus" + backup = "oyster" + expected = 
"projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + backup=backup, + ) + actual = BackupDRClient.backup_path( + project, location, backupvault, datasource, backup + ) + assert expected == actual + + +def test_parse_backup_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "backupvault": "mussel", + "datasource": "winkle", + "backup": "nautilus", + } + path = BackupDRClient.backup_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_backup_path(path) + assert expected == actual + + +def test_backup_plan_path(): + project = "scallop" + location = "abalone" + backup_plan = "squid" + expected = ( + "projects/{project}/locations/{location}/backupPlans/{backup_plan}".format( + project=project, + location=location, + backup_plan=backup_plan, + ) + ) + actual = BackupDRClient.backup_plan_path(project, location, backup_plan) + assert expected == actual + + +def test_parse_backup_plan_path(): + expected = { + "project": "clam", + "location": "whelk", + "backup_plan": "octopus", + } + path = BackupDRClient.backup_plan_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BackupDRClient.parse_backup_plan_path(path) + assert expected == actual + + +def test_backup_plan_association_path(): + project = "oyster" + location = "nudibranch" + backup_plan_association = "cuttlefish" + expected = "projects/{project}/locations/{location}/backupPlanAssociations/{backup_plan_association}".format( + project=project, + location=location, + backup_plan_association=backup_plan_association, + ) + actual = BackupDRClient.backup_plan_association_path( + project, location, backup_plan_association + ) + assert expected == actual + + +def test_parse_backup_plan_association_path(): + expected = { + "project": "mussel", + "location": "winkle", + "backup_plan_association": "nautilus", + } + path = BackupDRClient.backup_plan_association_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_backup_plan_association_path(path) + assert expected == actual + + +def test_backup_vault_path(): + project = "scallop" + location = "abalone" + backupvault = "squid" + expected = ( + "projects/{project}/locations/{location}/backupVaults/{backupvault}".format( + project=project, + location=location, + backupvault=backupvault, + ) + ) + actual = BackupDRClient.backup_vault_path(project, location, backupvault) + assert expected == actual + + +def test_parse_backup_vault_path(): + expected = { + "project": "clam", + "location": "whelk", + "backupvault": "octopus", + } + path = BackupDRClient.backup_vault_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BackupDRClient.parse_backup_vault_path(path) + assert expected == actual + + +def test_data_source_path(): + project = "oyster" + location = "nudibranch" + backupvault = "cuttlefish" + datasource = "mussel" + expected = "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + ) + actual = BackupDRClient.data_source_path(project, location, backupvault, datasource) + assert expected == actual + + +def test_parse_data_source_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "backupvault": "scallop", + "datasource": "abalone", + } + path = BackupDRClient.data_source_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_data_source_path(path) + assert expected == actual + + def test_management_server_path(): project = "squid" location = "clam" diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py index e114cc3e196a..558c8aab67c5 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.7.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py index e114cc3e196a..558c8aab67c5 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py index afb6dc1c0388..f05bf87b0d43 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -278,9 +277,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BareMetalSolutionClient).get_transport_class, type(BareMetalSolutionClient) - ) + get_transport_class = BareMetalSolutionClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py index 8383e474221e..e25097f02f90 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py @@ -1034,7 +1034,7 @@ def __init__( Type[BareMetalSolutionTransport], Callable[..., BareMetalSolutionTransport], ] = ( - type(self).get_transport_class(transport) + BareMetalSolutionClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BareMetalSolutionTransport], transport) ) diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json b/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json index ddbf76e56a0e..b8dd39f4b60c 100644 --- a/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bare-metal-solution", - "version": "1.7.5" + "version": "0.1.0" }, "snippets": [ { diff 
--git a/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py b/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py index 8d42ff9372d4..0b5a2762a3e1 100644 --- a/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py +++ b/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py @@ -1359,22 +1359,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1952,22 +1953,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2274,8 +2276,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -2329,26 +2332,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2739,22 +2744,23 @@ async def test_rename_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_instance - ] = mock_object + ] = mock_rpc request = {} await client.rename_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3075,8 +3081,9 @@ def test_reset_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reset_instance(request) @@ -3130,26 +3137,28 @@ async def test_reset_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_instance - ] = mock_object + ] = mock_rpc request = {} await client.reset_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reset_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3439,8 +3448,9 @@ def test_start_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_instance(request) @@ -3494,26 +3504,28 @@ async def test_start_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_instance - ] = mock_object + ] = mock_rpc request = {} await client.start_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3803,8 +3815,9 @@ def test_stop_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_instance(request) @@ -3858,26 +3871,28 @@ async def test_stop_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_instance - ] = mock_object + ] = mock_rpc request = {} await client.stop_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4178,8 +4193,9 @@ def test_enable_interactive_serial_console_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.enable_interactive_serial_console(request) @@ -4235,26 +4251,28 @@ async def test_enable_interactive_serial_console_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enable_interactive_serial_console - ] = mock_object + ] = mock_rpc request = {} await client.enable_interactive_serial_console(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.enable_interactive_serial_console(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4566,8 +4584,9 @@ def test_disable_interactive_serial_console_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.disable_interactive_serial_console(request) @@ -4623,26 +4642,28 @@ async def test_disable_interactive_serial_console_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.disable_interactive_serial_console - ] = mock_object + ] = mock_rpc request = {} await client.disable_interactive_serial_console(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.disable_interactive_serial_console(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4945,8 +4966,9 @@ def test_detach_lun_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.detach_lun(request) @@ -4998,26 +5020,28 @@ async def test_detach_lun_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.detach_lun - ] = mock_object + ] = mock_rpc request = {} await client.detach_lun(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.detach_lun(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5375,22 +5399,23 @@ async def test_list_ssh_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_ssh_keys - ] = mock_object + ] = mock_rpc request = {} await client.list_ssh_keys(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_ssh_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5938,22 +5963,23 @@ async def test_create_ssh_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_ssh_key - ] = mock_object + ] = mock_rpc request = {} await client.create_ssh_key(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_ssh_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6313,22 +6339,23 @@ async def test_delete_ssh_key_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_ssh_key - ] = mock_object + ] = mock_rpc request = {} await client.delete_ssh_key(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_ssh_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6675,22 +6702,23 @@ async def test_list_volumes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_volumes - ] = mock_object + ] = mock_rpc request = {} await client.list_volumes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_volumes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7299,22 +7327,23 @@ async def test_get_volume_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_volume - ] = mock_object + ] = mock_rpc request = {} await client.get_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7645,8 +7674,9 @@ def test_update_volume_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_volume(request) @@ -7700,26 +7730,28 @@ async def test_update_volume_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_volume - ] = mock_object + ] = mock_rpc request = {} await client.update_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8143,22 +8175,23 @@ async def test_rename_volume_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_volume - ] = mock_object + ] = mock_rpc request = {} await client.rename_volume(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8503,8 +8536,9 @@ def test_evict_volume_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.evict_volume(request) @@ -8558,26 +8592,28 @@ async def test_evict_volume_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.evict_volume - ] = mock_object + ] = mock_rpc request = {} await client.evict_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.evict_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8867,8 +8903,9 @@ def test_resize_volume_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.resize_volume(request) @@ -8922,26 +8959,28 @@ async def test_resize_volume_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resize_volume - ] = mock_object + ] = mock_rpc request = {} await client.resize_volume(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.resize_volume(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9304,22 +9343,23 @@ async def test_list_networks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_networks - ] = mock_object + ] = mock_rpc request = {} await client.list_networks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_networks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9871,22 +9911,23 @@ async def test_list_network_usage_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_network_usage - ] = mock_object + ] = mock_rpc request = {} await client.list_network_usage(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_network_usage(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10275,22 +10316,23 @@ async def test_get_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_network - ] = mock_object + ] = mock_rpc request = {} await client.get_network(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10597,8 +10639,9 @@ def test_update_network_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_network(request) @@ -10652,26 +10695,28 @@ async def test_update_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_network - ] = mock_object + ] = mock_rpc request = {} await client.update_network(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11052,22 +11097,23 @@ async def test_create_volume_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_volume_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.create_volume_snapshot(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_volume_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11400,8 +11446,9 @@ def test_restore_volume_snapshot_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_volume_snapshot(request) @@ -11457,26 +11504,28 @@ async def test_restore_volume_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_volume_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.restore_volume_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_volume_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11839,22 +11888,23 @@ async def test_delete_volume_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_volume_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.delete_volume_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_volume_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12229,22 +12279,23 @@ async def test_get_volume_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_volume_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.get_volume_snapshot(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_volume_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12630,22 +12681,23 @@ async def test_list_volume_snapshots_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_volume_snapshots - ] = mock_object + ] = mock_rpc request = {} await client.list_volume_snapshots(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_volume_snapshots(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13237,22 +13289,23 @@ async def test_get_lun_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_lun - ] = mock_object + ] = mock_rpc request = {} await client.get_lun(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_lun(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13620,22 +13673,23 @@ async def test_list_luns_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_luns - ] = mock_object + ] = mock_rpc request = {} await client.list_luns(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_luns(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14124,8 +14178,9 @@ def test_evict_lun_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.evict_lun(request) @@ -14177,26 +14232,28 @@ async def test_evict_lun_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.evict_lun - ] = mock_object + ] = mock_rpc request = {} await client.evict_lun(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.evict_lun(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14560,22 +14617,23 @@ async def test_get_nfs_share_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_nfs_share - ] = mock_object + ] = mock_rpc request = {} await client.get_nfs_share(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_nfs_share(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14939,22 +14997,23 @@ async def test_list_nfs_shares_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_nfs_shares - ] = mock_object + ] = mock_rpc request = {} await client.list_nfs_shares(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_nfs_shares(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15441,8 +15500,9 @@ def test_update_nfs_share_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_nfs_share(request) @@ -15496,26 +15556,28 @@ async def test_update_nfs_share_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_nfs_share - ] = mock_object + ] = mock_rpc request = {} await client.update_nfs_share(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_nfs_share(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15817,8 +15879,9 @@ def test_create_nfs_share_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_nfs_share(request) @@ -15872,26 +15935,28 @@ async def test_create_nfs_share_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_nfs_share - ] = mock_object + ] = mock_rpc request = {} await client.create_nfs_share(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_nfs_share(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16269,22 +16334,23 @@ async def test_rename_nfs_share_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_nfs_share - ] = mock_object + ] = mock_rpc request = {} await client.rename_nfs_share(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_nfs_share(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16597,8 +16663,9 @@ def test_delete_nfs_share_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_nfs_share(request) @@ -16652,26 +16719,28 @@ async def test_delete_nfs_share_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_nfs_share - ] = mock_object + ] = mock_rpc request = {} await client.delete_nfs_share(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_nfs_share(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17032,22 +17101,23 @@ async def test_list_provisioning_quotas_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_provisioning_quotas - ] = mock_object + ] = mock_rpc request = {} await client.list_provisioning_quotas(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_provisioning_quotas(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17621,22 +17691,23 @@ async def test_submit_provisioning_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.submit_provisioning_config - ] = mock_object + ] = mock_rpc request = {} await client.submit_provisioning_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.submit_provisioning_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18043,22 +18114,23 @@ async def test_get_provisioning_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_provisioning_config - ] = mock_object + ] = mock_rpc request = {} await client.get_provisioning_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_provisioning_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18478,22 +18550,23 @@ async def test_create_provisioning_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_provisioning_config - ] = mock_object + ] = mock_rpc request = {} await client.create_provisioning_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_provisioning_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18921,22 +18994,23 @@ async def test_update_provisioning_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_provisioning_config - ] = mock_object + ] = mock_rpc request = {} await client.update_provisioning_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_provisioning_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19359,22 +19433,23 @@ async def test_rename_network_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_network - ] = mock_object + ] = mock_rpc request = {} await client.rename_network(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_network(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19753,22 +19828,23 @@ async def test_list_os_images_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_os_images - ] = mock_object + ] = mock_rpc request = {} await client.list_os_images(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_os_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-batch/CHANGELOG.md b/packages/google-cloud-batch/CHANGELOG.md index b31c1263dbcf..b923d8c36e1e 100644 --- a/packages/google-cloud-batch/CHANGELOG.md +++ b/packages/google-cloud-batch/CHANGELOG.md @@ -1,5 +1,57 @@ # Changelog +## [0.17.29](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.28...google-cloud-batch-v0.17.29) (2024-10-08) + + +### Documentation + +* Clarify Batch only supports global custom instance template now ([023d099](https://github.com/googleapis/google-cloud-python/commit/023d09955a2b4e013a3506d2dbed45c3e7e4a696)) + +## [0.17.28](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.27...google-cloud-batch-v0.17.28) (2024-09-16) + + +### Features + +* [google-cloud-batch] A new value `CANCELLATION_IN_PROGRESS` is added to enum `State` ([#13074](https://github.com/googleapis/google-cloud-python/issues/13074)) 
([76267b2](https://github.com/googleapis/google-cloud-python/commit/76267b2b8998fd2a3602ebf4d12d2aaa30a90cde)) + +## [0.17.27](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.26...google-cloud-batch-v0.17.27) (2024-09-03) + + +### Features + +* **v1:** promote block_project_ssh_keys support to batch v1 API ([63a6de0](https://github.com/googleapis/google-cloud-python/commit/63a6de00b1c6e2b6289b4fa76468859c828cb363)) + +## [0.17.26](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.25...google-cloud-batch-v0.17.26) (2024-08-20) + + +### Documentation + +* [google-cloud-batch] clarify tasks success criteria for background ([e3a6b17](https://github.com/googleapis/google-cloud-python/commit/e3a6b17c8b05ef23da801e81598ce2d75e18b6bb)) +* [google-cloud-batch] clarify tasks success criteria for background runnable ([#13023](https://github.com/googleapis/google-cloud-python/issues/13023)) ([e3a6b17](https://github.com/googleapis/google-cloud-python/commit/e3a6b17c8b05ef23da801e81598ce2d75e18b6bb)) + +## [0.17.25](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.24...google-cloud-batch-v0.17.25) (2024-08-19) + + +### Documentation + +* Batch CentOS images and HPC CentOS images are EOS ([5f179b9](https://github.com/googleapis/google-cloud-python/commit/5f179b98744808c33b07768f44efdfb3551fda03)) +* Clarify required fields for Runnable.Container ([5f179b9](https://github.com/googleapis/google-cloud-python/commit/5f179b98744808c33b07768f44efdfb3551fda03)) +* Clarify required oneof fields for Runnable.Script ([5f179b9](https://github.com/googleapis/google-cloud-python/commit/5f179b98744808c33b07768f44efdfb3551fda03)) +* Clarify TaskSpec requires one or more runnables ([5f179b9](https://github.com/googleapis/google-cloud-python/commit/5f179b98744808c33b07768f44efdfb3551fda03)) + +## 
[0.17.24](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.23...google-cloud-batch-v0.17.24) (2024-08-08) + + +### Features + +* add block_project_ssh_keys field to the v1alpha job API to block project level ssh keys access to Batch created VMs ([56ec4fc](https://github.com/googleapis/google-cloud-python/commit/56ec4fcfa50454522f40561d82c700946fc2a7d1)) +* remove visibility restriction of cancel job api, allow in v1alpha ([56ec4fc](https://github.com/googleapis/google-cloud-python/commit/56ec4fcfa50454522f40561d82c700946fc2a7d1)) + + +### Documentation + +* Refine usage scope for field `task_execution` and `task_state` in `status_events` ([56ec4fc](https://github.com/googleapis/google-cloud-python/commit/56ec4fcfa50454522f40561d82c700946fc2a7d1)) + ## [0.17.23](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.22...google-cloud-batch-v0.17.23) (2024-07-30) diff --git a/packages/google-cloud-batch/batch-v1-py.tar.gz b/packages/google-cloud-batch/batch-v1-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 582686c5a553..059e03e3105c 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.23" # {x-release-please-version} +__version__ = "0.17.29" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 582686c5a553..059e03e3105c 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.23" # {x-release-please-version} +__version__ = "0.17.29" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py index 6b16945ad4e3..4d01c36c456e 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BatchServiceClient).get_transport_class, type(BatchServiceClient) - ) + get_transport_class = BatchServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py index 0079f78ef0a8..8d9d187805fd 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py @@ -718,7 +718,7 @@ def __init__( transport_init: Union[ Type[BatchServiceTransport], Callable[..., BatchServiceTransport] ] = ( - type(self).get_transport_class(transport) + BatchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BatchServiceTransport], transport) ) diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py index 6777c2c1b6f8..a5aca6e5d1bc 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py @@ -549,9 +549,7 @@ class Disk(proto.Message): following image values are supported for a boot disk: - ``batch-debian``: use Batch Debian images. - - ``batch-centos``: use Batch CentOS images. - ``batch-cos``: use Batch Container-Optimized images. - - ``batch-hpc-centos``: use Batch HPC CentOS images. - ``batch-hpc-rocky``: use Batch HPC Rocky Linux images. This field is a member of `oneof`_ ``data_source``. 
@@ -794,7 +792,11 @@ class InstancePolicyOrTemplate(proto.Message): instance_template (str): Name of an instance template used to create VMs. Named the field as 'instance_template' instead of 'template' to avoid - c++ keyword conflict. + C++ keyword conflict. + + Batch only supports global instance templates. You can + specify the global instance template as a full or partial + URL. This field is a member of `oneof`_ ``policy_template``. install_gpu_drivers (bool): @@ -812,6 +814,26 @@ class InstancePolicyOrTemplate(proto.Message): Optional. Set this field true if you want Batch to install Ops Agent on your behalf. Default is false. + block_project_ssh_keys (bool): + Optional. Set this field to ``true`` if you want Batch to + block project-level SSH keys from accessing this job's VMs. + Alternatively, you can configure the job to specify a VM + instance template that blocks project-level SSH keys. In + either case, Batch blocks project-level SSH keys while + creating the VMs for this job. + + Batch allows project-level SSH keys for a job's VMs only if + all the following are true: + + - This field is undefined or set to ``false``. + - The job's VM instance template (if any) doesn't block + project-level SSH keys. + + Notably, you can override this behavior by manually updating + a VM to block or allow project-level SSH keys. For more + information about blocking project-level SSH keys, see the + Compute Engine documentation: + https://cloud.google.com/compute/docs/connect/restrict-ssh-keys#block-keys """ policy: "AllocationPolicy.InstancePolicy" = proto.Field( @@ -833,6 +855,10 @@ class InstancePolicyOrTemplate(proto.Message): proto.BOOL, number=4, ) + block_project_ssh_keys: bool = proto.Field( + proto.BOOL, + number=5, + ) class NetworkInterface(proto.Message): r"""A network interface. 
diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py index 770048a9d2d0..d42dfd8b608b 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py @@ -109,7 +109,7 @@ class ComputeResource(proto.Message): class StatusEvent(proto.Message): - r"""Status event + r"""Status event. Attributes: type_ (str): @@ -119,9 +119,13 @@ class StatusEvent(proto.Message): event_time (google.protobuf.timestamp_pb2.Timestamp): The time this event occurred. task_execution (google.cloud.batch_v1.types.TaskExecution): - Task Execution + Task Execution. + This field is only defined for task-level status + events where the task fails. task_state (google.cloud.batch_v1.types.TaskStatus.State): - Task State + Task State. + This field is only defined for task-level status + events. """ type_: str = proto.Field( @@ -179,11 +183,11 @@ class TaskExecution(proto.Message): class TaskStatus(proto.Message): - r"""Status of a task + r"""Status of a task. Attributes: state (google.cloud.batch_v1.types.TaskStatus.State): - Task state + Task state. status_events (MutableSequence[google.cloud.batch_v1.types.StatusEvent]): Detailed info about why the state is reached. """ @@ -261,16 +265,26 @@ class Runnable(proto.Message): to understand the logs. If not provided the index of the runnable will be used for outputs. ignore_exit_status (bool): - Normally, a non-zero exit status causes the - Task to fail. This flag allows execution of - other Runnables to continue instead. + Normally, a runnable that returns a non-zero exit status + fails and causes the task to fail. However, you can set this + field to ``true`` to allow the task to continue executing + its other runnables even if this runnable fails. background (bool): - This flag allows a Runnable to continue - running in the background while the Task - executes subsequent Runnables. 
This is useful to - provide services to other Runnables (or to - provide debugging support tools like SSH - servers). + Normally, a runnable that doesn't exit causes its task to + fail. However, you can set this field to ``true`` to + configure a background runnable. Background runnables are + allowed continue running in the background while the task + executes subsequent runnables. For example, background + runnables are useful for providing services to other + runnables or providing debugging-support tools like SSH + servers. + + Specifically, background runnables are killed automatically + (if they have not already exited) a short time after all + foreground runnables have completed. Even though this is + likely to result in a non-zero exit status for the + background runnable, these automatic kills are not treated + as task failures. always_run (bool): By default, after a Runnable fails, no further Runnable are executed. This flag indicates that this Runnable must be run @@ -297,32 +311,38 @@ class Container(proto.Message): Attributes: image_uri (str): - The URI to pull the container image from. + Required. The URI to pull the container image + from. commands (MutableSequence[str]): - Overrides the ``CMD`` specified in the container. If there - is an ENTRYPOINT (either in the container image or with the - entrypoint field below) then commands are appended as - arguments to the ENTRYPOINT. + Required for some container images. Overrides the ``CMD`` + specified in the container. If there is an ``ENTRYPOINT`` + (either in the container image or with the ``entrypoint`` + field below) then these commands are appended as arguments + to the ``ENTRYPOINT``. entrypoint (str): - Overrides the ``ENTRYPOINT`` specified in the container. + Required for some container images. Overrides the + ``ENTRYPOINT`` specified in the container. 
volumes (MutableSequence[str]): Volumes to mount (bind mount) from the host machine files or - directories into the container, formatted to match docker - run's --volume option, e.g. /foo:/bar, or /foo:/bar:ro + directories into the container, formatted to match + ``--volume`` option for the ``docker run`` command—for + example, ``/foo:/bar`` or ``/foo:/bar:ro``. If the ``TaskSpec.Volumes`` field is specified but this field is not, Batch will mount each volume from the host machine to the container with the same mount path by default. In this case, the default mount option for - containers will be read-only (ro) for existing persistent - disks and read-write (rw) for other volume types, regardless - of the original mount options specified in + containers will be read-only (``ro``) for existing + persistent disks and read-write (``rw``) for other volume + types, regardless of the original mount options specified in ``TaskSpec.Volumes``. If you need different mount settings, you can explicitly configure them in this field. options (str): - Arbitrary additional options to include in - the "docker run" command when running this - container, e.g. "--network host". + Required for some container images. Arbitrary additional + options to include in the ``docker run`` command when + running this container—for example, ``--network host``. For + the ``--volume`` option, use the ``volumes`` field for the + container. block_external_network (bool): If set to true, external network access to and from container will be blocked, containers that are with @@ -439,28 +459,32 @@ class Script(proto.Message): Attributes: path (str): - Script file path on the host VM. - - To specify an interpreter, please add a - ``#!``\ (also known as `shebang - line `__) as - the first line of the file.(For example, to execute the - script using bash, ``#!/bin/bash`` should be the first line - of the file. 
To execute the script using\ ``Python3``, - ``#!/usr/bin/env python3`` should be the first line of the - file.) Otherwise, the file will by default be executed by - ``/bin/sh``. + The path to a script file that is accessible from the host + VM(s). + + Unless the script file supports the default ``#!/bin/sh`` + shell interpreter, you must specify an interpreter by + including a [shebang + line](https://en.wikipedia.org/wiki/Shebang_(Unix) as the + first line of the file. For example, to execute the script + using bash, include ``#!/bin/bash`` as the first line of the + file. Alternatively, to execute the script using Python3, + include ``#!/usr/bin/env python3`` as the first line of the + file. This field is a member of `oneof`_ ``command``. text (str): - Shell script text. - - To specify an interpreter, please add a - ``#!\n`` at the beginning of the text.(For - example, to execute the script using bash, ``#!/bin/bash\n`` - should be added. To execute the script using\ ``Python3``, - ``#!/usr/bin/env python3\n`` should be added.) Otherwise, - the script will by default be executed by ``/bin/sh``. + The text for a script. + + Unless the script text supports the default ``#!/bin/sh`` + shell interpreter, you must specify an interpreter by + including a [shebang + line](https://en.wikipedia.org/wiki/Shebang_(Unix) at the + beginning of the text. For example, to execute the script + using bash, include ``#!/bin/bash\n`` at the beginning of + the text. Alternatively, to execute the script using + Python3, include ``#!/usr/bin/env python3\n`` at the + beginning of the text. This field is a member of `oneof`_ ``command``. """ @@ -477,8 +501,9 @@ class Script(proto.Message): ) class Barrier(proto.Message): - r"""Barrier runnable blocks until all tasks in a taskgroup reach - it. + r"""A barrier runnable automatically blocks the execution of + subsequent runnables until all the tasks in the task group reach + the barrier. 
Attributes: name (str): @@ -548,18 +573,18 @@ class TaskSpec(proto.Message): Attributes: runnables (MutableSequence[google.cloud.batch_v1.types.Runnable]): - The sequence of scripts or containers to run for this Task. - Each Task using this TaskSpec executes its list of runnables - in order. The Task succeeds if all of its runnables either - exit with a zero status or any that exit with a non-zero - status have the ignore_exit_status flag. - - Background runnables are killed automatically (if they have - not already exited) a short time after all foreground - runnables have completed. Even though this is likely to - result in a non-zero exit status for the background - runnable, these automatic kills are not treated as Task - failures. + Required. The sequence of one or more runnables (executable + scripts, executable containers, and/or barriers) for each + task in this task group to run. Each task runs this list of + runnables in order. For a task to succeed, all of its script + and container runnables each must meet at least one of the + following conditions: + + - The runnable exited with a zero status. + - The runnable didn't finish, but you enabled its + ``background`` subfield. + - The runnable exited with a non-zero status, but you + enabled its ``ignore_exit_status`` subfield. compute_resource (google.cloud.batch_v1.types.ComputeResource): ComputeResource requirements. 
max_run_duration (google.protobuf.duration_pb2.Duration): diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/__init__.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/__init__.py index 0e918ab65e9f..25fa5142c3a9 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/__init__.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/__init__.py @@ -20,6 +20,8 @@ from .services.batch_service import BatchServiceAsyncClient, BatchServiceClient from .types.batch import ( + CancelJobRequest, + CancelJobResponse, CreateJobRequest, CreateResourceAllowanceRequest, DeleteJobRequest, @@ -76,6 +78,8 @@ "AllocationPolicy", "BatchServiceClient", "CalendarPeriod", + "CancelJobRequest", + "CancelJobResponse", "ComputeResource", "CreateJobRequest", "CreateResourceAllowanceRequest", diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_metadata.json b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_metadata.json index fbbd7eed609c..d2eeeb1176cf 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_metadata.json +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "BatchServiceClient", "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, "CreateJob": { "methods": [ "create_job" @@ -75,6 +80,11 @@ "grpc-async": { "libraryClient": "BatchServiceAsyncClient", "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, "CreateJob": { "methods": [ "create_job" @@ -140,6 +150,11 @@ "rest": { "libraryClient": "BatchServiceClient", "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, "CreateJob": { "methods": [ "create_job" diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 582686c5a553..059e03e3105c 100644 --- 
a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.23" # {x-release-please-version} +__version__ = "0.17.29" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py index 87156abbb235..e5fa2a62b8f5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -210,9 +209,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BatchServiceClient).get_transport_class, type(BatchServiceClient) - ) + get_transport_class = BatchServiceClient.get_transport_class def __init__( self, @@ -633,6 +630,124 @@ async def sample_delete_job(): # Done; return the response. return response + async def cancel_job( + self, + request: Optional[Union[batch.CancelJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Cancel a Job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import batch_v1alpha + + async def sample_cancel_job(): + # Create a client + client = batch_v1alpha.BatchServiceAsyncClient() + + # Initialize request argument(s) + request = batch_v1alpha.CancelJobRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.batch_v1alpha.types.CancelJobRequest, dict]]): + The request object. CancelJob Request. + name (:class:`str`): + Required. Job name. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.batch_v1alpha.types.CancelJobResponse` + Response to the CancelJob request. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, batch.CancelJobRequest): + request = batch.CancelJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.cancel_job + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + batch.CancelJobResponse, + metadata_type=batch.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def update_job( self, request: Optional[Union[batch.UpdateJobRequest, dict]] = None, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py index c267e54a623e..1c35c5f30ac6 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py @@ -746,7 +746,7 @@ def __init__( transport_init: Union[ Type[BatchServiceTransport], Callable[..., BatchServiceTransport] ] = ( - type(self).get_transport_class(transport) + BatchServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BatchServiceTransport], transport) ) @@ -1109,6 +1109,121 @@ def sample_delete_job(): # Done; return the response. return response + def cancel_job( + self, + request: Optional[Union[batch.CancelJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Cancel a Job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import batch_v1alpha + + def sample_cancel_job(): + # Create a client + client = batch_v1alpha.BatchServiceClient() + + # Initialize request argument(s) + request = batch_v1alpha.CancelJobRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.batch_v1alpha.types.CancelJobRequest, dict]): + The request object. CancelJob Request. + name (str): + Required. Job name. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.batch_v1alpha.types.CancelJobResponse` + Response to the CancelJob request. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, batch.CancelJobRequest): + request = batch.CancelJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + batch.CancelJobResponse, + metadata_type=batch.OperationMetadata, + ) + + # Done; return the response. 
+ return response + def update_job( self, request: Optional[Union[batch.UpdateJobRequest, dict]] = None, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/base.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/base.py index bea486b7ead4..4f3ffa39c02d 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/base.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/base.py @@ -161,6 +161,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_job: gapic_v1.method.wrap_method( + self.cancel_job, + default_timeout=60.0, + client_info=client_info, + ), self.update_job: gapic_v1.method.wrap_method( self.update_job, default_timeout=60.0, @@ -288,6 +293,15 @@ def delete_job( ]: raise NotImplementedError() + @property + def cancel_job( + self, + ) -> Callable[ + [batch.CancelJobRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def update_job( self, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py index 04c0554ce78f..b723e9d7f161 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py @@ -334,6 +334,32 @@ def delete_job( ) return self._stubs["delete_job"] + @property + def cancel_job( + self, + ) -> Callable[[batch.CancelJobRequest], operations_pb2.Operation]: + r"""Return a callable for the cancel job method over gRPC. + + Cancel a Job. 
+ + Returns: + Callable[[~.CancelJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_job" not in self._stubs: + self._stubs["cancel_job"] = self.grpc_channel.unary_unary( + "/google.cloud.batch.v1alpha.BatchService/CancelJob", + request_serializer=batch.CancelJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_job"] + @property def update_job(self) -> Callable[[batch.UpdateJobRequest], gcb_job.Job]: r"""Return a callable for the update job method over gRPC. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py index 23e047db70f0..fa50038d8225 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py @@ -340,6 +340,32 @@ def delete_job( ) return self._stubs["delete_job"] + @property + def cancel_job( + self, + ) -> Callable[[batch.CancelJobRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the cancel job method over gRPC. + + Cancel a Job. + + Returns: + Callable[[~.CancelJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_job" not in self._stubs: + self._stubs["cancel_job"] = self.grpc_channel.unary_unary( + "/google.cloud.batch.v1alpha.BatchService/CancelJob", + request_serializer=batch.CancelJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_job"] + @property def update_job(self) -> Callable[[batch.UpdateJobRequest], Awaitable[gcb_job.Job]]: r"""Return a callable for the update job method over gRPC. @@ -613,6 +639,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_job: gapic_v1.method_async.wrap_method( + self.cancel_job, + default_timeout=60.0, + client_info=client_info, + ), self.update_job: gapic_v1.method_async.wrap_method( self.update_job, default_timeout=60.0, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py index 16b672e6217c..4a1d9e6184fb 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py @@ -79,6 +79,14 @@ class BatchServiceRestInterceptor: .. 
code-block:: python class MyCustomBatchServiceInterceptor(BatchServiceRestInterceptor): + def pre_cancel_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_job(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_job(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -181,6 +189,27 @@ def post_update_resource_allowance(self, response): """ + def pre_cancel_job( + self, request: batch.CancelJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[batch.CancelJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchService server. + """ + return request, metadata + + def post_cancel_job( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for cancel_job + + Override in a subclass to manipulate the response + after it is returned by the BatchService server but before + it is returned to user code. + """ + return response + def pre_create_job( self, request: batch.CreateJobRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[batch.CreateJobRequest, Sequence[Tuple[str, str]]]: @@ -722,6 +751,99 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. 
return self._operations_client + class _CancelJob(BatchServiceRestStub): + def __hash__(self): + return hash("CancelJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: batch.CancelJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the cancel job method over HTTP. + + Args: + request (~.batch.CancelJobRequest): + The request object. CancelJob Request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/jobs/*}:cancel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_cancel_job(request, metadata) + pb_request = batch.CancelJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_job(resp) + return resp + class _CreateJob(BatchServiceRestStub): def __hash__(self): return hash("CreateJob") @@ -1776,6 +1898,14 @@ def __call__( resp = self._interceptor.post_update_resource_allowance(resp) return resp + @property + def cancel_job( + self, + ) -> Callable[[batch.CancelJobRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CancelJob(self._session, self._host, self._interceptor) # type: ignore + @property def create_job(self) -> Callable[[batch.CreateJobRequest], gcb_job.Job]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/__init__.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/__init__.py index d410f1d71c07..c1ecdb476e43 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/__init__.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/__init__.py @@ -14,6 +14,8 @@ # limitations under the License. # from .batch import ( + CancelJobRequest, + CancelJobResponse, CreateJobRequest, CreateResourceAllowanceRequest, DeleteJobRequest, @@ -66,6 +68,8 @@ from .volume import GCS, NFS, PD, Volume __all__ = ( + "CancelJobRequest", + "CancelJobResponse", "CreateJobRequest", "CreateResourceAllowanceRequest", "DeleteJobRequest", diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py index 7cbfb53f4ef7..a1f56648f3a4 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py @@ -33,6 +33,8 @@ "CreateJobRequest", "GetJobRequest", "DeleteJobRequest", + "CancelJobRequest", + "CancelJobResponse", "UpdateJobRequest", "ListJobsRequest", "ListJobsResponse", @@ -170,6 +172,48 @@ class DeleteJobRequest(proto.Message): ) +class CancelJobRequest(proto.Message): + r"""CancelJob Request. + + Attributes: + name (str): + Required. Job name. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. 
The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CancelJobResponse(proto.Message): + r"""Response to the CancelJob request.""" + + class UpdateJobRequest(proto.Message): r"""UpdateJob Request. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py index 80bb9f2925a4..744d6bdb9a8b 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py @@ -332,6 +332,14 @@ class State(proto.Enum): The Job will be deleted, but has not been deleted yet. Typically this is because resources used by the Job are still being cleaned up. + CANCELLATION_IN_PROGRESS (7): + The Job cancellation is in progress, this is + because the resources used by the Job are still + being cleaned up. + CANCELLED (8): + The Job has been cancelled, the task + executions were stopped and the resources were + cleaned up. """ STATE_UNSPECIFIED = 0 QUEUED = 1 @@ -340,6 +348,8 @@ class State(proto.Enum): SUCCEEDED = 4 FAILED = 5 DELETION_IN_PROGRESS = 6 + CANCELLATION_IN_PROGRESS = 7 + CANCELLED = 8 class InstanceStatus(proto.Message): r"""VM instance status. 
@@ -669,9 +679,7 @@ class Disk(proto.Message): following image values are supported for a boot disk: - ``batch-debian``: use Batch Debian images. - - ``batch-centos``: use Batch CentOS images. - ``batch-cos``: use Batch Container-Optimized images. - - ``batch-hpc-centos``: use Batch HPC CentOS images. - ``batch-hpc-rocky``: use Batch HPC Rocky Linux images. This field is a member of `oneof`_ ``data_source``. @@ -938,6 +946,26 @@ class InstancePolicyOrTemplate(proto.Message): Optional. Set this field true if you want Batch to install Ops Agent on your behalf. Default is false. + block_project_ssh_keys (bool): + Optional. Set this field to ``true`` if you want Batch to + block project-level SSH keys from accessing this job's VMs. + Alternatively, you can configure the job to specify a VM + instance template that blocks project-level SSH keys. In + either case, Batch blocks project-level SSH keys while + creating the VMs for this job. + + Batch allows project-level SSH keys for a job's VMs only if + all the following are true: + + - This field is undefined or set to ``false``. + - The job's VM instance template (if any) doesn't block + project-level SSH keys. + + Notably, you can override this behavior by manually updating + a VM to block or allow project-level SSH keys. For more + information about blocking project-level SSH keys, see the + Compute Engine documentation: + https://cloud.google.com/compute/docs/connect/restrict-ssh-keys#block-keys """ policy: "AllocationPolicy.InstancePolicy" = proto.Field( @@ -959,6 +987,10 @@ class InstancePolicyOrTemplate(proto.Message): proto.BOOL, number=4, ) + block_project_ssh_keys: bool = proto.Field( + proto.BOOL, + number=5, + ) class NetworkInterface(proto.Message): r"""A network interface. 
diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py index 625ed05e889f..b6ddf5702b52 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py @@ -118,7 +118,7 @@ class ComputeResource(proto.Message): class StatusEvent(proto.Message): - r"""Status event + r"""Status event. Attributes: type_ (str): @@ -128,9 +128,13 @@ class StatusEvent(proto.Message): event_time (google.protobuf.timestamp_pb2.Timestamp): The time this event occurred. task_execution (google.cloud.batch_v1alpha.types.TaskExecution): - Task Execution + Task Execution. + This field is only defined for task-level status + events where the task fails. task_state (google.cloud.batch_v1alpha.types.TaskStatus.State): - Task State + Task State. + This field is only defined for task-level status + events. """ type_: str = proto.Field( @@ -197,11 +201,11 @@ class TaskExecution(proto.Message): class TaskStatus(proto.Message): - r"""Status of a task + r"""Status of a task. Attributes: state (google.cloud.batch_v1alpha.types.TaskStatus.State): - Task state + Task state. status_events (MutableSequence[google.cloud.batch_v1alpha.types.StatusEvent]): Detailed info about why the state is reached. resource_usage (google.cloud.batch_v1alpha.types.TaskResourceUsage): @@ -301,16 +305,26 @@ class Runnable(proto.Message): to understand the logs. If not provided the index of the runnable will be used for outputs. ignore_exit_status (bool): - Normally, a non-zero exit status causes the - Task to fail. This flag allows execution of - other Runnables to continue instead. + Normally, a runnable that returns a non-zero exit status + fails and causes the task to fail. However, you can set this + field to ``true`` to allow the task to continue executing + its other runnables even if this runnable fails. 
background (bool): - This flag allows a Runnable to continue - running in the background while the Task - executes subsequent Runnables. This is useful to - provide services to other Runnables (or to - provide debugging support tools like SSH - servers). + Normally, a runnable that doesn't exit causes its task to + fail. However, you can set this field to ``true`` to + configure a background runnable. Background runnables are + allowed continue running in the background while the task + executes subsequent runnables. For example, background + runnables are useful for providing services to other + runnables or providing debugging-support tools like SSH + servers. + + Specifically, background runnables are killed automatically + (if they have not already exited) a short time after all + foreground runnables have completed. Even though this is + likely to result in a non-zero exit status for the + background runnable, these automatic kills are not treated + as task failures. always_run (bool): By default, after a Runnable fails, no further Runnable are executed. This flag indicates that this Runnable must be run @@ -337,32 +351,38 @@ class Container(proto.Message): Attributes: image_uri (str): - The URI to pull the container image from. + Required. The URI to pull the container image + from. commands (MutableSequence[str]): - Overrides the ``CMD`` specified in the container. If there - is an ENTRYPOINT (either in the container image or with the - entrypoint field below) then commands are appended as - arguments to the ENTRYPOINT. + Required for some container images. Overrides the ``CMD`` + specified in the container. If there is an ``ENTRYPOINT`` + (either in the container image or with the ``entrypoint`` + field below) then these commands are appended as arguments + to the ``ENTRYPOINT``. entrypoint (str): - Overrides the ``ENTRYPOINT`` specified in the container. + Required for some container images. Overrides the + ``ENTRYPOINT`` specified in the container. 
volumes (MutableSequence[str]): Volumes to mount (bind mount) from the host machine files or - directories into the container, formatted to match docker - run's --volume option, e.g. /foo:/bar, or /foo:/bar:ro + directories into the container, formatted to match + ``--volume`` option for the ``docker run`` command—for + example, ``/foo:/bar`` or ``/foo:/bar:ro``. If the ``TaskSpec.Volumes`` field is specified but this field is not, Batch will mount each volume from the host machine to the container with the same mount path by default. In this case, the default mount option for - containers will be read-only (ro) for existing persistent - disks and read-write (rw) for other volume types, regardless - of the original mount options specified in + containers will be read-only (``ro``) for existing + persistent disks and read-write (``rw``) for other volume + types, regardless of the original mount options specified in ``TaskSpec.Volumes``. If you need different mount settings, you can explicitly configure them in this field. options (str): - Arbitrary additional options to include in - the "docker run" command when running this - container, e.g. "--network host". + Required for some container images. Arbitrary additional + options to include in the ``docker run`` command when + running this container—for example, ``--network host``. For + the ``--volume`` option, use the ``volumes`` field for the + container. block_external_network (bool): If set to true, external network access to and from container will be blocked, containers that are with @@ -479,28 +499,32 @@ class Script(proto.Message): Attributes: path (str): - Script file path on the host VM. - - To specify an interpreter, please add a - ``#!``\ (also known as `shebang - line `__) as - the first line of the file.(For example, to execute the - script using bash, ``#!/bin/bash`` should be the first line - of the file. 
To execute the script using\ ``Python3``, - ``#!/usr/bin/env python3`` should be the first line of the - file.) Otherwise, the file will by default be executed by - ``/bin/sh``. + The path to a script file that is accessible from the host + VM(s). + + Unless the script file supports the default ``#!/bin/sh`` + shell interpreter, you must specify an interpreter by + including a [shebang + line](https://en.wikipedia.org/wiki/Shebang_(Unix) as the + first line of the file. For example, to execute the script + using bash, include ``#!/bin/bash`` as the first line of the + file. Alternatively, to execute the script using Python3, + include ``#!/usr/bin/env python3`` as the first line of the + file. This field is a member of `oneof`_ ``command``. text (str): - Shell script text. - - To specify an interpreter, please add a - ``#!\n`` at the beginning of the text.(For - example, to execute the script using bash, ``#!/bin/bash\n`` - should be added. To execute the script using\ ``Python3``, - ``#!/usr/bin/env python3\n`` should be added.) Otherwise, - the script will by default be executed by ``/bin/sh``. + The text for a script. + + Unless the script text supports the default ``#!/bin/sh`` + shell interpreter, you must specify an interpreter by + including a [shebang + line](https://en.wikipedia.org/wiki/Shebang_(Unix) at the + beginning of the text. For example, to execute the script + using bash, include ``#!/bin/bash\n`` at the beginning of + the text. Alternatively, to execute the script using + Python3, include ``#!/usr/bin/env python3\n`` at the + beginning of the text. This field is a member of `oneof`_ ``command``. """ @@ -517,8 +541,9 @@ class Script(proto.Message): ) class Barrier(proto.Message): - r"""Barrier runnable blocks until all tasks in a taskgroup reach - it. + r"""A barrier runnable automatically blocks the execution of + subsequent runnables until all the tasks in the task group reach + the barrier. 
Attributes: name (str): @@ -588,18 +613,18 @@ class TaskSpec(proto.Message): Attributes: runnables (MutableSequence[google.cloud.batch_v1alpha.types.Runnable]): - The sequence of scripts or containers to run for this Task. - Each Task using this TaskSpec executes its list of runnables - in order. The Task succeeds if all of its runnables either - exit with a zero status or any that exit with a non-zero - status have the ignore_exit_status flag. - - Background runnables are killed automatically (if they have - not already exited) a short time after all foreground - runnables have completed. Even though this is likely to - result in a non-zero exit status for the background - runnable, these automatic kills are not treated as Task - failures. + Required. The sequence of one or more runnables (executable + scripts, executable containers, and/or barriers) for each + task in this task group to run. Each task runs this list of + runnables in order. For a task to succeed, all of its script + and container runnables each must meet at least one of the + following conditions: + + - The runnable exited with a zero status. + - The runnable didn't finish, but you enabled its + ``background`` subfield. + - The runnable exited with a non-zero status, but you + enabled its ``ignore_exit_status`` subfield. compute_resource (google.cloud.batch_v1alpha.types.ComputeResource): ComputeResource requirements. 
max_run_duration (google.protobuf.duration_pb2.Duration): diff --git a/packages/google-cloud-batch/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_async.py b/packages/google-cloud-batch/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_async.py new file mode 100644 index 000000000000..19d634ba25bd --- /dev/null +++ b/packages/google-cloud-batch/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-batch + + +# [START batch_v1alpha_generated_BatchService_CancelJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import batch_v1alpha + + +async def sample_cancel_job(): + # Create a client + client = batch_v1alpha.BatchServiceAsyncClient() + + # Initialize request argument(s) + request = batch_v1alpha.CancelJobRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END batch_v1alpha_generated_BatchService_CancelJob_async] diff --git a/packages/google-cloud-batch/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_sync.py b/packages/google-cloud-batch/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_sync.py new file mode 100644 index 000000000000..4661b4b1b295 --- /dev/null +++ b/packages/google-cloud-batch/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-batch + + +# [START batch_v1alpha_generated_BatchService_CancelJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import batch_v1alpha + + +def sample_cancel_job(): + # Create a client + client = batch_v1alpha.BatchServiceClient() + + # Initialize request argument(s) + request = batch_v1alpha.CancelJobRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END batch_v1alpha_generated_BatchService_CancelJob_sync] diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index c68cd953756f..aa5fe556ec83 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.23" + "version": "0.17.29" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 9b4f0fd89dbc..479ce3ca3586 100644 --- 
a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,9 +8,170 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.23" + "version": "0.17.29" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", + "shortName": "BatchServiceAsyncClient" + }, + "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.cancel_job", + "method": { + "fullName": "google.cloud.batch.v1alpha.BatchService.CancelJob", + "service": { + "fullName": "google.cloud.batch.v1alpha.BatchService", + "shortName": "BatchService" + }, + "shortName": "CancelJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.batch_v1alpha.types.CancelJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "cancel_job" + }, + "description": "Sample for CancelJob", + "file": "batch_v1alpha_generated_batch_service_cancel_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "batch_v1alpha_generated_BatchService_CancelJob_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "batch_v1alpha_generated_batch_service_cancel_job_async.py" + }, + 
{ + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", + "shortName": "BatchServiceClient" + }, + "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.cancel_job", + "method": { + "fullName": "google.cloud.batch.v1alpha.BatchService.CancelJob", + "service": { + "fullName": "google.cloud.batch.v1alpha.BatchService", + "shortName": "BatchService" + }, + "shortName": "CancelJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.batch_v1alpha.types.CancelJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "cancel_job" + }, + "description": "Sample for CancelJob", + "file": "batch_v1alpha_generated_batch_service_cancel_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "batch_v1alpha_generated_BatchService_CancelJob_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "batch_v1alpha_generated_batch_service_cancel_job_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-batch/scripts/fixup_batch_v1alpha_keywords.py b/packages/google-cloud-batch/scripts/fixup_batch_v1alpha_keywords.py index 03e73c3a0a5e..3b43e30b2119 100644 --- a/packages/google-cloud-batch/scripts/fixup_batch_v1alpha_keywords.py +++ b/packages/google-cloud-batch/scripts/fixup_batch_v1alpha_keywords.py @@ -39,6 +39,7 @@ def 
partition( class batchCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'cancel_job': ('name', 'request_id', ), 'create_job': ('parent', 'job', 'job_id', 'request_id', ), 'create_resource_allowance': ('parent', 'resource_allowance', 'resource_allowance_id', 'request_id', ), 'delete_job': ('name', 'reason', 'request_id', ), diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py index 04b87e270586..d123a2803997 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py @@ -1283,22 +1283,23 @@ async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job - ] = mock_object + ] = mock_rpc request = {} await client.create_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1671,22 +1672,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1983,8 +1985,9 @@ def test_delete_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_job(request) @@ -2036,26 +2039,28 @@ async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2408,22 +2413,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2966,22 +2972,23 @@ async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_task - ] = mock_object + ] = mock_rpc request = {} await client.get_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3331,22 +3338,23 @@ async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tasks - ] = mock_object + ] = mock_rpc request = {} await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3859,6 +3867,7 @@ def test_create_job_rest(request_type): "instance_template": "instance_template_value", "install_gpu_drivers": True, "install_ops_agent": True, + "block_project_ssh_keys": True, } ], "service_account": { diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py index 8d13af92a978..8af9c5d104a1 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py @@ -1295,22 +1295,23 @@ async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job - ] = mock_object + ] = mock_rpc request = {} await client.create_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1690,22 +1691,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2004,8 +2006,9 @@ def test_delete_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_job(request) @@ -2057,26 +2060,28 @@ async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2261,11 +2266,11 @@ async def test_delete_job_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - batch.UpdateJobRequest, + batch.CancelJobRequest, dict, ], ) -def test_update_job(request_type, transport: str = "grpc"): +def test_cancel_job(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2276,33 +2281,22 @@ def test_update_job(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = gcb_job.Job( - name="name_value", - uid="uid_value", - priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - ) - response = client.update_job(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = batch.UpdateJobRequest() + request = batch.CancelJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gcb_job.Job) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.priority == 898 - assert ( - response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE - ) + assert isinstance(response, future.Future) -def test_update_job_empty_call(): +def test_cancel_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -2311,17 +2305,17 @@ def test_update_job_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_job() + client.cancel_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.UpdateJobRequest() + assert args[0] == batch.CancelJobRequest() -def test_update_job_non_empty_request_with_auto_populated_field(): +def test_cancel_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = BatchServiceClient( @@ -2332,20 +2326,24 @@ def test_update_job_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = batch.UpdateJobRequest() + request = batch.CancelJobRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_job(request=request) + client.cancel_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.UpdateJobRequest() + assert args[0] == batch.CancelJobRequest( + name="name_value", + ) -def test_update_job_use_cached_wrapped_rpc(): +def test_cancel_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2359,21 +2357,26 @@ def test_update_job_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_job in client._transport._wrapped_methods + assert client._transport.cancel_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_job] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc request = {} - client.update_job(request) + client.cancel_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_job(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.cancel_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2381,7 +2384,7 @@ def test_update_job_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_job_empty_call_async(): +async def test_cancel_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -2390,24 +2393,19 @@ async def test_update_job_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_job.Job( - name="name_value", - uid="uid_value", - priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_job() + response = await client.cancel_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.UpdateJobRequest() + assert args[0] == batch.CancelJobRequest() @pytest.mark.asyncio -async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2422,32 +2420,38 @@ async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.update_job + client._client._transport.cancel_job in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_job - ] = mock_object + client._client._transport.cancel_job + ] = mock_rpc request = {} - await client.update_job(request) + await client.cancel_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_job(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.cancel_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_job_async( - transport: str = "grpc_asyncio", request_type=batch.UpdateJobRequest +async def test_cancel_job_async( + transport: str = "grpc_asyncio", request_type=batch.CancelJobRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2459,54 +2463,43 @@ async def test_update_job_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_job.Job( - name="name_value", - uid="uid_value", - priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_job(request) + response = await client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.UpdateJobRequest() + request = batch.CancelJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, gcb_job.Job) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.priority == 898 - assert ( - response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE - ) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_job_async_from_dict(): - await test_update_job_async(request_type=dict) +async def test_cancel_job_async_from_dict(): + await test_cancel_job_async(request_type=dict) -def test_update_job_field_headers(): +def test_cancel_job_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.UpdateJobRequest() + request = batch.CancelJobRequest() - request.job.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: - call.return_value = gcb_job.Job() - client.update_job(request) + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2517,26 +2510,28 @@ def test_update_job_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "job.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_job_field_headers_async(): +async def test_cancel_job_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = batch.UpdateJobRequest() + request = batch.CancelJobRequest() - request.job.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job()) - await client.update_job(request) + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2547,39 +2542,35 @@ async def test_update_job_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "job.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_job_flattened(): +def test_cancel_job_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gcb_job.Job() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_job( - job=gcb_job.Job(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.cancel_job( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].job - mock_val = gcb_job.Job(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_job_flattened_error(): +def test_cancel_job_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2587,46 +2578,43 @@ def test_update_job_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_job( - batch.UpdateJobRequest(), - job=gcb_job.Job(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.cancel_job( + batch.CancelJobRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_job_flattened_async(): +async def test_cancel_job_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gcb_job.Job() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.update_job( - job=gcb_job.Job(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.cancel_job( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].job - mock_val = gcb_job.Job(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_job_flattened_error_async(): +async def test_cancel_job_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2634,21 +2622,20 @@ async def test_update_job_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_job( - batch.UpdateJobRequest(), - job=gcb_job.Job(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.cancel_job( + batch.CancelJobRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - batch.ListJobsRequest, + batch.UpdateJobRequest, dict, ], ) -def test_list_jobs(request_type, transport: str = "grpc"): +def test_update_job(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2659,27 +2646,33 @@ def test_list_jobs(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = batch.ListJobsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = gcb_job.Job( + name="name_value", + uid="uid_value", + priority=898, + scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, ) - response = client.list_jobs(request) + response = client.update_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = batch.ListJobsRequest() + request = batch.UpdateJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, gcb_job.Job) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.priority == 898 + assert ( + response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE + ) -def test_list_jobs_empty_call(): +def test_update_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -2688,17 +2681,17 @@ def test_list_jobs_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_jobs() + client.update_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListJobsRequest() + assert args[0] == batch.UpdateJobRequest() -def test_list_jobs_non_empty_request_with_auto_populated_field(): +def test_update_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BatchServiceClient( @@ -2709,30 +2702,20 @@ def test_list_jobs_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = batch.ListJobsRequest( - parent="parent_value", - filter="filter_value", - order_by="order_by_value", - page_token="page_token_value", - ) + request = batch.UpdateJobRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_jobs(request=request) + client.update_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListJobsRequest( - parent="parent_value", - filter="filter_value", - order_by="order_by_value", - page_token="page_token_value", - ) + assert args[0] == batch.UpdateJobRequest() -def test_list_jobs_use_cached_wrapped_rpc(): +def test_update_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2746,21 +2729,21 @@ def test_list_jobs_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_jobs in client._transport._wrapped_methods + assert client._transport.update_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc + client._transport._wrapped_methods[client._transport.update_job] = mock_rpc request = {} - client.list_jobs(request) + client.update_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_jobs(request) + client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2768,7 +2751,7 @@ def test_list_jobs_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_jobs_empty_call_async(): +async def test_update_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = BatchServiceAsyncClient( @@ -2777,22 +2760,24 @@ async def test_list_jobs_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListJobsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + gcb_job.Job( + name="name_value", + uid="uid_value", + priority=898, + scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, ) ) - response = await client.list_jobs() + response = await client.update_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListJobsRequest() + assert args[0] == batch.UpdateJobRequest() @pytest.mark.asyncio -async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2807,32 +2792,33 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # Ensure method has been cached assert ( - client._client._transport.list_jobs + client._client._transport.update_job in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_jobs - ] = mock_object + client._client._transport.update_job + ] = mock_rpc request = {} - await client.list_jobs(request) + await client.update_job(request) # Establish that the 
underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_jobs(request) + await client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_jobs_async( - transport: str = "grpc_asyncio", request_type=batch.ListJobsRequest +async def test_update_job_async( + transport: str = "grpc_asyncio", request_type=batch.UpdateJobRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2844,48 +2830,54 @@ async def test_list_jobs_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListJobsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + gcb_job.Job( + name="name_value", + uid="uid_value", + priority=898, + scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, ) ) - response = await client.list_jobs(request) + response = await client.update_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.ListJobsRequest() + request = batch.UpdateJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListJobsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, gcb_job.Job) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.priority == 898 + assert ( + response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE + ) @pytest.mark.asyncio -async def test_list_jobs_async_from_dict(): - await test_list_jobs_async(request_type=dict) +async def test_update_job_async_from_dict(): + await test_update_job_async(request_type=dict) -def test_list_jobs_field_headers(): +def test_update_job_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.ListJobsRequest() + request = batch.UpdateJobRequest() - request.parent = "parent_value" + request.job.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - call.return_value = batch.ListJobsResponse() - client.list_jobs(request) + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + call.return_value = gcb_job.Job() + client.update_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2896,28 +2888,26 @@ def test_list_jobs_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "job.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_jobs_field_headers_async(): +async def test_update_job_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = batch.ListJobsRequest() + request = batch.UpdateJobRequest() - request.parent = "parent_value" + request.job.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListJobsResponse() - ) - await client.list_jobs(request) + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job()) + await client.update_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2928,35 +2918,39 @@ async def test_list_jobs_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "job.name=name_value", ) in kw["metadata"] -def test_list_jobs_flattened(): +def test_update_job_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = batch.ListJobsResponse() + call.return_value = gcb_job.Job() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_jobs( - parent="parent_value", + client.update_job( + job=gcb_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].job + mock_val = gcb_job.Job(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_list_jobs_flattened_error(): +def test_update_job_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2964,43 +2958,46 @@ def test_list_jobs_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_jobs( - batch.ListJobsRequest(), - parent="parent_value", + client.update_job( + batch.UpdateJobRequest(), + job=gcb_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_list_jobs_flattened_async(): +async def test_update_job_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = batch.ListJobsResponse() + call.return_value = gcb_job.Job() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListJobsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_jobs( - parent="parent_value", + response = await client.update_job( + job=gcb_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].job + mock_val = gcb_job.Job(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_list_jobs_flattened_error_async(): +async def test_update_job_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3008,290 +3005,105 @@ async def test_list_jobs_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_jobs( - batch.ListJobsRequest(), - parent="parent_value", + await client.update_job( + batch.UpdateJobRequest(), + job=gcb_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_jobs_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + batch.ListJobsRequest, + dict, + ], +) +def test_list_jobs(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token="abc", - ), - batch.ListJobsResponse( - jobs=[], - next_page_token="def", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token="ghi", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = batch.ListJobsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - pager = client.list_jobs(request={}, retry=retry, timeout=timeout) + response = client.list_jobs(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = batch.ListJobsRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, job.Job) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_jobs_pages(transport_name: str = "grpc"): +def test_list_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token="abc", - ), - batch.ListJobsResponse( - jobs=[], - next_page_token="def", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token="ghi", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_jobs(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.list_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batch.ListJobsRequest() -@pytest.mark.asyncio -async def test_list_jobs_async_pager(): - client = BatchServiceAsyncClient( +def test_list_jobs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = batch.ListJobsRequest( + parent="parent_value", + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token="abc", - ), - batch.ListJobsResponse( - jobs=[], - next_page_token="def", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token="ghi", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_jobs( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, job.Job) for i in responses) - - -@pytest.mark.asyncio -async def test_list_jobs_async_pages(): - client = BatchServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token="abc", - ), - batch.ListJobsResponse( - jobs=[], - next_page_token="def", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token="ghi", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_jobs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - batch.GetTaskRequest, - dict, - ], -) -def test_get_task(request_type, transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task( - name="name_value", - ) - response = client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.GetTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, task.Task) - assert response.name == "name_value" - - -def test_get_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetTaskRequest() - - -def test_get_task_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.GetTaskRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_task(request=request) + client.list_jobs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetTaskRequest( - name="name_value", + assert args[0] == batch.ListJobsRequest( + parent="parent_value", + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) -def test_get_task_use_cached_wrapped_rpc(): +def test_list_jobs_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3305,21 +3117,21 @@ def test_get_task_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_task in client._transport._wrapped_methods + assert client._transport.list_jobs in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_task] = mock_rpc + client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc request = {} - client.get_task(request) + client.list_jobs(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_task(request) + client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3327,7 +3139,7 @@ def test_get_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_task_empty_call_async(): +async def test_list_jobs_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = BatchServiceAsyncClient( @@ -3336,21 +3148,22 @@ async def test_get_task_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - task.Task( - name="name_value", + batch.ListJobsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_task() + response = await client.list_jobs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetTaskRequest() + assert args[0] == batch.ListJobsRequest() @pytest.mark.asyncio -async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3365,32 +3178,33 @@ async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # Ensure method has been cached assert ( - client._client._transport.get_task + client._client._transport.list_jobs in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_task - ] = mock_object + client._client._transport.list_jobs + ] = mock_rpc request = {} - await client.get_task(request) + await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_task(request) + await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_task_async( - transport: str = "grpc_asyncio", request_type=batch.GetTaskRequest +async def test_list_jobs_async( + transport: str = "grpc_asyncio", request_type=batch.ListJobsRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3402,46 +3216,48 @@ async def test_get_task_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - task.Task( - name="name_value", + batch.ListJobsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_task(request) + response = await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.GetTaskRequest() + request = batch.ListJobsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, task.Task) - assert response.name == "name_value" + assert isinstance(response, pagers.ListJobsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_task_async_from_dict(): - await test_get_task_async(request_type=dict) +async def test_list_jobs_async_from_dict(): + await test_list_jobs_async(request_type=dict) -def test_get_task_field_headers(): +def test_list_jobs_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.GetTaskRequest() + request = batch.ListJobsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: - call.return_value = task.Task() - client.get_task(request) + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value = batch.ListJobsResponse() + client.list_jobs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3452,28 +3268,30 @@ def test_get_task_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_task_field_headers_async(): +async def test_list_jobs_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = batch.GetTaskRequest() + request = batch.ListJobsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + batch.ListJobsResponse() + ) + await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request @@ -3482,35 +3300,35 @@ async def test_get_task_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_task_flattened(): +def test_list_jobs_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = task.Task() + call.return_value = batch.ListJobsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_task( - name="name_value", + client.list_jobs( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_task_flattened_error(): +def test_list_jobs_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3518,41 +3336,43 @@ def test_get_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_task( - batch.GetTaskRequest(), - name="name_value", + client.list_jobs( + batch.ListJobsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_task_flattened_async(): +async def test_list_jobs_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = task.Task() + call.return_value = batch.ListJobsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + batch.ListJobsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_task( - name="name_value", + response = await client.list_jobs( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_task_flattened_error_async(): +async def test_list_jobs_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3560,132 +3380,318 @@ async def test_get_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_task( - batch.GetTaskRequest(), - name="name_value", + await client.list_jobs( + batch.ListJobsRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - batch.ListTasksRequest, - dict, - ], -) -def test_list_tasks(request_type, transport: str = "grpc"): +def test_list_jobs_pager(transport_name: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = batch.ListTasksResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + job.Job(), + ], + next_page_token="abc", + ), + batch.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + ], + next_page_token="ghi", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + ], + ), + RuntimeError, ) - response = client.list_tasks(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.ListTasksRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_jobs(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTasksPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, job.Job) for i in results) -def test_list_tasks_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_jobs_pages(transport_name: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + job.Job(), + ], + next_page_token="abc", + ), + batch.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + ], + next_page_token="ghi", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + ], + ), + RuntimeError, ) - client.list_tasks() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListTasksRequest() + pages = list(client.list_jobs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_list_tasks_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( +@pytest.mark.asyncio +async def test_list_jobs_async_pager(): + client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.ListTasksRequest( - parent="parent_value", - filter="filter_value", - order_by="order_by_value", - page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object( + type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + job.Job(), + ], + next_page_token="abc", + ), + batch.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + ], + next_page_token="ghi", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + ], + ), + RuntimeError, ) - client.list_tasks(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListTasksRequest( - parent="parent_value", - filter="filter_value", - order_by="order_by_value", - page_token="page_token_value", + async_pager = await client.list_jobs( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + assert len(responses) == 6 + assert all(isinstance(i, job.Job) for i in responses) -def test_list_tasks_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +@pytest.mark.asyncio +async def test_list_jobs_async_pages(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + job.Job(), + ], + next_page_token="abc", + ), + batch.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + ], + next_page_token="ghi", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + batch.GetTaskRequest, + dict, + ], +) +def test_get_task(request_type, transport: str = "grpc"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name="name_value", + ) + response = client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = batch.GetTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == "name_value" + + +def test_get_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batch.GetTaskRequest() + + +def test_get_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = batch.GetTaskRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batch.GetTaskRequest( + name="name_value", + ) + + +def test_get_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_tasks in client._transport._wrapped_methods + assert client._transport.get_task in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc + client._transport._wrapped_methods[client._transport.get_task] = mock_rpc request = {} - client.list_tasks(request) + client.get_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_tasks(request) + client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3693,7 +3699,7 @@ def test_list_tasks_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_tasks_empty_call_async(): +async def test_get_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -3702,22 +3708,21 @@ async def test_list_tasks_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + with mock.patch.object(type(client.transport.get_task), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListTasksResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + task.Task( + name="name_value", ) ) - response = await client.list_tasks() + response = await client.get_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListTasksRequest() + assert args[0] == batch.GetTaskRequest() @pytest.mark.asyncio -async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3732,32 +3737,33 @@ async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.list_tasks + client._client._transport.get_task in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_tasks - ] = mock_object + client._client._transport.get_task + ] = mock_rpc request = {} - await client.list_tasks(request) + await client.get_task(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_tasks(request) + await client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_tasks_async( - transport: str = "grpc_asyncio", request_type=batch.ListTasksRequest +async def test_get_task_async( + transport: str = "grpc_asyncio", request_type=batch.GetTaskRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3769,48 +3775,46 @@ async def test_list_tasks_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + with mock.patch.object(type(client.transport.get_task), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListTasksResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + task.Task( + name="name_value", ) ) - response = await client.list_tasks(request) + response = await client.get_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.ListTasksRequest() + request = batch.GetTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTasksAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, task.Task) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_list_tasks_async_from_dict(): - await test_list_tasks_async(request_type=dict) +async def test_get_task_async_from_dict(): + await test_get_task_async(request_type=dict) -def test_list_tasks_field_headers(): +def test_get_task_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.ListTasksRequest() + request = batch.GetTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - call.return_value = batch.ListTasksResponse() - client.list_tasks(request) + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value = task.Task() + client.get_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3821,28 +3825,26 @@ def test_list_tasks_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_tasks_field_headers_async(): +async def test_get_task_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = batch.ListTasksRequest() + request = batch.GetTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListTasksResponse() - ) - await client.list_tasks(request) + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.get_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3853,35 +3855,35 @@ async def test_list_tasks_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_tasks_flattened(): +def test_get_task_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + with mock.patch.object(type(client.transport.get_task), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = batch.ListTasksResponse() + call.return_value = task.Task() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_tasks( - parent="parent_value", + client.get_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_tasks_flattened_error(): +def test_get_task_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3889,43 +3891,41 @@ def test_list_tasks_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_tasks( - batch.ListTasksRequest(), - parent="parent_value", + client.get_task( + batch.GetTaskRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_tasks_flattened_async(): +async def test_get_task_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + with mock.patch.object(type(client.transport.get_task), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = batch.ListTasksResponse() + call.return_value = task.Task() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListTasksResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_tasks( - parent="parent_value", + response = await client.get_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_tasks_flattened_error_async(): +async def test_get_task_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3933,214 +3933,20 @@ async def test_list_tasks_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_tasks( - batch.ListTasksRequest(), - parent="parent_value", - ) - - -def test_list_tasks_pager(transport_name: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token="abc", - ), - batch.ListTasksResponse( - tasks=[], - next_page_token="def", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token="ghi", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_tasks(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, task.Task) for i in results) - - -def test_list_tasks_pages(transport_name: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token="abc", - ), - batch.ListTasksResponse( - tasks=[], - next_page_token="def", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token="ghi", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tasks(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_tasks_async_pager(): - client = BatchServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token="abc", - ), - batch.ListTasksResponse( - tasks=[], - next_page_token="def", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token="ghi", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tasks( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, task.Task) for i in responses) - - -@pytest.mark.asyncio -async def test_list_tasks_async_pages(): - client = BatchServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token="abc", - ), - batch.ListTasksResponse( - tasks=[], - next_page_token="def", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token="ghi", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, + await client.get_task( + batch.GetTaskRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_tasks(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - batch.CreateResourceAllowanceRequest, + batch.ListTasksRequest, dict, ], ) -def test_create_resource_allowance(request_type, transport: str = "grpc"): +def test_list_tasks(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4151,29 +3957,27 @@ def test_create_resource_allowance(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = gcb_resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + call.return_value = batch.ListTasksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - response = client.create_resource_allowance(request) + response = client.list_tasks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = batch.CreateResourceAllowanceRequest() + request = batch.ListTasksRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gcb_resource_allowance.ResourceAllowance) - assert response.name == "name_value" - assert response.uid == "uid_value" + assert isinstance(response, pagers.ListTasksPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_create_resource_allowance_empty_call(): +def test_list_tasks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -4182,19 +3986,17 @@ def test_create_resource_allowance_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_resource_allowance() + client.list_tasks() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.CreateResourceAllowanceRequest() + assert args[0] == batch.ListTasksRequest() -def test_create_resource_allowance_non_empty_request_with_auto_populated_field(): +def test_list_tasks_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BatchServiceClient( @@ -4205,28 +4007,30 @@ def test_create_resource_allowance_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = batch.CreateResourceAllowanceRequest( + request = batch.ListTasksRequest( parent="parent_value", - resource_allowance_id="resource_allowance_id_value", + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_resource_allowance(request=request) + client.list_tasks(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.CreateResourceAllowanceRequest( + assert args[0] == batch.ListTasksRequest( parent="parent_value", - resource_allowance_id="resource_allowance_id_value", + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) -def test_create_resource_allowance_use_cached_wrapped_rpc(): +def test_list_tasks_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4240,26 +4044,21 @@ def test_create_resource_allowance_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_resource_allowance - in client._transport._wrapped_methods - ) + assert client._transport.list_tasks in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_resource_allowance - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc request = {} - client.create_resource_allowance(request) + client.list_tasks(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_resource_allowance(request) + client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4267,7 +4066,7 @@ def test_create_resource_allowance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_resource_allowance_empty_call_async(): +async def test_list_tasks_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -4276,26 +4075,22 @@ async def test_create_resource_allowance_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + batch.ListTasksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.create_resource_allowance() + response = await client.list_tasks() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.CreateResourceAllowanceRequest() + assert args[0] == batch.ListTasksRequest() @pytest.mark.asyncio -async def test_create_resource_allowance_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4310,32 +4105,33 @@ async def test_create_resource_allowance_async_use_cached_wrapped_rpc( # 
Ensure method has been cached assert ( - client._client._transport.create_resource_allowance + client._client._transport.list_tasks in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_resource_allowance - ] = mock_object + client._client._transport.list_tasks + ] = mock_rpc request = {} - await client.create_resource_allowance(request) + await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_resource_allowance(request) + await client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_resource_allowance_async( - transport: str = "grpc_asyncio", request_type=batch.CreateResourceAllowanceRequest +async def test_list_tasks_async( + transport: str = "grpc_asyncio", request_type=batch.ListTasksRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4347,52 +4143,48 @@ async def test_create_resource_allowance_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + batch.ListTasksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.create_resource_allowance(request) + response = await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.CreateResourceAllowanceRequest() + request = batch.ListTasksRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gcb_resource_allowance.ResourceAllowance) - assert response.name == "name_value" - assert response.uid == "uid_value" + assert isinstance(response, pagers.ListTasksAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_create_resource_allowance_async_from_dict(): - await test_create_resource_allowance_async(request_type=dict) +async def test_list_tasks_async_from_dict(): + await test_list_tasks_async(request_type=dict) -def test_create_resource_allowance_field_headers(): +def test_list_tasks_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.CreateResourceAllowanceRequest() + request = batch.ListTasksRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: - call.return_value = gcb_resource_allowance.ResourceAllowance() - client.create_resource_allowance(request) + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + call.return_value = batch.ListTasksResponse() + client.list_tasks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4408,25 +4200,23 @@ def test_create_resource_allowance_field_headers(): @pytest.mark.asyncio -async def test_create_resource_allowance_field_headers_async(): +async def test_list_tasks_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.CreateResourceAllowanceRequest() + request = batch.ListTasksRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance() + batch.ListTasksResponse() ) - await client.create_resource_allowance(request) + await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4441,29 +4231,19 @@ async def test_create_resource_allowance_field_headers_async(): ) in kw["metadata"] -def test_create_resource_allowance_flattened(): +def test_list_tasks_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gcb_resource_allowance.ResourceAllowance() + call.return_value = batch.ListTasksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_resource_allowance( + client.list_tasks( parent="parent_value", - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - resource_allowance_id="resource_allowance_id_value", ) # Establish that the underlying call was made with the expected @@ -4473,21 +4253,9 @@ def test_create_resource_allowance_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].resource_allowance - mock_val = gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ) - assert arg == mock_val - arg = args[0].resource_allowance_id - mock_val = "resource_allowance_id_value" - assert arg == mock_val -def test_create_resource_allowance_flattened_error(): +def test_list_tasks_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4495,48 +4263,30 @@ def test_create_resource_allowance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_resource_allowance( - batch.CreateResourceAllowanceRequest(), + client.list_tasks( + batch.ListTasksRequest(), parent="parent_value", - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - resource_allowance_id="resource_allowance_id_value", ) @pytest.mark.asyncio -async def test_create_resource_allowance_flattened_async(): +async def test_list_tasks_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gcb_resource_allowance.ResourceAllowance() + call.return_value = batch.ListTasksResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance() + batch.ListTasksResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_resource_allowance( + response = await client.list_tasks( parent="parent_value", - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - resource_allowance_id="resource_allowance_id_value", ) # Establish that the underlying call was made with the expected @@ -4546,22 +4296,10 @@ async def test_create_resource_allowance_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].resource_allowance - mock_val = gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ) - assert arg == mock_val - arg = args[0].resource_allowance_id - mock_val = "resource_allowance_id_value" - assert arg == mock_val @pytest.mark.asyncio -async def test_create_resource_allowance_flattened_error_async(): +async def test_list_tasks_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4569,61 +4307,247 @@ async def test_create_resource_allowance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_resource_allowance( - batch.CreateResourceAllowanceRequest(), + await client.list_tasks( + batch.ListTasksRequest(), parent="parent_value", - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - resource_allowance_id="resource_allowance_id_value", ) -@pytest.mark.parametrize( - "request_type", - [ - batch.GetResourceAllowanceRequest, - dict, - ], -) -def test_get_resource_allowance(request_type, transport: str = "grpc"): +def test_list_tasks_pager(transport_name: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + batch.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, ) - response = client.get_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.GetResourceAllowanceRequest() - assert args[0] == request + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_tasks(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, task.Task) for i in results) + + +def test_list_tasks_pages(transport_name: str = "grpc"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + batch.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tasks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_tasks_async_pager(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + batch.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tasks( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, task.Task) for i in responses) + + +@pytest.mark.asyncio +async def test_list_tasks_async_pages(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + batch.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_tasks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + batch.CreateResourceAllowanceRequest, + dict, + ], +) +def test_create_resource_allowance(request_type, transport: str = "grpc"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_resource_allowance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcb_resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) + response = client.create_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = batch.CreateResourceAllowanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resource_allowance.ResourceAllowance) + assert isinstance(response, gcb_resource_allowance.ResourceAllowance) assert response.name == "name_value" assert response.uid == "uid_value" -def test_get_resource_allowance_empty_call(): +def test_create_resource_allowance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -4633,18 +4557,18 @@ def test_get_resource_allowance_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_resource_allowance() + client.create_resource_allowance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetResourceAllowanceRequest() + assert args[0] == batch.CreateResourceAllowanceRequest() -def test_get_resource_allowance_non_empty_request_with_auto_populated_field(): +def test_create_resource_allowance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BatchServiceClient( @@ -4655,26 +4579,28 @@ def test_get_resource_allowance_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = batch.GetResourceAllowanceRequest( - name="name_value", + request = batch.CreateResourceAllowanceRequest( + parent="parent_value", + resource_allowance_id="resource_allowance_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_resource_allowance(request=request) + client.create_resource_allowance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetResourceAllowanceRequest( - name="name_value", + assert args[0] == batch.CreateResourceAllowanceRequest( + parent="parent_value", + resource_allowance_id="resource_allowance_id_value", ) -def test_get_resource_allowance_use_cached_wrapped_rpc(): +def test_create_resource_allowance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4689,7 +4615,7 @@ def test_get_resource_allowance_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_resource_allowance + client._transport.create_resource_allowance in client._transport._wrapped_methods ) @@ -4699,15 +4625,15 @@ def test_get_resource_allowance_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_resource_allowance + client._transport.create_resource_allowance ] = mock_rpc request = {} - client.get_resource_allowance(request) + client.create_resource_allowance(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_resource_allowance(request) + client.create_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4715,7 +4641,7 @@ def test_get_resource_allowance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_resource_allowance_empty_call_async(): +async def test_create_resource_allowance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -4725,23 +4651,23 @@ async def test_get_resource_allowance_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resource_allowance.ResourceAllowance( + gcb_resource_allowance.ResourceAllowance( name="name_value", uid="uid_value", ) ) - response = await client.get_resource_allowance() + response = await client.create_resource_allowance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetResourceAllowanceRequest() + assert args[0] == batch.CreateResourceAllowanceRequest() @pytest.mark.asyncio -async def test_get_resource_allowance_async_use_cached_wrapped_rpc( +async def test_create_resource_allowance_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4758,32 +4684,33 @@ async def test_get_resource_allowance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_resource_allowance + client._client._transport.create_resource_allowance in client._client._transport._wrapped_methods ) # Replace cached 
wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_resource_allowance - ] = mock_object + client._client._transport.create_resource_allowance + ] = mock_rpc request = {} - await client.get_resource_allowance(request) + await client.create_resource_allowance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_resource_allowance(request) + await client.create_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_resource_allowance_async( - transport: str = "grpc_asyncio", request_type=batch.GetResourceAllowanceRequest +async def test_create_resource_allowance_async( + transport: str = "grpc_asyncio", request_type=batch.CreateResourceAllowanceRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4796,51 +4723,51 @@ async def test_get_resource_allowance_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resource_allowance.ResourceAllowance( + gcb_resource_allowance.ResourceAllowance( name="name_value", uid="uid_value", ) ) - response = await client.get_resource_allowance(request) + response = await client.create_resource_allowance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.GetResourceAllowanceRequest() + request = batch.CreateResourceAllowanceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resource_allowance.ResourceAllowance) + assert isinstance(response, gcb_resource_allowance.ResourceAllowance) assert response.name == "name_value" assert response.uid == "uid_value" @pytest.mark.asyncio -async def test_get_resource_allowance_async_from_dict(): - await test_get_resource_allowance_async(request_type=dict) +async def test_create_resource_allowance_async_from_dict(): + await test_create_resource_allowance_async(request_type=dict) -def test_get_resource_allowance_field_headers(): +def test_create_resource_allowance_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.GetResourceAllowanceRequest() + request = batch.CreateResourceAllowanceRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: - call.return_value = resource_allowance.ResourceAllowance() - client.get_resource_allowance(request) + call.return_value = gcb_resource_allowance.ResourceAllowance() + client.create_resource_allowance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -4851,30 +4778,30 @@ def test_get_resource_allowance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_resource_allowance_field_headers_async(): +async def test_create_resource_allowance_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.GetResourceAllowanceRequest() + request = batch.CreateResourceAllowanceRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resource_allowance.ResourceAllowance() + gcb_resource_allowance.ResourceAllowance() ) - await client.get_resource_allowance(request) + await client.create_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4885,37 +4812,57 @@ async def test_get_resource_allowance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_resource_allowance_flattened(): +def test_create_resource_allowance_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resource_allowance.ResourceAllowance() + call.return_value = gcb_resource_allowance.ResourceAllowance() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_resource_allowance( - name="name_value", + client.create_resource_allowance( + parent="parent_value", + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + resource_allowance_id="resource_allowance_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].resource_allowance + mock_val = gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ) + assert arg == mock_val + arg = args[0].resource_allowance_id + mock_val = "resource_allowance_id_value" assert arg == mock_val -def test_get_resource_allowance_flattened_error(): +def test_create_resource_allowance_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4923,45 +4870,73 @@ def test_get_resource_allowance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_resource_allowance( - batch.GetResourceAllowanceRequest(), - name="name_value", + client.create_resource_allowance( + batch.CreateResourceAllowanceRequest(), + parent="parent_value", + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + resource_allowance_id="resource_allowance_id_value", ) @pytest.mark.asyncio -async def test_get_resource_allowance_flattened_async(): +async def test_create_resource_allowance_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resource_allowance.ResourceAllowance() + call.return_value = gcb_resource_allowance.ResourceAllowance() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resource_allowance.ResourceAllowance() + gcb_resource_allowance.ResourceAllowance() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_resource_allowance( - name="name_value", + response = await client.create_resource_allowance( + parent="parent_value", + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + resource_allowance_id="resource_allowance_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].resource_allowance + mock_val = gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ) + assert arg == mock_val + arg = args[0].resource_allowance_id + mock_val = "resource_allowance_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_resource_allowance_flattened_error_async(): +async def test_create_resource_allowance_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4969,20 +4944,28 @@ async def test_get_resource_allowance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_resource_allowance( - batch.GetResourceAllowanceRequest(), - name="name_value", + await client.create_resource_allowance( + batch.CreateResourceAllowanceRequest(), + parent="parent_value", + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + resource_allowance_id="resource_allowance_id_value", ) @pytest.mark.parametrize( "request_type", [ - batch.DeleteResourceAllowanceRequest, + batch.GetResourceAllowanceRequest, dict, ], ) -def test_delete_resource_allowance(request_type, transport: str = "grpc"): +def test_get_resource_allowance(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4994,23 +4977,28 @@ def test_delete_resource_allowance(request_type, transport: str = "grpc"): # 
Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_resource_allowance(request) + call.return_value = resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) + response = client.get_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = batch.DeleteResourceAllowanceRequest() + request = batch.GetResourceAllowanceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, resource_allowance.ResourceAllowance) + assert response.name == "name_value" + assert response.uid == "uid_value" -def test_delete_resource_allowance_empty_call(): +def test_get_resource_allowance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -5020,18 +5008,18 @@ def test_delete_resource_allowance_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_resource_allowance() + client.get_resource_allowance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.DeleteResourceAllowanceRequest() + assert args[0] == batch.GetResourceAllowanceRequest() -def test_delete_resource_allowance_non_empty_request_with_auto_populated_field(): +def test_get_resource_allowance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BatchServiceClient( @@ -5042,28 +5030,26 @@ def test_delete_resource_allowance_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = batch.DeleteResourceAllowanceRequest( + request = batch.GetResourceAllowanceRequest( name="name_value", - reason="reason_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_resource_allowance(request=request) + client.get_resource_allowance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.DeleteResourceAllowanceRequest( + assert args[0] == batch.GetResourceAllowanceRequest( name="name_value", - reason="reason_value", ) -def test_delete_resource_allowance_use_cached_wrapped_rpc(): +def test_get_resource_allowance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5078,7 +5064,7 @@ def test_delete_resource_allowance_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_resource_allowance + client._transport.get_resource_allowance in client._transport._wrapped_methods ) @@ -5088,19 +5074,15 @@ def test_delete_resource_allowance_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_resource_allowance + client._transport.get_resource_allowance ] = mock_rpc request = {} - client.delete_resource_allowance(request) + client.get_resource_allowance(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_resource_allowance(request) + client.get_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5108,7 +5090,7 @@ def test_delete_resource_allowance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_resource_allowance_empty_call_async(): +async def test_get_resource_allowance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -5118,20 +5100,23 @@ async def test_delete_resource_allowance_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) ) - response = await client.delete_resource_allowance() + response = await client.get_resource_allowance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.DeleteResourceAllowanceRequest() + assert args[0] == batch.GetResourceAllowanceRequest() @pytest.mark.asyncio -async def test_delete_resource_allowance_async_use_cached_wrapped_rpc( +async def test_get_resource_allowance_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5148,36 +5133,33 @@ async def test_delete_resource_allowance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_resource_allowance + client._client._transport.get_resource_allowance in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_resource_allowance - ] = mock_object + client._client._transport.get_resource_allowance + ] = mock_rpc request = {} - await client.delete_resource_allowance(request) + await client.get_resource_allowance(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + assert mock_rpc.call_count == 1 - await client.delete_resource_allowance(request) + await client.get_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_resource_allowance_async( - transport: str = "grpc_asyncio", request_type=batch.DeleteResourceAllowanceRequest +async def test_get_resource_allowance_async( + transport: str = "grpc_asyncio", request_type=batch.GetResourceAllowanceRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5190,46 +5172,51 @@ async def test_delete_resource_allowance_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) ) - response = await client.delete_resource_allowance(request) + response = await client.get_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.DeleteResourceAllowanceRequest() + request = batch.GetResourceAllowanceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, resource_allowance.ResourceAllowance) + assert response.name == "name_value" + assert response.uid == "uid_value" @pytest.mark.asyncio -async def test_delete_resource_allowance_async_from_dict(): - await test_delete_resource_allowance_async(request_type=dict) +async def test_get_resource_allowance_async_from_dict(): + await test_get_resource_allowance_async(request_type=dict) -def test_delete_resource_allowance_field_headers(): +def test_get_resource_allowance_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.DeleteResourceAllowanceRequest() + request = batch.GetResourceAllowanceRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_resource_allowance(request) + call.return_value = resource_allowance.ResourceAllowance() + client.get_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5245,25 +5232,25 @@ def test_delete_resource_allowance_field_headers(): @pytest.mark.asyncio -async def test_delete_resource_allowance_field_headers_async(): +async def test_get_resource_allowance_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = batch.DeleteResourceAllowanceRequest() + request = batch.GetResourceAllowanceRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + resource_allowance.ResourceAllowance() ) - await client.delete_resource_allowance(request) + await client.get_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5278,20 +5265,20 @@ async def test_delete_resource_allowance_field_headers_async(): ) in kw["metadata"] -def test_delete_resource_allowance_flattened(): +def test_get_resource_allowance_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resource_allowance.ResourceAllowance() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_resource_allowance( + client.get_resource_allowance( name="name_value", ) @@ -5304,7 +5291,7 @@ def test_delete_resource_allowance_flattened(): assert arg == mock_val -def test_delete_resource_allowance_flattened_error(): +def test_get_resource_allowance_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5312,31 +5299,31 @@ def test_delete_resource_allowance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_resource_allowance( - batch.DeleteResourceAllowanceRequest(), + client.get_resource_allowance( + batch.GetResourceAllowanceRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_resource_allowance_flattened_async(): +async def test_get_resource_allowance_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resource_allowance.ResourceAllowance() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resource_allowance.ResourceAllowance() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.delete_resource_allowance( + response = await client.get_resource_allowance( name="name_value", ) @@ -5350,7 +5337,7 @@ async def test_delete_resource_allowance_flattened_async(): @pytest.mark.asyncio -async def test_delete_resource_allowance_flattened_error_async(): +async def test_get_resource_allowance_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5358,8 +5345,8 @@ async def test_delete_resource_allowance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_resource_allowance( - batch.DeleteResourceAllowanceRequest(), + await client.get_resource_allowance( + batch.GetResourceAllowanceRequest(), name="name_value", ) @@ -5367,11 +5354,11 @@ async def test_delete_resource_allowance_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - batch.ListResourceAllowancesRequest, + batch.DeleteResourceAllowanceRequest, dict, ], ) -def test_list_resource_allowances(request_type, transport: str = "grpc"): +def test_delete_resource_allowance(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5383,28 +5370,23 @@ def test_list_resource_allowances(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = batch.ListResourceAllowancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_resource_allowances(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = batch.ListResourceAllowancesRequest() + request = batch.DeleteResourceAllowanceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListResourceAllowancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) -def test_list_resource_allowances_empty_call(): +def test_delete_resource_allowance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -5414,18 +5396,18 @@ def test_list_resource_allowances_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_resource_allowances() + client.delete_resource_allowance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListResourceAllowancesRequest() + assert args[0] == batch.DeleteResourceAllowanceRequest() -def test_list_resource_allowances_non_empty_request_with_auto_populated_field(): +def test_delete_resource_allowance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BatchServiceClient( @@ -5436,28 +5418,28 @@ def test_list_resource_allowances_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = batch.ListResourceAllowancesRequest( - parent="parent_value", - page_token="page_token_value", + request = batch.DeleteResourceAllowanceRequest( + name="name_value", + reason="reason_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_resource_allowances(request=request) + client.delete_resource_allowance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListResourceAllowancesRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == batch.DeleteResourceAllowanceRequest( + name="name_value", + reason="reason_value", ) -def test_list_resource_allowances_use_cached_wrapped_rpc(): +def test_delete_resource_allowance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5472,7 +5454,7 @@ def test_list_resource_allowances_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_resource_allowances + client._transport.delete_resource_allowance in client._transport._wrapped_methods ) @@ -5482,15 +5464,20 @@ def test_list_resource_allowances_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_resource_allowances + client._transport.delete_resource_allowance ] = mock_rpc request = {} - client.list_resource_allowances(request) + client.delete_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_resource_allowances(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5498,7 +5485,7 @@ def test_list_resource_allowances_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_resource_allowances_empty_call_async(): +async def test_delete_resource_allowance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -5508,23 +5495,20 @@ async def test_list_resource_allowances_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListResourceAllowancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_resource_allowances() + response = await client.delete_resource_allowance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListResourceAllowancesRequest() + assert args[0] == batch.DeleteResourceAllowanceRequest() @pytest.mark.asyncio -async def test_list_resource_allowances_async_use_cached_wrapped_rpc( +async def test_delete_resource_allowance_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5541,32 +5525,38 @@ async def test_list_resource_allowances_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_resource_allowances + 
client._client._transport.delete_resource_allowance in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_resource_allowances - ] = mock_object + client._client._transport.delete_resource_allowance + ] = mock_rpc request = {} - await client.list_resource_allowances(request) + await client.delete_resource_allowance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_resource_allowances(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_resource_allowances_async( - transport: str = "grpc_asyncio", request_type=batch.ListResourceAllowancesRequest +async def test_delete_resource_allowance_async( + transport: str = "grpc_asyncio", request_type=batch.DeleteResourceAllowanceRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5579,51 +5569,46 @@ async def test_list_resource_allowances_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListResourceAllowancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_resource_allowances(request) + response = await client.delete_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.ListResourceAllowancesRequest() + request = batch.DeleteResourceAllowanceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListResourceAllowancesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_resource_allowances_async_from_dict(): - await test_list_resource_allowances_async(request_type=dict) +async def test_delete_resource_allowance_async_from_dict(): + await test_delete_resource_allowance_async(request_type=dict) -def test_list_resource_allowances_field_headers(): +def test_delete_resource_allowance_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.ListResourceAllowancesRequest() + request = batch.DeleteResourceAllowanceRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: - call.return_value = batch.ListResourceAllowancesResponse() - client.list_resource_allowances(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5634,30 +5619,30 @@ def test_list_resource_allowances_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_resource_allowances_field_headers_async(): +async def test_delete_resource_allowance_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.ListResourceAllowancesRequest() + request = batch.DeleteResourceAllowanceRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListResourceAllowancesResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_resource_allowances(request) + await client.delete_resource_allowance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -5668,37 +5653,37 @@ async def test_list_resource_allowances_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_resource_allowances_flattened(): +def test_delete_resource_allowance_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = batch.ListResourceAllowancesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_resource_allowances( - parent="parent_value", + client.delete_resource_allowance( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_resource_allowances_flattened_error(): +def test_delete_resource_allowance_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5706,45 +5691,45 @@ def test_list_resource_allowances_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_resource_allowances( - batch.ListResourceAllowancesRequest(), - parent="parent_value", + client.delete_resource_allowance( + batch.DeleteResourceAllowanceRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_resource_allowances_flattened_async(): +async def test_delete_resource_allowance_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = batch.ListResourceAllowancesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListResourceAllowancesResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_resource_allowances( - parent="parent_value", + response = await client.delete_resource_allowance( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_resource_allowances_flattened_error_async(): +async def test_delete_resource_allowance_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5752,259 +5737,53 @@ async def test_list_resource_allowances_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_resource_allowances( - batch.ListResourceAllowancesRequest(), - parent="parent_value", + await client.delete_resource_allowance( + batch.DeleteResourceAllowanceRequest(), + name="name_value", ) -def test_list_resource_allowances_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + batch.ListResourceAllowancesRequest, + dict, + ], +) +def test_list_resource_allowances(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_resource_allowances), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - next_page_token="abc", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[], - next_page_token="def", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - ], - next_page_token="ghi", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_resource_allowances( - request={}, retry=retry, timeout=timeout - ) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resource_allowance.ResourceAllowance) for i in results) - - -def test_list_resource_allowances_pages(transport_name: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - next_page_token="abc", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[], - next_page_token="def", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - ], - next_page_token="ghi", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - ), - RuntimeError, - ) - pages = list(client.list_resource_allowances(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_resource_allowances_async_pager(): - client = BatchServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - next_page_token="abc", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[], - next_page_token="def", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - ], - next_page_token="ghi", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_resource_allowances( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all( - isinstance(i, resource_allowance.ResourceAllowance) for i in responses - ) - - -@pytest.mark.asyncio -async def test_list_resource_allowances_async_pages(): - client = BatchServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - next_page_token="abc", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[], - next_page_token="def", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - ], - next_page_token="ghi", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_resource_allowances(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - batch.UpdateResourceAllowanceRequest, - dict, - ], -) -def test_update_resource_allowance(request_type, transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = gcb_resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + call.return_value = batch.ListResourceAllowancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - response = client.update_resource_allowance(request) + response = client.list_resource_allowances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = batch.UpdateResourceAllowanceRequest() + request = batch.ListResourceAllowancesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gcb_resource_allowance.ResourceAllowance) - assert response.name == "name_value" - assert response.uid == "uid_value" + assert isinstance(response, pagers.ListResourceAllowancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_update_resource_allowance_empty_call(): +def test_list_resource_allowances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -6014,18 +5793,18 @@ def test_update_resource_allowance_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_resource_allowance() + client.list_resource_allowances() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.UpdateResourceAllowanceRequest() + assert args[0] == batch.ListResourceAllowancesRequest() -def test_update_resource_allowance_non_empty_request_with_auto_populated_field(): +def test_list_resource_allowances_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BatchServiceClient( @@ -6036,22 +5815,28 @@ def test_update_resource_allowance_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = batch.UpdateResourceAllowanceRequest() + request = batch.ListResourceAllowancesRequest( + parent="parent_value", + page_token="page_token_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_resource_allowance(request=request) + client.list_resource_allowances(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.UpdateResourceAllowanceRequest() + assert args[0] == batch.ListResourceAllowancesRequest( + parent="parent_value", + page_token="page_token_value", + ) -def test_update_resource_allowance_use_cached_wrapped_rpc(): +def test_list_resource_allowances_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6066,7 +5851,7 @@ def test_update_resource_allowance_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_resource_allowance + client._transport.list_resource_allowances in client._transport._wrapped_methods ) @@ -6076,15 +5861,15 @@ def test_update_resource_allowance_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_resource_allowance + client._transport.list_resource_allowances ] = mock_rpc request = {} - client.update_resource_allowance(request) + client.list_resource_allowances(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_resource_allowance(request) + client.list_resource_allowances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6092,7 +5877,7 @@ def test_update_resource_allowance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_resource_allowance_empty_call_async(): +async def test_list_resource_allowances_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = BatchServiceAsyncClient( @@ -6102,23 +5887,23 @@ async def test_update_resource_allowance_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + batch.ListResourceAllowancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.update_resource_allowance() + response = await client.list_resource_allowances() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.UpdateResourceAllowanceRequest() + assert args[0] == batch.ListResourceAllowancesRequest() @pytest.mark.asyncio -async def test_update_resource_allowance_async_use_cached_wrapped_rpc( +async def test_list_resource_allowances_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6135,32 +5920,33 @@ async def test_update_resource_allowance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_resource_allowance + client._client._transport.list_resource_allowances in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_resource_allowance - ] = mock_object + client._client._transport.list_resource_allowances + ] = mock_rpc request = {} - await client.update_resource_allowance(request) + await client.list_resource_allowances(request) # Establish that the underlying gRPC stub method 
was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_resource_allowance(request) + await client.list_resource_allowances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_resource_allowance_async( - transport: str = "grpc_asyncio", request_type=batch.UpdateResourceAllowanceRequest +async def test_list_resource_allowances_async( + transport: str = "grpc_asyncio", request_type=batch.ListResourceAllowancesRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6173,51 +5959,51 @@ async def test_update_resource_allowance_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + batch.ListResourceAllowancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.update_resource_allowance(request) + response = await client.list_resource_allowances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.UpdateResourceAllowanceRequest() + request = batch.ListResourceAllowancesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, gcb_resource_allowance.ResourceAllowance) - assert response.name == "name_value" - assert response.uid == "uid_value" + assert isinstance(response, pagers.ListResourceAllowancesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_update_resource_allowance_async_from_dict(): - await test_update_resource_allowance_async(request_type=dict) +async def test_list_resource_allowances_async_from_dict(): + await test_list_resource_allowances_async(request_type=dict) -def test_update_resource_allowance_field_headers(): +def test_list_resource_allowances_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.UpdateResourceAllowanceRequest() + request = batch.ListResourceAllowancesRequest() - request.resource_allowance.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: - call.return_value = gcb_resource_allowance.ResourceAllowance() - client.update_resource_allowance(request) + call.return_value = batch.ListResourceAllowancesResponse() + client.list_resource_allowances(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -6228,30 +6014,30 @@ def test_update_resource_allowance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource_allowance.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_resource_allowance_field_headers_async(): +async def test_list_resource_allowances_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.UpdateResourceAllowanceRequest() + request = batch.ListResourceAllowancesRequest() - request.resource_allowance.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance() + batch.ListResourceAllowancesResponse() ) - await client.update_resource_allowance(request) + await client.list_resource_allowances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6262,53 +6048,37 @@ async def test_update_resource_allowance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource_allowance.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_resource_allowance_flattened(): +def test_list_resource_allowances_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gcb_resource_allowance.ResourceAllowance() + call.return_value = batch.ListResourceAllowancesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_resource_allowance( - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_resource_allowances( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].resource_allowance - mock_val = gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ) - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_resource_allowance_flattened_error(): +def test_list_resource_allowances_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6316,68 +6086,45 @@ def test_update_resource_allowance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_resource_allowance( - batch.UpdateResourceAllowanceRequest(), - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_resource_allowances( + batch.ListResourceAllowancesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_resource_allowance_flattened_async(): +async def test_list_resource_allowances_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gcb_resource_allowance.ResourceAllowance() + call.return_value = batch.ListResourceAllowancesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance() + batch.ListResourceAllowancesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_resource_allowance( - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_resource_allowances( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].resource_allowance - mock_val = gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ) - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_resource_allowance_flattened_error_async(): +async def test_list_resource_allowances_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6385,54 +6132,688 @@ async def test_update_resource_allowance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_resource_allowance( - batch.UpdateResourceAllowanceRequest(), - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) + await client.list_resource_allowances( + batch.ListResourceAllowancesRequest(), + parent="parent_value", + ) + + +def test_list_resource_allowances_pager(transport_name: str = "grpc"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_allowances), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + next_page_token="abc", ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + batch.ListResourceAllowancesResponse( + resource_allowances=[], + next_page_token="def", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + ], + next_page_token="ghi", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + ), + RuntimeError, ) + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_resource_allowances( + request={}, retry=retry, timeout=timeout + ) -@pytest.mark.parametrize( - "request_type", - [ - batch.CreateJobRequest, - dict, - ], -) -def test_create_job_rest(request_type): + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource_allowance.ResourceAllowance) for i in results) + + +def test_list_resource_allowances_pages(transport_name: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport_name, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["job"] = { - "name": "name_value", - "uid": "uid_value", - "priority": 898, - "task_groups": [ - { - "name": "name_value", - "task_spec": { - "runnables": [ - { - "container": { - "image_uri": "image_uri_value", - "commands": ["commands_value1", "commands_value2"], - "entrypoint": 
"entrypoint_value", - "volumes": ["volumes_value1", "volumes_value2"], - "options": "options_value", - "block_external_network": True, - "username": "username_value", - "password": "password_value", - "enable_image_streaming": True, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_allowances), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + next_page_token="abc", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[], + next_page_token="def", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + ], + next_page_token="ghi", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + ), + RuntimeError, + ) + pages = list(client.list_resource_allowances(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_resource_allowances_async_pager(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_allowances), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + next_page_token="abc", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[], + next_page_token="def", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + ], + next_page_token="ghi", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_resource_allowances( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, resource_allowance.ResourceAllowance) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_resource_allowances_async_pages(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_allowances), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + next_page_token="abc", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[], + next_page_token="def", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + ], + next_page_token="ghi", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_resource_allowances(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + batch.UpdateResourceAllowanceRequest, + dict, + ], +) +def test_update_resource_allowance(request_type, transport: str = "grpc"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcb_resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) + response = client.update_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = batch.UpdateResourceAllowanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcb_resource_allowance.ResourceAllowance) + assert response.name == "name_value" + assert response.uid == "uid_value" + + +def test_update_resource_allowance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_resource_allowance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batch.UpdateResourceAllowanceRequest() + + +def test_update_resource_allowance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = batch.UpdateResourceAllowanceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_resource_allowance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batch.UpdateResourceAllowanceRequest() + + +def test_update_resource_allowance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_resource_allowance + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_resource_allowance + ] = mock_rpc + request = {} + client.update_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_resource_allowance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_resource_allowance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcb_resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) + ) + response = await client.update_resource_allowance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batch.UpdateResourceAllowanceRequest() + + +@pytest.mark.asyncio +async def test_update_resource_allowance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_resource_allowance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_resource_allowance + ] = mock_rpc + + request = {} + await client.update_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_resource_allowance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_resource_allowance_async( + transport: str = "grpc_asyncio", request_type=batch.UpdateResourceAllowanceRequest +): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcb_resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) + ) + response = await client.update_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = batch.UpdateResourceAllowanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcb_resource_allowance.ResourceAllowance) + assert response.name == "name_value" + assert response.uid == "uid_value" + + +@pytest.mark.asyncio +async def test_update_resource_allowance_async_from_dict(): + await test_update_resource_allowance_async(request_type=dict) + + +def test_update_resource_allowance_field_headers(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = batch.UpdateResourceAllowanceRequest() + + request.resource_allowance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + call.return_value = gcb_resource_allowance.ResourceAllowance() + client.update_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource_allowance.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_resource_allowance_field_headers_async(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = batch.UpdateResourceAllowanceRequest() + + request.resource_allowance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcb_resource_allowance.ResourceAllowance() + ) + await client.update_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource_allowance.name=name_value", + ) in kw["metadata"] + + +def test_update_resource_allowance_flattened(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcb_resource_allowance.ResourceAllowance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_resource_allowance( + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource_allowance + mock_val = gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_resource_allowance_flattened_error(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_resource_allowance( + batch.UpdateResourceAllowanceRequest(), + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_resource_allowance_flattened_async(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcb_resource_allowance.ResourceAllowance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcb_resource_allowance.ResourceAllowance() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_resource_allowance( + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource_allowance + mock_val = gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_resource_allowance_flattened_error_async(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_resource_allowance( + batch.UpdateResourceAllowanceRequest(), + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + batch.CreateJobRequest, + dict, + ], +) +def test_create_job_rest(request_type): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["job"] = { + "name": "name_value", + "uid": "uid_value", + "priority": 898, + "task_groups": [ + { + "name": "name_value", + "task_spec": { + "runnables": [ + { + "container": { + "image_uri": "image_uri_value", + "commands": ["commands_value1", "commands_value2"], + "entrypoint": "entrypoint_value", + "volumes": ["volumes_value1", "volumes_value2"], + "options": "options_value", + "block_external_network": True, + "username": "username_value", + "password": 
"password_value", + "enable_image_streaming": True, }, "script": {"path": "path_value", "text": "text_value"}, "barrier": {"name": "name_value"}, @@ -6538,6 +6919,7 @@ def test_create_job_rest(request_type): "instance_template": "instance_template_value", "install_gpu_drivers": True, "install_ops_agent": True, + "block_project_ssh_keys": True, } ], "instance_templates": [ @@ -6574,143 +6956,470 @@ def test_create_job_rest(request_type): "run_as_non_root": True, "service_account": {}, } - ], - "scheduling_policy": 1, - "dependencies": [{"items": {}}], - "allocation_policy": {}, - "labels": {}, - "status": { - "state": 1, - "status_events": [ - { - "type_": "type__value", - "description": "description_value", - "event_time": {"seconds": 751, "nanos": 543}, - "task_execution": { - "exit_code": 948, - "stderr_snippet": "stderr_snippet_value", - }, - "task_state": 1, - } + ], + "scheduling_policy": 1, + "dependencies": [{"items": {}}], + "allocation_policy": {}, + "labels": {}, + "status": { + "state": 1, + "status_events": [ + { + "type_": "type__value", + "description": "description_value", + "event_time": {"seconds": 751, "nanos": 543}, + "task_execution": { + "exit_code": 948, + "stderr_snippet": "stderr_snippet_value", + }, + "task_state": 1, + } + ], + "task_groups": {}, + "run_duration": {}, + "resource_usage": {"core_hours": 0.1081}, + }, + "notification": { + "pubsub_topic": "pubsub_topic_value", + "message": {"type_": 1, "new_job_state": 1, "new_task_state": 1}, + }, + "create_time": {}, + "update_time": {}, + "logs_policy": { + "destination": 1, + "logs_path": "logs_path_value", + "cloud_logging_option": {"use_generic_task_monitored_resource": True}, + }, + "notifications": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = batch.CreateJobRequest.meta.fields["job"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["job"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO 
COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["job"][field])): + del request_init["job"][field][i][subfield] + else: + del request_init["job"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcb_job.Job( + name="name_value", + uid="uid_value", + priority=898, + scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcb_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcb_job.Job) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.priority == 898 + assert ( + response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE + ) + + +def test_create_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_job] = mock_rpc + + request = {} + client.create_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): + transport_class = transports.BatchServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_job._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "job_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcb_job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcb_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_job_rest_unset_required_fields(): + transport = transports.BatchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_job._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "jobId", + "requestId", + ) + ) + & set( + ( + "parent", + "job", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_rest_interceptors(null_interceptor): + transport = transports.BatchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BatchServiceRestInterceptor(), + ) + client = BatchServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_create_job" + ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "pre_create_job" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = batch.CreateJobRequest.pb(batch.CreateJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcb_job.Job.to_json(gcb_job.Job()) + + request = batch.CreateJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcb_job.Job() + + client.create_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), ], - "task_groups": {}, - "run_duration": {}, - "resource_usage": {"core_hours": 0.1081}, - }, - "notification": { - "pubsub_topic": "pubsub_topic_value", - "message": {"type_": 1, "new_job_state": 1, "new_task_state": 1}, - }, - "create_time": {}, - "update_time": {}, - "logs_policy": { - "destination": 1, - "logs_path": "logs_path_value", - "cloud_logging_option": {"use_generic_task_monitored_resource": True}, - }, - "notifications": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_job_rest_bad_request( + transport: str = "rest", request_type=batch.CreateJobRequest +): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job(request) - # Determine if the message type is proto-plus or protobuf - test_field = batch.CreateJobRequest.meta.fields["job"] - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] +def test_create_job_rest_flattened(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gcb_job.Job() - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + job=gcb_job.Job(name="name_value"), + job_id="job_id_value", + ) + mock_args.update(sample_request) - subfields_not_in_runtime = [] + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcb_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["job"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + client.create_job(**mock_args) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + # Establish that the 
underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/jobs" % client.transport._host, + args[1], + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["job"][field])): - del request_init["job"][field][i][subfield] - else: - del request_init["job"][field][subfield] + +def test_create_job_rest_flattened_error(transport: str = "rest"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job( + batch.CreateJobRequest(), + parent="parent_value", + job=gcb_job.Job(name="name_value"), + job_id="job_id_value", + ) + + +def test_create_job_rest_error(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + batch.GetJobRequest, + dict, + ], +) +def test_get_job_rest(request_type): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcb_job.Job( + return_value = job.Job( name="name_value", uid="uid_value", priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, + scheduling_policy=job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcb_job.Job.pb(return_value) + return_value = job.Job.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_job(request) + response = client.get_job(request) # Establish that the response is the type that we expect. - assert isinstance(response, gcb_job.Job) + assert isinstance(response, job.Job) assert response.name == "name_value" assert response.uid == "uid_value" assert response.priority == 898 - assert ( - response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE - ) + assert response.scheduling_policy == job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE -def test_create_job_rest_use_cached_wrapped_rpc(): +def test_get_job_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6724,33 +7433,33 @@ def test_create_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_job in client._transport._wrapped_methods + assert client._transport.get_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) 
expect a string. ) - client._transport._wrapped_methods[client._transport.create_job] = mock_rpc + client._transport._wrapped_methods[client._transport.get_job] = mock_rpc request = {} - client.create_job(request) + client.get_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_job(request) + client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): +def test_get_job_rest_required_fields(request_type=batch.GetJobRequest): transport_class = transports.BatchServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6761,28 +7470,21 @@ def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_job._get_unset_required_fields(jsonified_request) + ).get_job._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_job._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "job_id", - "request_id", - ) - ) + ).get_job._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6791,7 +7493,7 @@ def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gcb_job.Job() + return_value = job.Job() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6803,53 +7505,39 @@ def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcb_job.Job.pb(return_value) + return_value = job.Job.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_job(request) + response = client.get_job(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_job_rest_unset_required_fields(): +def test_get_job_rest_unset_required_fields(): transport = transports.BatchServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_job._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "jobId", - "requestId", - ) - ) - & set( - ( - "parent", - "job", - ) - ) - ) + unset_fields = transport.get_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_job_rest_interceptors(null_interceptor): +def test_get_job_rest_interceptors(null_interceptor): transport = transports.BatchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6862,13 +7550,13 @@ def test_create_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BatchServiceRestInterceptor, "post_create_job" + transports.BatchServiceRestInterceptor, "post_get_job" ) as post, mock.patch.object( - transports.BatchServiceRestInterceptor, "pre_create_job" + transports.BatchServiceRestInterceptor, "pre_get_job" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = batch.CreateJobRequest.pb(batch.CreateJobRequest()) + pb_message = batch.GetJobRequest.pb(batch.GetJobRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6879,17 +7567,17 @@ def test_create_job_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gcb_job.Job.to_json(gcb_job.Job()) + req.return_value._content = job.Job.to_json(job.Job()) - request = batch.CreateJobRequest() + request = batch.GetJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcb_job.Job() + post.return_value = job.Job() - client.create_job( + client.get_job( request, metadata=[ ("key", "val"), @@ -6901,8 +7589,8 @@ def 
test_create_job_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_job_rest_bad_request( - transport: str = "rest", request_type=batch.CreateJobRequest +def test_get_job_rest_bad_request( + transport: str = "rest", request_type=batch.GetJobRequest ): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6910,7 +7598,7 @@ def test_create_job_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6922,10 +7610,10 @@ def test_create_job_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_job(request) + client.get_job(request) -def test_create_job_rest_flattened(): +def test_get_job_rest_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6934,16 +7622,14 @@ def test_create_job_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gcb_job.Job() + return_value = job.Job() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - job=gcb_job.Job(name="name_value"), - job_id="job_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -6951,24 +7637,24 @@ def test_create_job_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcb_job.Job.pb(return_value) + return_value = job.Job.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_job(**mock_args) + client.get_job(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/jobs" % client.transport._host, + "%s/v1alpha/{name=projects/*/locations/*/jobs/*}" % client.transport._host, args[1], ) -def test_create_job_rest_flattened_error(transport: str = "rest"): +def test_get_job_rest_flattened_error(transport: str = "rest"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6977,15 +7663,13 @@ def test_create_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_job( - batch.CreateJobRequest(), - parent="parent_value", - job=gcb_job.Job(name="name_value"), - job_id="job_id_value", + client.get_job( + batch.GetJobRequest(), + name="name_value", ) -def test_create_job_rest_error(): +def test_get_job_rest_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6994,11 +7678,11 @@ def test_create_job_rest_error(): @pytest.mark.parametrize( "request_type", [ - batch.GetJobRequest, + batch.DeleteJobRequest, dict, ], ) -def test_get_job_rest(request_type): +def test_delete_job_rest(request_type): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7011,33 +7695,22 @@ def test_get_job_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = job.Job( - name="name_value", - uid="uid_value", - priority=898, - scheduling_policy=job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = job.Job.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_job(request) + response = client.delete_job(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, job.Job) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.priority == 898 - assert response.scheduling_policy == job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE + assert response.operation.name == "operations/spam" -def test_get_job_rest_use_cached_wrapped_rpc(): +def test_delete_job_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7051,111 +7724,34 @@ def test_get_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_job in client._transport._wrapped_methods + assert client._transport.delete_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_job] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc request = {} - client.get_job(request) + client.delete_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_job(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_job_rest_required_fields(request_type=batch.GetJobRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = job.Job() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = job.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_job(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_job_rest_unset_required_fields(): - transport = transports.BatchServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_job_rest_interceptors(null_interceptor): +def test_delete_job_rest_interceptors(null_interceptor): transport = transports.BatchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7168,13 +7764,15 @@ def test_get_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BatchServiceRestInterceptor, "post_get_job" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BatchServiceRestInterceptor, "post_delete_job" ) as post, mock.patch.object( - transports.BatchServiceRestInterceptor, "pre_get_job" + transports.BatchServiceRestInterceptor, "pre_delete_job" ) as pre: pre.assert_not_called() 
post.assert_not_called() - pb_message = batch.GetJobRequest.pb(batch.GetJobRequest()) + pb_message = batch.DeleteJobRequest.pb(batch.DeleteJobRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7185,17 +7783,19 @@ def test_get_job_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = job.Job.to_json(job.Job()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = batch.GetJobRequest() + request = batch.DeleteJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = job.Job() + post.return_value = operations_pb2.Operation() - client.get_job( + client.delete_job( request, metadata=[ ("key", "val"), @@ -7207,8 +7807,8 @@ def test_get_job_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_job_rest_bad_request( - transport: str = "rest", request_type=batch.GetJobRequest +def test_delete_job_rest_bad_request( + transport: str = "rest", request_type=batch.DeleteJobRequest ): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7228,10 +7828,10 @@ def test_get_job_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_job(request) + client.delete_job(request) -def test_get_job_rest_flattened(): +def test_delete_job_rest_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7240,7 +7840,7 @@ def test_get_job_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = job.Job() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} @@ -7254,13 +7854,11 @@ def test_get_job_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = job.Job.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_job(**mock_args) + client.delete_job(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -7272,7 +7870,7 @@ def test_get_job_rest_flattened(): ) -def test_get_job_rest_flattened_error(transport: str = "rest"): +def test_delete_job_rest_flattened_error(transport: str = "rest"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7281,13 +7879,13 @@ def test_get_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_job( - batch.GetJobRequest(), + client.delete_job( + batch.DeleteJobRequest(), name="name_value", ) -def test_get_job_rest_error(): +def test_delete_job_rest_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7296,11 +7894,11 @@ def test_get_job_rest_error(): @pytest.mark.parametrize( "request_type", [ - batch.DeleteJobRequest, + batch.CancelJobRequest, dict, ], ) -def test_delete_job_rest(request_type): +def test_cancel_job_rest(request_type): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7322,13 +7920,13 @@ def test_delete_job_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_job(request) + response = client.cancel_job(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_job_rest_use_cached_wrapped_rpc(): +def test_cancel_job_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7342,17 +7940,17 @@ def test_delete_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_job in client._transport._wrapped_methods + assert client._transport.cancel_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc request = {} - client.delete_job(request) + client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -7361,15 +7959,94 @@ def test_delete_job_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_job(request) + client.cancel_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +def test_cancel_job_rest_required_fields(request_type=batch.CancelJobRequest): + transport_class = transports.BatchServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_cancel_job_rest_unset_required_fields(): + transport = transports.BatchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.cancel_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_job_rest_interceptors(null_interceptor): +def test_cancel_job_rest_interceptors(null_interceptor): transport = transports.BatchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7384,13 +8061,13 @@ def test_delete_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - 
transports.BatchServiceRestInterceptor, "post_delete_job" + transports.BatchServiceRestInterceptor, "post_cancel_job" ) as post, mock.patch.object( - transports.BatchServiceRestInterceptor, "pre_delete_job" + transports.BatchServiceRestInterceptor, "pre_cancel_job" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = batch.DeleteJobRequest.pb(batch.DeleteJobRequest()) + pb_message = batch.CancelJobRequest.pb(batch.CancelJobRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7405,7 +8082,7 @@ def test_delete_job_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = batch.DeleteJobRequest() + request = batch.CancelJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -7413,7 +8090,7 @@ def test_delete_job_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_job( + client.cancel_job( request, metadata=[ ("key", "val"), @@ -7425,8 +8102,8 @@ def test_delete_job_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_job_rest_bad_request( - transport: str = "rest", request_type=batch.DeleteJobRequest +def test_cancel_job_rest_bad_request( + transport: str = "rest", request_type=batch.CancelJobRequest ): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7446,10 +8123,10 @@ def test_delete_job_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_job(request) + client.cancel_job(request) -def test_delete_job_rest_flattened(): +def test_cancel_job_rest_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7476,19 +8153,20 @@ def test_delete_job_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_job(**mock_args) + 
client.cancel_job(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/jobs/*}" % client.transport._host, + "%s/v1alpha/{name=projects/*/locations/*/jobs/*}:cancel" + % client.transport._host, args[1], ) -def test_delete_job_rest_flattened_error(transport: str = "rest"): +def test_cancel_job_rest_flattened_error(transport: str = "rest"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7497,13 +8175,13 @@ def test_delete_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_job( - batch.DeleteJobRequest(), + client.cancel_job( + batch.CancelJobRequest(), name="name_value", ) -def test_delete_job_rest_error(): +def test_cancel_job_rest_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7649,6 +8327,7 @@ def test_update_job_rest(request_type): "instance_template": "instance_template_value", "install_gpu_drivers": True, "install_ops_agent": True, + "block_project_ssh_keys": True, } ], "instance_templates": [ @@ -11106,6 +11785,7 @@ def test_batch_service_base_transport(): "create_job", "get_job", "delete_job", + "cancel_job", "update_job", "list_jobs", "get_task", @@ -11407,6 +12087,9 @@ def test_batch_service_client_transport_session_collision(transport_name): session1 = client1.transport.delete_job._session session2 = client2.transport.delete_job._session assert session1 != session2 + session1 = client1.transport.cancel_job._session + session2 = client2.transport.cancel_job._session + assert session1 != session2 session1 = client1.transport.update_job._session session2 = client2.transport.update_job._session assert session1 != 
session2 diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py index 10c32368d63b..962dd370306f 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -225,10 +224,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AppConnectionsServiceClient).get_transport_class, - type(AppConnectionsServiceClient), - ) + get_transport_class = AppConnectionsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py index 2036ca7cc4df..0e82da29ad9b 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py @@ -739,7 +739,7 @@ def __init__( Type[AppConnectionsServiceTransport], Callable[..., AppConnectionsServiceTransport], ] = ( - type(self).get_transport_class(transport) + 
AppConnectionsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AppConnectionsServiceTransport], transport) ) diff --git a/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json b/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json index f0f179574810..a630b6644a7e 100644 --- a/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json +++ b/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appconnections", - "version": "0.4.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py b/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py index 691dba428c52..e0d01098f488 100644 --- a/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py +++ b/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py @@ -1389,22 +1389,23 @@ async def test_list_app_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_app_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_app_connections(request) # Establish that the underlying gRPC stub method was 
called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_app_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1999,22 +2000,23 @@ async def test_get_app_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_app_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_app_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_app_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2343,8 +2345,9 @@ def test_create_app_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_app_connection(request) @@ -2400,26 +2403,28 @@ async def test_create_app_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_app_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_app_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_app_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2751,8 +2756,9 @@ def test_update_app_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_app_connection(request) @@ -2808,26 +2814,28 @@ async def test_update_app_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_app_connection - ] = mock_object + ] = mock_rpc request = {} await client.update_app_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_app_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3151,8 +3159,9 @@ def test_delete_app_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_app_connection(request) @@ -3208,26 +3217,28 @@ async def test_delete_app_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_app_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_app_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_app_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3604,22 +3615,23 @@ async def test_resolve_app_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resolve_app_connections - ] = mock_object + ] = mock_rpc request = {} await client.resolve_app_connections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resolve_app_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py index 926e618d6187..70a5047fb8ee 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -219,10 +218,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AppConnectorsServiceClient).get_transport_class, - type(AppConnectorsServiceClient), - ) + get_transport_class = AppConnectorsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py index 59edee9a9254..485df705d13c 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py @@ -695,7 +695,7 @@ def __init__( Type[AppConnectorsServiceTransport], Callable[..., AppConnectorsServiceTransport], ] = ( - type(self).get_transport_class(transport) + 
AppConnectorsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AppConnectorsServiceTransport], transport) ) diff --git a/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json b/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json index 400b504d4ea6..b24a837b4434 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json +++ b/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appconnectors", - "version": "0.4.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py b/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py index db455cd7bb62..120223dd7e89 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py +++ b/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py @@ -1392,22 +1392,23 @@ async def test_list_app_connectors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_app_connectors - ] = mock_object + ] = mock_rpc request = {} await client.list_app_connectors(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_app_connectors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1992,22 +1993,23 @@ async def test_get_app_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_app_connector - ] = mock_object + ] = mock_rpc request = {} await client.get_app_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_app_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2331,8 +2333,9 @@ def test_create_app_connector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_app_connector(request) @@ -2388,26 +2391,28 @@ async def test_create_app_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_app_connector - ] = mock_object + ] = mock_rpc request = {} await client.create_app_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_app_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2738,8 +2743,9 @@ def test_update_app_connector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_app_connector(request) @@ -2795,26 +2801,28 @@ async def test_update_app_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_app_connector - ] = mock_object + ] = mock_rpc request = {} await client.update_app_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_app_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3137,8 +3145,9 @@ def test_delete_app_connector_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_app_connector(request) @@ -3194,26 +3203,28 @@ async def test_delete_app_connector_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_app_connector - ] = mock_object + ] = mock_rpc request = {} await client.delete_app_connector(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_app_connector(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3516,8 +3527,9 @@ def test_report_status_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.report_status(request) @@ -3571,26 +3583,28 @@ async def test_report_status_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.report_status - ] = mock_object + ] = mock_rpc request = {} await client.report_status(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.report_status(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py index 71ed7b071c35..133141bf3bfc 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -212,10 +211,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AppGatewaysServiceClient).get_transport_class, - type(AppGatewaysServiceClient), - ) + get_transport_class = AppGatewaysServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py index abc83d4d0df6..9bb6f081d233 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py @@ -690,7 +690,7 @@ def __init__( Type[AppGatewaysServiceTransport], Callable[..., AppGatewaysServiceTransport], ] = ( - type(self).get_transport_class(transport) + AppGatewaysServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AppGatewaysServiceTransport], transport) ) diff --git a/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json b/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json index 36bbe9dd1c18..4146eed832ac 100644 --- a/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json +++ b/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": 
"google-cloud-beyondcorp-appgateways", - "version": "0.4.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py b/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py index b884f194d421..f80e9716f499 100644 --- a/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py +++ b/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py @@ -1358,22 +1358,23 @@ async def test_list_app_gateways_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_app_gateways - ] = mock_object + ] = mock_rpc request = {} await client.list_app_gateways(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_app_gateways(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1957,22 +1958,23 @@ async def test_get_app_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_app_gateway - ] = mock_object + ] = mock_rpc request = {} await client.get_app_gateway(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_app_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2294,8 +2296,9 @@ def test_create_app_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_app_gateway(request) @@ -2351,26 +2354,28 @@ async def test_create_app_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_app_gateway - ] = mock_object + ] = mock_rpc request = {} await client.create_app_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_app_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2703,8 +2708,9 @@ def test_delete_app_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_app_gateway(request) @@ -2760,26 +2766,28 @@ async def test_delete_app_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_app_gateway - ] = mock_object + ] = mock_rpc request = {} await client.delete_app_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_app_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py index e1445f5a2545..185fc139c1da 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -229,10 +228,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ClientConnectorServicesServiceClient).get_transport_class, - type(ClientConnectorServicesServiceClient), - ) + get_transport_class = ClientConnectorServicesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py index 5b0ad123fb11..550c2c7aa6f0 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py +++ 
b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py @@ -707,7 +707,7 @@ def __init__( Type[ClientConnectorServicesServiceTransport], Callable[..., ClientConnectorServicesServiceTransport], ] = ( - type(self).get_transport_class(transport) + ClientConnectorServicesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., ClientConnectorServicesServiceTransport], transport diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json b/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json index 67eefa2752d3..9a230827e86d 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-clientconnectorservices", - "version": "0.4.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py b/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py index f867d5f86c02..982b7b9d9252 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py +++ 
b/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py @@ -1429,22 +1429,23 @@ async def test_list_client_connector_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_client_connector_services - ] = mock_object + ] = mock_rpc request = {} await client.list_client_connector_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_client_connector_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2054,22 +2055,23 @@ async def test_get_client_connector_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_client_connector_service - ] = mock_object + ] = mock_rpc request = {} await client.get_client_connector_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_client_connector_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2404,8 +2406,9 @@ def test_create_client_connector_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_client_connector_service(request) @@ -2464,26 +2467,28 @@ async def test_create_client_connector_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_client_connector_service - ] = mock_object + ] = mock_rpc request = {} await client.create_client_connector_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_client_connector_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2836,8 +2841,9 @@ def test_update_client_connector_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_client_connector_service(request) @@ -2896,26 +2902,28 @@ async def test_update_client_connector_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_client_connector_service - ] = mock_object + ] = mock_rpc request = {} await client.update_client_connector_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_client_connector_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3260,8 +3268,9 @@ def test_delete_client_connector_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_client_connector_service(request) @@ -3320,26 +3329,28 @@ async def test_delete_client_connector_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_client_connector_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_client_connector_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_client_connector_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py index 792f80c59ee5..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py index 792f80c59ee5..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py index 6aced810bb66..d66c2154fbe9 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -213,10 +212,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ClientGatewaysServiceClient).get_transport_class, - type(ClientGatewaysServiceClient), - ) + get_transport_class = ClientGatewaysServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py index 623bc52e144d..96a93ecd29e2 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py @@ -689,7 +689,7 @@ def __init__( Type[ClientGatewaysServiceTransport], Callable[..., ClientGatewaysServiceTransport], ] = ( - type(self).get_transport_class(transport) + ClientGatewaysServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ClientGatewaysServiceTransport], transport) ) diff --git a/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json b/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json index e6588b11df67..6d3ff814a340 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json +++ b/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json @@ 
-8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-clientgateways", - "version": "0.4.10" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py b/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py index 26f2b6ff7a5b..b2315222b3f8 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py +++ b/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py @@ -1388,22 +1388,23 @@ async def test_list_client_gateways_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_client_gateways - ] = mock_object + ] = mock_rpc request = {} await client.list_client_gateways(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_client_gateways(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1992,22 +1993,23 @@ async def test_get_client_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_client_gateway - ] = mock_object + ] = mock_rpc request = {} await client.get_client_gateway(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_client_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2332,8 +2334,9 @@ def test_create_client_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_client_gateway(request) @@ -2389,26 +2392,28 @@ async def test_create_client_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_client_gateway - ] = mock_object + ] = mock_rpc request = {} await client.create_client_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_client_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2742,8 +2747,9 @@ def test_delete_client_gateway_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_client_gateway(request) @@ -2799,26 +2805,28 @@ async def test_delete_client_gateway_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_client_gateway - ] = mock_object + ] = mock_rpc request = {} await client.delete_client_gateway(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_client_gateway(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py index 292deecf4f23..b75e593f1449 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -215,10 +214,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnalyticsHubServiceClient).get_transport_class, - type(AnalyticsHubServiceClient), - ) + get_transport_class = AnalyticsHubServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py index 36b8315ade23..c9dabbcb36ef 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py @@ -764,7 +764,7 @@ def __init__( Type[AnalyticsHubServiceTransport], Callable[..., AnalyticsHubServiceTransport], ] = ( - type(self).get_transport_class(transport) + AnalyticsHubServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else 
cast(Callable[..., AnalyticsHubServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json index c9affa2ed5af..0e7f7893e9b6 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-analyticshub", - "version": "0.4.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py b/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py index 5d773f946ca5..4dd8673a177b 100644 --- a/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py +++ b/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py @@ -1334,22 +1334,23 @@ async def test_list_data_exchanges_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_exchanges - ] = mock_object + ] = mock_rpc request = {} await client.list_data_exchanges(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_exchanges(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1925,22 +1926,23 @@ async def test_list_org_data_exchanges_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_org_data_exchanges - ] = mock_object + ] = mock_rpc request = {} await client.list_org_data_exchanges(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_org_data_exchanges(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2533,22 +2535,23 @@ async def test_get_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.get_data_exchange(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2956,22 +2959,23 @@ async def test_create_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.create_data_exchange(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3383,22 +3387,23 @@ async def test_update_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.update_data_exchange(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3786,22 +3791,23 @@ async def test_delete_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_exchange(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4153,22 +4159,23 @@ async def test_list_listings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_listings - ] = mock_object + ] = mock_rpc request = {} await client.list_listings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_listings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4738,22 +4745,23 @@ async def test_get_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_listing - ] = mock_object + ] = mock_rpc request = {} await client.get_listing(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5149,22 +5157,23 @@ async def test_create_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_listing - ] = mock_object + ] = mock_rpc request = {} await client.create_listing(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5588,22 +5597,23 @@ async def test_update_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_listing - ] = mock_object + ] = mock_rpc request = {} await client.update_listing(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5997,22 +6007,23 @@ async def test_delete_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_listing - ] = mock_object + ] = mock_rpc request = {} await client.delete_listing(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6357,22 +6368,23 @@ async def test_subscribe_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.subscribe_listing - ] = mock_object + ] = mock_rpc request = {} await client.subscribe_listing(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.subscribe_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6689,8 +6701,9 @@ def test_subscribe_data_exchange_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.subscribe_data_exchange(request) @@ -6746,26 +6759,28 @@ async def test_subscribe_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.subscribe_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.subscribe_data_exchange(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.subscribe_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7076,8 +7091,9 @@ def test_refresh_subscription_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.refresh_subscription(request) @@ -7133,26 +7149,28 @@ async def test_refresh_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.refresh_subscription - ] = mock_object + ] = mock_rpc request = {} await client.refresh_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.refresh_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7524,22 +7542,23 @@ async def test_get_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_subscription - ] = mock_object + ] = mock_rpc request = {} await client.get_subscription(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7912,22 +7931,23 @@ async def test_list_subscriptions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_subscriptions - ] = mock_object + ] = mock_rpc request = {} await client.list_subscriptions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_subscriptions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8503,22 +8523,23 @@ async def test_list_shared_resource_subscriptions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_shared_resource_subscriptions - ] = mock_object + ] = mock_rpc request = {} await client.list_shared_resource_subscriptions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_shared_resource_subscriptions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9089,22 +9110,23 @@ async def test_revoke_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.revoke_subscription - ] = mock_object + ] = mock_rpc request = {} await client.revoke_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.revoke_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9414,8 +9436,9 @@ def test_delete_subscription_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_subscription(request) @@ -9471,26 +9494,28 @@ async def test_delete_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_subscription - ] = mock_object + ] = mock_rpc request = {} await client.delete_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9849,22 +9874,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10151,22 +10177,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10463,22 +10490,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py index 27a1c7da1ce8..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py index 27a1c7da1ce8..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py index 39cdc5786c28..9976660fe3a9 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MetastoreServiceClient).get_transport_class, type(MetastoreServiceClient) - ) + get_transport_class = MetastoreServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py index 73043e89fbc7..37fc63db909f 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py @@ -727,7 +727,7 @@ def __init__( Type[MetastoreServiceTransport], Callable[..., MetastoreServiceTransport], ] = ( - type(self).get_transport_class(transport) + MetastoreServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetastoreServiceTransport], transport) ) diff --git 
a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py index 27a1c7da1ce8..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py index 57d093f919c3..740fef82f9d2 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,9 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MetastoreServiceClient).get_transport_class, type(MetastoreServiceClient) - ) + get_transport_class = MetastoreServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py index 30013e6dc3a4..0893a12db218 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py @@ -753,7 +753,7 @@ def __init__( Type[MetastoreServiceTransport], Callable[..., MetastoreServiceTransport], ] = ( - type(self).get_transport_class(transport) + MetastoreServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetastoreServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json index 904fbd6135fd..bc293aab30cd 100644 --- a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-biglake", - "version": "0.4.9" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json index 4692b3d37bb0..f43e6ba05775 100644 --- a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-biglake", - "version": "0.4.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py index ec4c2498df35..4eb88b1409b3 100644 --- a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py @@ -1318,22 +1318,23 @@ async def test_create_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_catalog - ] = mock_object + ] = mock_rpc request = {} await client.create_catalog(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1698,22 +1699,23 @@ async def test_delete_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_catalog - ] = mock_object + ] = mock_rpc request = {} await client.delete_catalog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2058,22 +2060,23 @@ async def test_get_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_catalog - ] = mock_object + ] = mock_rpc request = {} await client.get_catalog(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2420,22 +2423,23 @@ async def test_list_catalogs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_catalogs - ] = mock_object + ] = mock_rpc request = {} await client.list_catalogs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_catalogs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2983,22 +2987,23 @@ async def test_create_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_database - ] = mock_object + ] = mock_rpc request = {} await client.create_database(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3392,22 +3397,23 @@ async def test_delete_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_database - ] = mock_object + ] = mock_rpc request = {} await client.delete_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3753,22 +3759,23 @@ async def test_update_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_database - ] = mock_object + ] = mock_rpc request = {} await client.update_database(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4152,22 +4159,23 @@ async def test_get_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_database - ] = mock_object + ] = mock_rpc request = {} await client.get_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4516,22 +4524,23 @@ async def test_list_databases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_databases - ] = mock_object + ] = mock_rpc request = {} await client.list_databases(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_databases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5082,22 +5091,23 @@ async def test_create_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_table - ] = mock_object + ] = mock_rpc request = {} await client.create_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5496,22 +5506,23 @@ async def test_delete_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_table - ] = mock_object + ] = mock_rpc request = {} await client.delete_table(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5862,22 +5873,23 @@ async def test_update_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_table - ] = mock_object + ] = mock_rpc request = {} await client.update_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6268,22 +6280,23 @@ async def test_rename_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_table - ] = mock_object + ] = mock_rpc request = {} await client.rename_table(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6646,22 +6659,23 @@ async def test_get_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_table - ] = mock_object + ] = mock_rpc request = {} await client.get_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7012,22 +7026,23 @@ async def test_list_tables_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tables - ] = mock_object + ] = mock_rpc request = {} await client.list_tables(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tables(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py index 883e1cd3626a..c6daf99d6847 100644 --- a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py @@ -1318,22 +1318,23 @@ async def test_create_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_catalog - ] = mock_object + ] = mock_rpc request = {} await client.create_catalog(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1698,22 +1699,23 @@ async def test_delete_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_catalog - ] = mock_object + ] = mock_rpc request = {} await client.delete_catalog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2058,22 +2060,23 @@ async def test_get_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_catalog - ] = mock_object + ] = mock_rpc request = {} await client.get_catalog(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2420,22 +2423,23 @@ async def test_list_catalogs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_catalogs - ] = mock_object + ] = mock_rpc request = {} await client.list_catalogs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_catalogs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2983,22 +2987,23 @@ async def test_create_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_database - ] = mock_object + ] = mock_rpc request = {} await client.create_database(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3392,22 +3397,23 @@ async def test_delete_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_database - ] = mock_object + ] = mock_rpc request = {} await client.delete_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3753,22 +3759,23 @@ async def test_update_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_database - ] = mock_object + ] = mock_rpc request = {} await client.update_database(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4152,22 +4159,23 @@ async def test_get_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_database - ] = mock_object + ] = mock_rpc request = {} await client.get_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4516,22 +4524,23 @@ async def test_list_databases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_databases - ] = mock_object + ] = mock_rpc request = {} await client.list_databases(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_databases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5082,22 +5091,23 @@ async def test_create_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_table - ] = mock_object + ] = mock_rpc request = {} await client.create_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5496,22 +5506,23 @@ async def test_delete_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_table - ] = mock_object + ] = mock_rpc request = {} await client.delete_table(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5862,22 +5873,23 @@ async def test_update_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_table - ] = mock_object + ] = mock_rpc request = {} await client.update_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6268,22 +6280,23 @@ async def test_rename_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_table - ] = mock_object + ] = mock_rpc request = {} await client.rename_table(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6646,22 +6659,23 @@ async def test_get_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_table - ] = mock_object + ] = mock_rpc request = {} await client.get_table(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_table(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7012,22 +7026,23 @@ async def test_list_tables_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tables - ] = mock_object + ] = mock_rpc request = {} await client.list_tables(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tables(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7577,22 +7592,23 @@ async def test_create_lock_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_lock - ] = mock_object + ] = mock_rpc request = {} await client.create_lock(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_lock(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7944,22 +7960,23 @@ async def test_delete_lock_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_lock - ] = mock_object + ] = mock_rpc request = {} await client.delete_lock(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_lock(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8304,22 +8321,23 @@ async def test_check_lock_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_lock - ] = mock_object + ] = mock_rpc request = {} await client.check_lock(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_lock(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8668,22 +8686,23 @@ async def test_list_locks_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_locks - ] = mock_object + ] = mock_rpc request = {} await client.list_locks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_locks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py index e018cef961ff..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py index e018cef961ff..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py index 6d980c8ff3b3..9aef76185375 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConnectionServiceClient).get_transport_class, type(ConnectionServiceClient) - ) + get_transport_class = ConnectionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py index 8eaa107e8478..4dd767c16bad 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py @@ -716,7 +716,7 @@ def __init__( Type[ConnectionServiceTransport], Callable[..., ConnectionServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConnectionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConnectionServiceTransport], transport) ) diff --git 
a/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json b/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json index 273e29615ff2..0fde6f0d6368 100644 --- a/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json +++ b/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-connection", - "version": "1.15.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py index 4f7b0d2864d8..a643d6edcbd2 100644 --- a/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py +++ b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py @@ -1355,22 +1355,23 @@ async def test_create_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_connection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1775,22 +1776,23 @@ async def test_get_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2153,22 +2155,23 @@ async def test_list_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_connections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2736,22 +2739,23 @@ async def test_update_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_connection - ] = mock_object + ] = mock_rpc request = {} await client.update_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3144,22 +3148,23 @@ async def test_delete_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_connection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3512,22 +3517,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3894,22 +3900,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4286,22 +4293,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py index bf678492aaad..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.5.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py index bf678492aaad..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py index b1ea10447be6..877a8f55c9ab 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -207,10 +206,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnalyticsHubServiceClient).get_transport_class, - type(AnalyticsHubServiceClient), - ) + get_transport_class = AnalyticsHubServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py index 361b7577651d..c3820bca8c92 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py @@ -718,7 +718,7 @@ def __init__( Type[AnalyticsHubServiceTransport], Callable[..., AnalyticsHubServiceTransport], ] = ( - type(self).get_transport_class(transport) + AnalyticsHubServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AnalyticsHubServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json b/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json index 4a849aea8d88..6e4d55c65d52 100644 --- a/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json +++ b/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json @@ -8,7 +8,7 @@ ], "language": 
"PYTHON", "name": "google-cloud-bigquery-data-exchange", - "version": "0.5.13" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py b/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py index 8319363da6db..61a9895afd21 100644 --- a/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py +++ b/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py @@ -1323,22 +1323,23 @@ async def test_list_data_exchanges_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_exchanges - ] = mock_object + ] = mock_rpc request = {} await client.list_data_exchanges(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_exchanges(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1914,22 +1915,23 @@ async def test_list_org_data_exchanges_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_org_data_exchanges - ] = mock_object + ] = mock_rpc request = {} await client.list_org_data_exchanges(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_org_data_exchanges(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2519,22 +2521,23 @@ async def test_get_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.get_data_exchange(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2937,22 +2940,23 @@ async def test_create_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.create_data_exchange(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3359,22 +3363,23 @@ async def test_update_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.update_data_exchange(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3760,22 +3765,23 @@ async def test_delete_data_exchange_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_exchange - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_exchange(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_exchange(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4127,22 +4133,23 @@ async def test_list_listings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_listings - ] = mock_object + ] = mock_rpc request = {} await client.list_listings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_listings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4709,22 +4716,23 @@ async def test_get_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_listing - ] = mock_object + ] = mock_rpc request = {} await client.get_listing(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5115,22 +5123,23 @@ async def test_create_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_listing - ] = mock_object + ] = mock_rpc request = {} await client.create_listing(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5549,22 +5558,23 @@ async def test_update_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_listing - ] = mock_object + ] = mock_rpc request = {} await client.update_listing(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5956,22 +5966,23 @@ async def test_delete_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_listing - ] = mock_object + ] = mock_rpc request = {} await client.delete_listing(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6316,22 +6327,23 @@ async def test_subscribe_listing_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.subscribe_listing - ] = mock_object + ] = mock_rpc request = {} await client.subscribe_listing(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.subscribe_listing(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6690,22 +6702,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6992,22 +7005,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7304,22 +7318,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py index 911c2d1dfcef..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py index 911c2d1dfcef..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py index dc9c00a75490..02ffeb4d3d2c 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataPolicyServiceClient).get_transport_class, type(DataPolicyServiceClient) - ) + get_transport_class = DataPolicyServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py index 584b1dc78848..50a82cf1652f 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py @@ -673,7 +673,7 @@ def __init__( Type[DataPolicyServiceTransport], Callable[..., DataPolicyServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataPolicyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataPolicyServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py index 911c2d1dfcef..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py index 633beaae99b0..a1bfade9d9f6 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,9 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataPolicyServiceClient).get_transport_class, type(DataPolicyServiceClient) - ) + get_transport_class = DataPolicyServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py index 00cfdfd25187..c7e5e6aa27a8 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py @@ -673,7 +673,7 @@ def __init__( Type[DataPolicyServiceTransport], Callable[..., DataPolicyServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataPolicyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is 
None else cast(Callable[..., DataPolicyServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json index 975b08c55b8a..6be87a75b46d 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datapolicies", - "version": "0.6.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json index 4e353afc1b2f..6753f39659f4 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datapolicies", - "version": "0.6.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py index 6a7e0e4337fd..ec982a3e1c3c 100644 --- a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py +++ 
b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py @@ -1349,22 +1349,23 @@ async def test_create_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_data_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1758,22 +1759,23 @@ async def test_update_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_data_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2173,22 +2175,23 @@ async def test_rename_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.rename_data_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2569,22 +2572,23 @@ async def test_delete_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2944,22 +2948,23 @@ async def test_get_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_data_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3331,22 +3336,23 @@ async def test_list_data_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_data_policies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3910,22 +3916,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4212,22 +4219,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4524,22 +4532,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py index 8ed13bae9c6c..5b3ce146b7bf 100644 --- a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py +++ b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py @@ -1311,22 +1311,23 @@ async def test_create_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_policy - ] = mock_object + ] = mock_rpc request = {} await 
client.create_data_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1720,22 +1721,23 @@ async def test_update_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_data_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2116,22 +2118,23 @@ async def test_delete_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2491,22 +2494,23 @@ async def test_get_data_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_data_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2876,22 +2880,23 @@ async def test_list_data_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_data_policies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3455,22 +3460,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3757,22 +3763,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4069,22 +4076,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md index 0139e057a0fa..0df3c54ccc62 100644 --- a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md +++ b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md @@ -4,6 +4,27 @@ [1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history +## [3.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.7...google-cloud-bigquery-datatransfer-v3.16.0) (2024-10-08) + + +### Features + +* Add scheduleOptionsV2 and Error fields for TransferConfig ([052585c](https://github.com/googleapis/google-cloud-python/commit/052585c63dfa172b7f88fdb5882eda446fc47bfe)) + +## [3.15.7](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.6...google-cloud-bigquery-datatransfer-v3.15.7) (2024-09-04) 
+ + +### Documentation + +* [google-cloud-bigquery-datatransfer] add a note to the CreateTransferConfigRequest and UpdateTransferConfigRequest to disable restricting service account usage ([#13051](https://github.com/googleapis/google-cloud-python/issues/13051)) ([4136c10](https://github.com/googleapis/google-cloud-python/commit/4136c10fabc1df012b028a5d407aaec326e448b6)) + +## [3.15.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.5...google-cloud-bigquery-datatransfer-v3.15.6) (2024-08-20) + + +### Documentation + +* [google-cloud-bigquery-datatransfer] deprecate `authorization_code` ([#13021](https://github.com/googleapis/google-cloud-python/issues/13021)) ([78bd284](https://github.com/googleapis/google-cloud-python/commit/78bd284bbaa89ef26ce60c20beb6445212c8b27b)) + ## [3.15.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.4...google-cloud-bigquery-datatransfer-v3.15.5) (2024-07-30) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py index c2cce6e9d9e7..bb51c87c8254 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py @@ -54,7 +54,11 @@ from google.cloud.bigquery_datatransfer_v1.types.transfer import ( EmailPreferences, EncryptionConfiguration, + EventDrivenSchedule, + ManualSchedule, ScheduleOptions, + ScheduleOptionsV2, + TimeBasedSchedule, TransferConfig, TransferMessage, TransferRun, @@ -93,7 +97,11 @@ "UpdateTransferConfigRequest", "EmailPreferences", "EncryptionConfiguration", + "EventDrivenSchedule", + "ManualSchedule", "ScheduleOptions", + "ScheduleOptionsV2", + "TimeBasedSchedule", "TransferConfig", "TransferMessage", "TransferRun", diff --git 
a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py index 71a07a06cd93..dd91ea26f1b4 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.15.5" # {x-release-please-version} +__version__ = "3.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py index 41ad09552699..7df301ab3c59 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py @@ -52,7 +52,11 @@ from .types.transfer import ( EmailPreferences, EncryptionConfiguration, + EventDrivenSchedule, + ManualSchedule, ScheduleOptions, + ScheduleOptionsV2, + TimeBasedSchedule, TransferConfig, TransferMessage, TransferRun, @@ -74,6 +78,7 @@ "EmailPreferences", "EncryptionConfiguration", "EnrollDataSourcesRequest", + "EventDrivenSchedule", "GetDataSourceRequest", "GetTransferConfigRequest", "GetTransferRunRequest", @@ -85,11 +90,14 @@ "ListTransferLogsResponse", "ListTransferRunsRequest", "ListTransferRunsResponse", + "ManualSchedule", "ScheduleOptions", + "ScheduleOptionsV2", "ScheduleTransferRunsRequest", "ScheduleTransferRunsResponse", "StartManualTransferRunsRequest", "StartManualTransferRunsResponse", + "TimeBasedSchedule", "TransferConfig", "TransferMessage", "TransferRun", diff --git 
a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py index 71a07a06cd93..dd91ea26f1b4 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.15.5" # {x-release-please-version} +__version__ = "3.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py index f66b024bd01c..ea32882077da 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,10 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataTransferServiceClient).get_transport_class, - type(DataTransferServiceClient), - ) + get_transport_class = DataTransferServiceClient.get_transport_class def __init__( self, @@ -558,17 +554,20 @@ async def sample_create_transfer_config(): Args: request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest, dict]]): - The request object. A request to create a data transfer - configuration. 
If new credentials are - needed for this transfer configuration, - authorization info must be provided. If - authorization info is provided, the - transfer configuration will be - associated with the user id - corresponding to the authorization info. - Otherwise, the transfer configuration - will be associated with the calling - user. + The request object. A request to create a data transfer configuration. If + new credentials are needed for this transfer + configuration, authorization info must be provided. If + authorization info is provided, the transfer + configuration will be associated with the user id + corresponding to the authorization info. Otherwise, the + transfer configuration will be associated with the + calling user. + + When using a cross project service account for creating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. parent (:class:`str`): Required. The BigQuery project id where the transfer configuration should be created. Must be in the format @@ -697,10 +696,15 @@ async def sample_update_transfer_config(): Args: request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest, dict]]): - The request object. A request to update a transfer - configuration. To update the user id of - the transfer configuration, - authorization info needs to be provided. + The request object. A request to update a transfer configuration. To update + the user id of the transfer configuration, authorization + info needs to be provided. + + When using a cross project service account for updating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. transfer_config (:class:`google.cloud.bigquery_datatransfer_v1.types.TransferConfig`): Required. 
Data transfer configuration to create. diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py index c8dc5155fa8e..b5032cd300f4 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py @@ -713,7 +713,7 @@ def __init__( Type[DataTransferServiceTransport], Callable[..., DataTransferServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataTransferServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataTransferServiceTransport], transport) ) @@ -1002,17 +1002,20 @@ def sample_create_transfer_config(): Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest, dict]): - The request object. A request to create a data transfer - configuration. If new credentials are - needed for this transfer configuration, - authorization info must be provided. If - authorization info is provided, the - transfer configuration will be - associated with the user id - corresponding to the authorization info. - Otherwise, the transfer configuration - will be associated with the calling - user. + The request object. A request to create a data transfer configuration. If + new credentials are needed for this transfer + configuration, authorization info must be provided. If + authorization info is provided, the transfer + configuration will be associated with the user id + corresponding to the authorization info. Otherwise, the + transfer configuration will be associated with the + calling user. 
+ + When using a cross project service account for creating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. parent (str): Required. The BigQuery project id where the transfer configuration should be created. Must be in the format @@ -1138,10 +1141,15 @@ def sample_update_transfer_config(): Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest, dict]): - The request object. A request to update a transfer - configuration. To update the user id of - the transfer configuration, - authorization info needs to be provided. + The request object. A request to update a transfer configuration. To update + the user id of the transfer configuration, authorization + info needs to be provided. + + When using a cross project service account for updating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): Required. Data transfer configuration to create. diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py index cd9e0af0c4d2..91347f2be728 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py @@ -789,17 +789,20 @@ def __call__( Args: request (~.datatransfer.CreateTransferConfigRequest): - The request object. A request to create a data transfer - configuration. 
If new credentials are - needed for this transfer configuration, - authorization info must be provided. If - authorization info is provided, the - transfer configuration will be - associated with the user id - corresponding to the authorization info. - Otherwise, the transfer configuration - will be associated with the calling - user. + The request object. A request to create a data transfer configuration. If + new credentials are needed for this transfer + configuration, authorization info must be provided. If + authorization info is provided, the transfer + configuration will be associated with the user id + corresponding to the authorization info. Otherwise, the + transfer configuration will be associated with the + calling user. + + When using a cross project service account for creating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2104,10 +2107,15 @@ def __call__( Args: request (~.datatransfer.UpdateTransferConfigRequest): - The request object. A request to update a transfer - configuration. To update the user id of - the transfer configuration, - authorization info needs to be provided. + The request object. A request to update a transfer configuration. To update + the user id of the transfer configuration, authorization + info needs to be provided. + + When using a cross project service account for updating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py index 2caa0e24a50d..f704ac5f758d 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py @@ -43,7 +43,11 @@ from .transfer import ( EmailPreferences, EncryptionConfiguration, + EventDrivenSchedule, + ManualSchedule, ScheduleOptions, + ScheduleOptionsV2, + TimeBasedSchedule, TransferConfig, TransferMessage, TransferRun, @@ -80,7 +84,11 @@ "UpdateTransferConfigRequest", "EmailPreferences", "EncryptionConfiguration", + "EventDrivenSchedule", + "ManualSchedule", "ScheduleOptions", + "ScheduleOptionsV2", + "TimeBasedSchedule", "TransferConfig", "TransferMessage", "TransferRun", diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py index 1138fb6ff3f5..e0319b78c3e9 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py @@ -480,6 +480,12 @@ class CreateTransferConfigRequest(proto.Message): user id corresponding to the authorization info. Otherwise, the transfer configuration will be associated with the calling user. + When using a cross project service account for creating a transfer + config, you must enable cross project service account usage. For + more information, see `Disable attachment of service accounts to + resources in other + projects `__. + Attributes: parent (str): Required. 
The BigQuery project id where the transfer @@ -492,6 +498,11 @@ class CreateTransferConfigRequest(proto.Message): Required. Data transfer configuration to create. authorization_code (str): + Deprecated: Authorization code was required when + ``transferConfig.dataSourceId`` is 'youtube_channel' but it + is no longer used in any data sources. Use ``version_info`` + instead. + Optional OAuth2 authorization code to use with this transfer configuration. This is required only if ``transferConfig.dataSourceId`` is 'youtube_channel' and new @@ -505,7 +516,7 @@ class CreateTransferConfigRequest(proto.Message): https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes - - The client_id is the OAuth client_id of the a data source + - The client_id is the OAuth client_id of the data source as returned by ListDataSources method. - data_source_scopes are the scopes returned by ListDataSources method. @@ -514,8 +525,10 @@ class CreateTransferConfigRequest(proto.Message): ``service_account_name`` is used to create the transfer config. version_info (str): - Optional version info. This is required only if - ``transferConfig.dataSourceId`` is not 'youtube_channel' and + Optional version info. This parameter replaces + ``authorization_code`` which is no longer used in any data + sources. This is required only if + ``transferConfig.dataSourceId`` is 'youtube_channel' *or* new credentials are needed, as indicated by ``CheckValidCreds``. 
In order to obtain version info, make a request to the following URL: @@ -526,7 +539,7 @@ class CreateTransferConfigRequest(proto.Message): https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes - - The client_id is the OAuth client_id of the a data source + - The client_id is the OAuth client_id of the data source as returned by ListDataSources method. - data_source_scopes are the scopes returned by ListDataSources method. @@ -570,15 +583,26 @@ class CreateTransferConfigRequest(proto.Message): class UpdateTransferConfigRequest(proto.Message): - r"""A request to update a transfer configuration. To update the - user id of the transfer configuration, authorization info needs - to be provided. + r"""A request to update a transfer configuration. To update the user id + of the transfer configuration, authorization info needs to be + provided. + + When using a cross project service account for updating a transfer + config, you must enable cross project service account usage. For + more information, see `Disable attachment of service accounts to + resources in other + projects `__. Attributes: transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): Required. Data transfer configuration to create. authorization_code (str): + Deprecated: Authorization code was required when + ``transferConfig.dataSourceId`` is 'youtube_channel' but it + is no longer used in any data sources. Use ``version_info`` + instead. + Optional OAuth2 authorization code to use with this transfer configuration. 
This is required only if ``transferConfig.dataSourceId`` is 'youtube_channel' and new @@ -592,7 +616,7 @@ class UpdateTransferConfigRequest(proto.Message): https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes - - The client_id is the OAuth client_id of the a data source + - The client_id is the OAuth client_id of the data source as returned by ListDataSources method. - data_source_scopes are the scopes returned by ListDataSources method. @@ -604,8 +628,10 @@ class UpdateTransferConfigRequest(proto.Message): Required. Required list of fields to be updated in this request. version_info (str): - Optional version info. This is required only if - ``transferConfig.dataSourceId`` is not 'youtube_channel' and + Optional version info. This parameter replaces + ``authorization_code`` which is no longer used in any data + sources. This is required only if + ``transferConfig.dataSourceId`` is 'youtube_channel' *or* new credentials are needed, as indicated by ``CheckValidCreds``. In order to obtain version info, make a request to the following URL: @@ -616,7 +642,7 @@ class UpdateTransferConfigRequest(proto.Message): https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes - - The client_id is the OAuth client_id of the a data source + - The client_id is the OAuth client_id of the data source as returned by ListDataSources method. - data_source_scopes are the scopes returned by ListDataSources method. 
diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py index 4403154949f8..bd37dfdec84b 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py @@ -30,6 +30,10 @@ "TransferState", "EmailPreferences", "ScheduleOptions", + "ScheduleOptionsV2", + "TimeBasedSchedule", + "ManualSchedule", + "EventDrivenSchedule", "UserInfo", "TransferConfig", "EncryptionConfiguration", @@ -144,6 +148,130 @@ class ScheduleOptions(proto.Message): ) +class ScheduleOptionsV2(proto.Message): + r"""V2 options customizing different types of data transfer + schedule. This field supports existing time-based and manual + transfer schedule. Also supports Event-Driven transfer schedule. + ScheduleOptionsV2 cannot be used together with + ScheduleOptions/Schedule. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + time_based_schedule (google.cloud.bigquery_datatransfer_v1.types.TimeBasedSchedule): + Time based transfer schedule options. This is + the default schedule option. + + This field is a member of `oneof`_ ``schedule``. + manual_schedule (google.cloud.bigquery_datatransfer_v1.types.ManualSchedule): + Manual transfer schedule. If set, the transfer run will not + be auto-scheduled by the system, unless the client invokes + StartManualTransferRuns. This is equivalent to + disable_auto_scheduling = true. + + This field is a member of `oneof`_ ``schedule``. 
+ event_driven_schedule (google.cloud.bigquery_datatransfer_v1.types.EventDrivenSchedule): + Event driven transfer schedule options. If + set, the transfer will be scheduled upon events + arrial. + + This field is a member of `oneof`_ ``schedule``. + """ + + time_based_schedule: "TimeBasedSchedule" = proto.Field( + proto.MESSAGE, + number=1, + oneof="schedule", + message="TimeBasedSchedule", + ) + manual_schedule: "ManualSchedule" = proto.Field( + proto.MESSAGE, + number=2, + oneof="schedule", + message="ManualSchedule", + ) + event_driven_schedule: "EventDrivenSchedule" = proto.Field( + proto.MESSAGE, + number=3, + oneof="schedule", + message="EventDrivenSchedule", + ) + + +class TimeBasedSchedule(proto.Message): + r"""Options customizing the time based transfer schedule. + Options are migrated from the original ScheduleOptions message. + + Attributes: + schedule (str): + Data transfer schedule. If the data source does not support + a custom schedule, this should be empty. If it is empty, the + default value for the data source will be used. The + specified times are in UTC. Examples of valid format: + ``1st,3rd monday of month 15:30``, + ``every wed,fri of jan,jun 13:15``, and + ``first sunday of quarter 00:00``. See more explanation + about the format here: + https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format + + NOTE: The minimum interval time between recurring transfers + depends on the data source; refer to the documentation for + your data source. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Specifies time to start scheduling transfer + runs. The first run will be scheduled at or + after the start time according to a recurrence + pattern defined in the schedule string. The + start time can be changed at any moment. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Defines time to stop scheduling transfer + runs. A transfer run cannot be scheduled at or + after the end time. 
The end time can be changed + at any moment. + """ + + schedule: str = proto.Field( + proto.STRING, + number=1, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class ManualSchedule(proto.Message): + r"""Options customizing manual transfers schedule.""" + + +class EventDrivenSchedule(proto.Message): + r"""Options customizing EventDriven transfers schedule. + + Attributes: + pubsub_subscription (str): + Pub/Sub subscription name used to receive + events. Only Google Cloud Storage data source + support this option. Format: + projects/{project}/subscriptions/{subscription} + """ + + pubsub_subscription: str = proto.Field( + proto.STRING, + number=1, + ) + + class UserInfo(proto.Message): r"""Information about a user. @@ -222,6 +350,11 @@ class TransferConfig(proto.Message): schedule_options (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptions): Options customizing the data transfer schedule. + schedule_options_v2 (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptionsV2): + Options customizing different types of data transfer + schedule. This field replaces "schedule" and + "schedule_options" fields. ScheduleOptionsV2 cannot be used + together with ScheduleOptions/Schedule. data_refresh_window_days (int): The number of days to look back to automatically refresh the data. For example, if ``data_refresh_window_days = 10``, @@ -274,6 +407,10 @@ class TransferConfig(proto.Message): effect. Write methods will apply the key if it is present, or otherwise try to apply project default keys if it is absent. + error (google.rpc.status_pb2.Status): + Output only. Error code with detailed + information about reason of the latest config + failure. 
""" name: str = proto.Field( @@ -307,6 +444,11 @@ class TransferConfig(proto.Message): number=24, message="ScheduleOptions", ) + schedule_options_v2: "ScheduleOptionsV2" = proto.Field( + proto.MESSAGE, + number=31, + message="ScheduleOptionsV2", + ) data_refresh_window_days: int = proto.Field( proto.INT32, number=12, @@ -358,6 +500,11 @@ class TransferConfig(proto.Message): number=28, message="EncryptionConfiguration", ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=32, + message=status_pb2.Status, + ) class EncryptionConfiguration(proto.Message): diff --git a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index dd8310150319..4c0bdf899c2a 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "3.15.5" + "version": "3.16.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index b15240ba8d36..f144355cd636 100644 --- a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -35,6 +35,7 @@ from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 from google.oauth2 import 
service_account +from google.protobuf import any_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format @@ -1407,22 +1408,23 @@ async def test_get_data_source_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_source - ] = mock_object + ] = mock_rpc request = {} await client.get_data_source(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1819,22 +1821,23 @@ async def test_list_data_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_sources - ] = mock_object + ] = mock_rpc request = {} await client.list_data_sources(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2442,22 +2445,23 @@ async def test_create_transfer_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_transfer_config - ] = mock_object + ] = mock_rpc request = {} await client.create_transfer_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_transfer_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2890,22 +2894,23 @@ async def test_update_transfer_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_transfer_config - ] = mock_object + ] = mock_rpc request = {} await client.update_transfer_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_transfer_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3299,22 +3304,23 @@ async def test_delete_transfer_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_transfer_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_transfer_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_transfer_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3705,22 +3711,23 @@ async def test_get_transfer_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transfer_config - ] = mock_object + ] = mock_rpc request = {} await client.get_transfer_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transfer_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4112,22 +4119,23 @@ async def test_list_transfer_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transfer_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_transfer_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transfer_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4697,22 +4705,23 @@ async def test_schedule_transfer_runs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.schedule_transfer_runs - ] = mock_object + ] = mock_rpc request = {} await client.schedule_transfer_runs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.schedule_transfer_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5097,22 +5106,23 @@ async def test_start_manual_transfer_runs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_manual_transfer_runs - ] = mock_object + ] = mock_rpc request = {} await client.start_manual_transfer_runs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_manual_transfer_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5401,22 +5411,23 @@ async def test_get_transfer_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transfer_run - ] = mock_object + ] = mock_rpc request = {} await client.get_transfer_run(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transfer_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5780,22 +5791,23 @@ async def test_delete_transfer_run_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_transfer_run - ] = mock_object + ] = mock_rpc request = {} await client.delete_transfer_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_transfer_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6159,22 +6171,23 @@ async def test_list_transfer_runs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transfer_runs - ] = mock_object + ] = mock_rpc request = {} await client.list_transfer_runs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transfer_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6749,22 +6762,23 @@ async def test_list_transfer_logs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transfer_logs - ] = mock_object + ] = mock_rpc request = {} await client.list_transfer_logs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transfer_logs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7335,22 +7349,23 @@ async def test_check_valid_creds_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_valid_creds - ] = mock_object + ] = mock_rpc request = {} await client.check_valid_creds(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_valid_creds(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7714,22 +7729,23 @@ async def test_enroll_data_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.enroll_data_sources - ] = mock_object + ] = mock_rpc request = {} await client.enroll_data_sources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.enroll_data_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8001,22 +8017,23 @@ async def test_unenroll_data_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.unenroll_data_sources - ] = mock_object + ] = mock_rpc request = {} await client.unenroll_data_sources(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.unenroll_data_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8864,6 +8881,17 @@ def test_create_transfer_config_rest(request_type): "start_time": {"seconds": 751, "nanos": 543}, "end_time": {}, }, + "schedule_options_v2": { + "time_based_schedule": { + "schedule": "schedule_value", + "start_time": {}, + "end_time": {}, + }, + "manual_schedule": {}, + "event_driven_schedule": { + "pubsub_subscription": "pubsub_subscription_value" + }, + }, "data_refresh_window_days": 2543, "disabled": True, "update_time": {}, @@ -8875,6 +8903,16 @@ def test_create_transfer_config_rest(request_type): "email_preferences": {"enable_failure_email": True}, "owner_info": {"email": "email_value"}, "encryption_configuration": {"kms_key_name": {"value": "value_value"}}, + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -9311,6 +9349,17 @@ def test_update_transfer_config_rest(request_type): "start_time": {"seconds": 751, "nanos": 543}, "end_time": {}, }, + "schedule_options_v2": { + "time_based_schedule": { + "schedule": "schedule_value", + "start_time": {}, + "end_time": {}, + }, + "manual_schedule": {}, + "event_driven_schedule": { + "pubsub_subscription": "pubsub_subscription_value" + }, + }, "data_refresh_window_days": 2543, "disabled": True, "update_time": {}, @@ -9322,6 +9371,16 @@ def test_update_transfer_config_rest(request_type): "email_preferences": {"enable_failure_email": True}, "owner_info": {"email": "email_value"}, "encryption_configuration": {"kms_key_name": {"value": "value_value"}}, + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py index 91e6b04fad21..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py +++ b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.4.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py index 91e6b04fad21..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.4.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py index 57646b556e81..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py index 57646b556e81..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.11.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py index 53e74a3d1eb8..bd489ab5e971 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MigrationServiceClient).get_transport_class, type(MigrationServiceClient) - ) + get_transport_class = MigrationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py index 960e0603769c..87b6df7fd5f2 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py @@ -693,7 +693,7 @@ def __init__( Type[MigrationServiceTransport], Callable[..., MigrationServiceTransport], ] = ( - type(self).get_transport_class(transport) + MigrationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MigrationServiceTransport], transport) ) diff --git 
a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py index 57646b556e81..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py index 1a664bea858b..78fbfa6e9f1f 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MigrationServiceClient).get_transport_class, type(MigrationServiceClient) - ) + get_transport_class = MigrationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py index 724464d77f11..32164e645021 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py @@ -693,7 +693,7 @@ def __init__( Type[MigrationServiceTransport], Callable[..., MigrationServiceTransport], ] = ( - type(self).get_transport_class(transport) + MigrationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MigrationServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py index 956f35ad59f2..c9f47f4dec54 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,10 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SqlTranslationServiceClient).get_transport_class, - type(SqlTranslationServiceClient), - ) + get_transport_class = SqlTranslationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py index 8fc2dce51fe9..ff48b7f2af4b 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py @@ -644,7 +644,7 @@ def __init__( Type[SqlTranslationServiceTransport], Callable[..., SqlTranslationServiceTransport], ] = ( - type(self).get_transport_class(transport) + SqlTranslationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SqlTranslationServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json index e79de74a1e1e..eb49516af3a7 100644 --- a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-migration", - "version": 
"0.11.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json index 040fcb245230..14575559ea90 100644 --- a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-migration", - "version": "0.11.9" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py index 82dab12d415f..e77bc503ba08 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py @@ -1306,22 +1306,23 @@ async def test_create_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.create_migration_workflow(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1714,22 +1715,23 @@ async def test_get_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.get_migration_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2108,22 +2110,23 @@ async def test_list_migration_workflows_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_migration_workflows - ] = mock_object + ] = mock_rpc request = {} await client.list_migration_workflows(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_migration_workflows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2695,22 +2698,23 @@ async def test_delete_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.delete_migration_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3067,22 +3071,23 @@ async def test_start_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.start_migration_workflow(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3458,22 +3463,23 @@ async def test_get_migration_subtask_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_migration_subtask - ] = mock_object + ] = mock_rpc request = {} await client.get_migration_subtask(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_migration_subtask(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3858,22 +3864,23 @@ async def test_list_migration_subtasks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_migration_subtasks - ] = mock_object + ] = mock_rpc request = {} await client.list_migration_subtasks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_migration_subtasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py index 4f501bf67df1..a990375a119a 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py @@ -1303,22 +1303,23 @@ async def test_create_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.create_migration_workflow(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1711,22 +1712,23 @@ async def test_get_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.get_migration_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2105,22 +2107,23 @@ async def test_list_migration_workflows_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_migration_workflows - ] = mock_object + ] = mock_rpc request = {} await client.list_migration_workflows(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_migration_workflows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2692,22 +2695,23 @@ async def test_delete_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.delete_migration_workflow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3064,22 +3068,23 @@ async def test_start_migration_workflow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_migration_workflow - ] = mock_object + ] = mock_rpc request = {} await client.start_migration_workflow(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_migration_workflow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3455,22 +3460,23 @@ async def test_get_migration_subtask_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_migration_subtask - ] = mock_object + ] = mock_rpc request = {} await client.get_migration_subtask(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_migration_subtask(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3855,22 +3861,23 @@ async def test_list_migration_subtasks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_migration_subtasks - ] = mock_object + ] = mock_rpc request = {} await client.list_migration_subtasks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_migration_subtasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py index a2d9f7d9467b..47459b395757 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py @@ -1305,22 +1305,23 @@ async def test_translate_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.translate_query - ] = mock_object + ] = mock_rpc request = {} await client.translate_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.translate_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py index f192c1b4f03b..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py index f192c1b4f03b..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.13.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py index be30eabd4aee..5975332939a1 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -223,10 +222,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ReservationServiceClient).get_transport_class, - type(ReservationServiceClient), - ) + get_transport_class = ReservationServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py index 53ffea8a0a86..30e6b9de73dd 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py @@ -754,7 +754,7 @@ def __init__( Type[ReservationServiceTransport], Callable[..., ReservationServiceTransport], ] = ( - type(self).get_transport_class(transport) + ReservationServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., 
ReservationServiceTransport], transport) ) diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json b/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json index 71d0f72acf58..9fa664c51cab 100644 --- a/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-reservation", - "version": "1.13.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py index 468d0a25becf..0461f2e9e671 100644 --- a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py +++ b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py @@ -1360,22 +1360,23 @@ async def test_create_reservation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_reservation - ] = mock_object + ] = mock_rpc request = {} await client.create_reservation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1777,22 +1778,23 @@ async def test_list_reservations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_reservations - ] = mock_object + ] = mock_rpc request = {} await client.list_reservations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_reservations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2368,22 +2370,23 @@ async def test_get_reservation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_reservation - ] = mock_object + ] = mock_rpc request = {} await client.get_reservation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2747,22 +2750,23 @@ async def test_delete_reservation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_reservation - ] = mock_object + ] = mock_rpc request = {} await client.delete_reservation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3135,22 +3139,23 @@ async def test_update_reservation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_reservation - ] = mock_object + ] = mock_rpc request = {} await client.update_reservation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3563,22 +3568,23 @@ async def test_create_capacity_commitment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_capacity_commitment - ] = mock_object + ] = mock_rpc request = {} await client.create_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3975,22 +3981,23 @@ async def test_list_capacity_commitments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_capacity_commitments - ] = mock_object + ] = mock_rpc request = {} await client.list_capacity_commitments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_capacity_commitments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4585,22 +4592,23 @@ async def test_get_capacity_commitment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_capacity_commitment - ] = mock_object + ] = mock_rpc request = {} await client.get_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4978,22 +4986,23 @@ async def test_delete_capacity_commitment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_capacity_commitment - ] = mock_object + ] = mock_rpc request = {} await client.delete_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5371,22 +5380,23 @@ async def test_update_capacity_commitment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_capacity_commitment - ] = mock_object + ] = mock_rpc request = {} await client.update_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5776,22 +5786,23 @@ async def test_split_capacity_commitment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.split_capacity_commitment - ] = mock_object + ] = mock_rpc request = {} await client.split_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.split_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6189,22 +6200,23 @@ async def test_merge_capacity_commitments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.merge_capacity_commitments - ] = mock_object + ] = mock_rpc request = {} await client.merge_capacity_commitments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.merge_capacity_commitments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6607,22 +6619,23 @@ async def test_create_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_assignment - ] = mock_object + ] = mock_rpc request = {} await client.create_assignment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7001,22 +7014,23 @@ async def test_list_assignments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assignments - ] = mock_object + ] = mock_rpc request = {} await client.list_assignments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assignments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7562,22 +7576,23 @@ async def test_delete_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_assignment - ] = mock_object + ] = mock_rpc request = {} await client.delete_assignment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7943,22 +7958,23 @@ async def test_search_assignments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_assignments - ] = mock_object + ] = mock_rpc request = {} await client.search_assignments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_assignments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8546,22 +8562,23 @@ async def test_search_all_assignments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_all_assignments - ] = mock_object + ] = mock_rpc request = {} await client.search_all_assignments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_all_assignments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9146,22 +9163,23 @@ async def test_move_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_assignment - ] = mock_object + ] = mock_rpc request = {} await client.move_assignment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.move_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9541,22 +9559,23 @@ async def test_update_assignment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_assignment - ] = mock_object + ] = mock_rpc request = {} await client.update_assignment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_assignment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9946,22 +9965,23 @@ async def test_get_bi_reservation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_bi_reservation - ] = mock_object + ] = mock_rpc request = {} await client.get_bi_reservation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_bi_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10334,22 +10354,23 @@ async def test_update_bi_reservation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_bi_reservation - ] = mock_object + ] = mock_rpc request = {} await client.update_bi_reservation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_bi_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py index d763e39c5d4d..ecb32d617b1e 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BudgetServiceClient).get_transport_class, type(BudgetServiceClient) - ) + get_transport_class = BudgetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py index 032c3ed2b7f2..2095fbefdf66 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py @@ -660,7 +660,7 @@ def __init__( transport_init: Union[ Type[BudgetServiceTransport], Callable[..., BudgetServiceTransport] ] = ( - type(self).get_transport_class(transport) + BudgetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BudgetServiceTransport], transport) ) diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py 
b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py index 5464dff981fa..558c8aab67c5 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py index fab157f19282..0c14a2ea58ce 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BudgetServiceClient).get_transport_class, type(BudgetServiceClient) - ) + get_transport_class = BudgetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py index 55b70b6245b2..8407b63a3c79 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py @@ -656,7 +656,7 @@ def __init__( transport_init: Union[ Type[BudgetServiceTransport], Callable[..., BudgetServiceTransport] ] = ( - type(self).get_transport_class(transport) + BudgetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BudgetServiceTransport], transport) ) diff --git a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json index 3f2e755b485e..9f6a8ee49f17 100644 --- a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json +++ b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing-budgets", - "version": "1.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json index 5ef7fcec9913..33b9f5b1220a 100644 --- a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json +++ b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing-budgets", - "version": "1.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py index cdb8e9652c9c..d62bb8f78e48 100644 --- a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py +++ b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py @@ -1290,22 +1290,23 @@ async def test_create_budget_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_budget - ] = mock_object + ] = mock_rpc request = {} await client.create_budget(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1666,22 +1667,23 @@ async def test_update_budget_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_budget - ] = mock_object + ] = mock_rpc request = {} await client.update_budget(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2044,22 +2046,23 @@ async def test_get_budget_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_budget - ] = mock_object + ] = mock_rpc request = {} await client.get_budget(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2412,22 +2415,23 @@ async def test_list_budgets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_budgets - ] = mock_object + ] = mock_rpc request = {} await client.list_budgets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_budgets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2963,22 +2967,23 @@ async def test_delete_budget_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_budget - ] = mock_object + ] = mock_rpc request = {} await client.delete_budget(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py index 0abcd892dd5c..b61257bd43de 100644 --- a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py +++ b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py @@ -1267,22 +1267,23 @@ async def test_create_budget_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_budget - ] = mock_object + ] = mock_rpc request = {} await client.create_budget(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1553,22 +1554,23 @@ async def test_update_budget_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_budget - ] = mock_object + ] = mock_rpc request = {} await client.update_budget(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1841,22 +1843,23 @@ async def test_get_budget_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_budget - ] = mock_object + ] = mock_rpc request = {} await client.get_budget(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2129,22 +2132,23 @@ async def test_list_budgets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_budgets - ] = mock_object + ] = mock_rpc request = {} await client.list_budgets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_budgets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2598,22 +2602,23 @@ async def test_delete_budget_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_budget - ] = mock_object + ] = mock_rpc request = {} await client.delete_budget(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_budget(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py index 6748388713a3..558c8aab67c5 100644 --- a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py +++ b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.13.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py index 6748388713a3..558c8aab67c5 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.6" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/async_client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/async_client.py index 382147acd562..f31888507582 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/async_client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudBillingClient).get_transport_class, type(CloudBillingClient) - ) + get_transport_class = CloudBillingClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py index 82e7c612e5f5..c31f665736cb 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py @@ -698,7 +698,7 @@ def __init__( transport_init: Union[ Type[CloudBillingTransport], Callable[..., CloudBillingTransport] ] = ( - type(self).get_transport_class(transport) + CloudBillingClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudBillingTransport], transport) ) diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/async_client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/async_client.py index ff4ffbbecf79..b76b512fe9d6 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/async_client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudCatalogClient).get_transport_class, type(CloudCatalogClient) - ) + get_transport_class = CloudCatalogClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py index 36d530d4435d..864deed1c10e 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py @@ -668,7 +668,7 @@ def __init__( transport_init: Union[ Type[CloudCatalogTransport], Callable[..., CloudCatalogTransport] ] = ( - type(self).get_transport_class(transport) + CloudCatalogClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudCatalogTransport], transport) ) diff --git a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json index eea27046c944..94a6ddeb90f9 100644 --- a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json +++ b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing", - "version": "1.13.6" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py index 6368685618ec..e81305c3c0d8 100644 --- 
a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py +++ b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py @@ -1291,22 +1291,23 @@ async def test_get_billing_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_billing_account - ] = mock_object + ] = mock_rpc request = {} await client.get_billing_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_billing_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1690,22 +1691,23 @@ async def test_list_billing_accounts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_billing_accounts - ] = mock_object + ] = mock_rpc request = {} await client.list_billing_accounts(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_billing_accounts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2224,22 +2226,23 @@ async def test_update_billing_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_billing_account - ] = mock_object + ] = mock_rpc request = {} await client.update_billing_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_billing_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2642,22 +2645,23 @@ async def test_create_billing_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_billing_account - ] = mock_object + ] = mock_rpc request = {} await client.create_billing_account(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_billing_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2985,22 +2989,23 @@ async def test_list_project_billing_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_project_billing_info - ] = mock_object + ] = mock_rpc request = {} await client.list_project_billing_info(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_project_billing_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3586,22 +3591,23 @@ async def test_get_project_billing_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_project_billing_info - ] = mock_object + ] = mock_rpc request = {} await client.get_project_billing_info(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_project_billing_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3989,22 +3995,23 @@ async def test_update_project_billing_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_project_billing_info - ] = mock_object + ] = mock_rpc request = {} await client.update_project_billing_info(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_project_billing_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4383,22 +4390,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4765,22 +4773,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5157,22 +5166,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5587,22 +5597,23 @@ async def test_move_billing_account_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_billing_account - ] = mock_object + ] = mock_rpc request = {} await client.move_billing_account(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.move_billing_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py index b7a803137581..ae18f1a72cb5 100644 --- a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py +++ b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py @@ -1263,22 +1263,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1677,22 +1678,23 @@ async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_skus - ] = mock_object + ] = mock_rpc request = {} await client.list_skus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/async_client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/async_client.py index dbdb1c3e69ce..20913d2aee62 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/async_client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -212,10 +211,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BinauthzManagementServiceV1Client).get_transport_class, - type(BinauthzManagementServiceV1Client), - ) + get_transport_class = BinauthzManagementServiceV1Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py index ef855ccedc69..91142cfbeb1e 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py @@ -694,7 +694,7 @@ def __init__( Type[BinauthzManagementServiceV1Transport], Callable[..., BinauthzManagementServiceV1Transport], ] = ( - type(self).get_transport_class(transport) + BinauthzManagementServiceV1Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., BinauthzManagementServiceV1Transport], transport diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/async_client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/async_client.py index 2438906210b4..609494774d87 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/async_client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/async_client.py @@ -14,7 +14,6 @@ # limitations under the 
License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SystemPolicyV1Client).get_transport_class, type(SystemPolicyV1Client) - ) + get_transport_class = SystemPolicyV1Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py index afd3a44b101a..592df00e4ec6 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py @@ -654,7 +654,7 @@ def __init__( transport_init: Union[ Type[SystemPolicyV1Transport], Callable[..., SystemPolicyV1Transport] ] = ( - type(self).get_transport_class(transport) + SystemPolicyV1Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SystemPolicyV1Transport], transport) ) diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/async_client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/async_client.py index 1c302f4e447a..8570c32538c9 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/async_client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -184,10 +183,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ValidationHelperV1Client).get_transport_class, - type(ValidationHelperV1Client), - ) + get_transport_class = ValidationHelperV1Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py index 3916342a1680..bf501c96eb59 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py @@ -642,7 +642,7 @@ def __init__( Type[ValidationHelperV1Transport], Callable[..., ValidationHelperV1Transport], ] = ( - type(self).get_transport_class(transport) + ValidationHelperV1Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ValidationHelperV1Transport], transport) ) diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/async_client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/async_client.py index 0b31fdffeda5..ebe7a778f8be 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/async_client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,10 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BinauthzManagementServiceV1Beta1Client).get_transport_class, - type(BinauthzManagementServiceV1Beta1Client), - ) + get_transport_class = BinauthzManagementServiceV1Beta1Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py index ca6f0c167713..a2e1b58e9735 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py @@ -701,7 +701,7 @@ def __init__( Type[BinauthzManagementServiceV1Beta1Transport], Callable[..., 
BinauthzManagementServiceV1Beta1Transport], ] = ( - type(self).get_transport_class(transport) + BinauthzManagementServiceV1Beta1Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., BinauthzManagementServiceV1Beta1Transport], transport diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/async_client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/async_client.py index 4b813f6005a2..d041cd8db3e8 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/async_client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,10 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SystemPolicyV1Beta1Client).get_transport_class, - type(SystemPolicyV1Beta1Client), - ) + get_transport_class = SystemPolicyV1Beta1Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py index b10b186fb75a..b4df4ff9e7e8 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py @@ -659,7 +659,7 @@ def __init__( 
Type[SystemPolicyV1Beta1Transport], Callable[..., SystemPolicyV1Beta1Transport], ] = ( - type(self).get_transport_class(transport) + SystemPolicyV1Beta1Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SystemPolicyV1Beta1Transport], transport) ) diff --git a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json index 622c68895a94..6d7a035dbe2d 100644 --- a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json +++ b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-binary-authorization", - "version": "1.10.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json index 75ffc586f4c9..c60d5267337f 100644 --- a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json +++ b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-binary-authorization", - "version": "1.10.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py 
b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py index 06cef592aa2a..56f56b95d565 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py @@ -1377,22 +1377,23 @@ async def test_get_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1749,22 +1750,23 @@ async def test_update_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2121,22 +2123,23 @@ async def test_create_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_attestor - ] = mock_object + ] = mock_rpc request = {} await client.create_attestor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2506,22 +2509,23 @@ async def test_get_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_attestor - ] = mock_object + ] = mock_rpc request = {} await client.get_attestor(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2867,22 +2871,23 @@ async def test_update_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_attestor - ] = mock_object + ] = mock_rpc request = {} await client.update_attestor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3231,22 +3236,23 @@ async def test_list_attestors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_attestors - ] = mock_object + ] = mock_rpc request = {} await client.list_attestors(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_attestors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3782,22 +3788,23 @@ async def test_delete_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_attestor - ] = mock_object + ] = mock_rpc request = {} await client.delete_attestor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py index 47c93f794dad..b1d37d46380b 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py @@ -1304,22 +1304,23 @@ async def test_get_system_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_system_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_system_policy(request) # Establish that the underlying gRPC 
stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_system_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py index 553869031193..475ff6fdcb0b 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py @@ -1344,22 +1344,23 @@ async def test_validate_attestation_occurrence_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_attestation_occurrence - ] = mock_object + ] = mock_rpc request = {} await client.validate_attestation_occurrence(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.validate_attestation_occurrence(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py index aa245ba21baa..88a803ec0651 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py @@ -1399,22 +1399,23 @@ async def test_get_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1771,22 +1772,23 @@ async def test_update_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2143,22 +2145,23 @@ async def test_create_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_attestor - ] = mock_object + ] = mock_rpc request = {} await client.create_attestor(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2528,22 +2531,23 @@ async def test_get_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_attestor - ] = mock_object + ] = mock_rpc request = {} await client.get_attestor(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2889,22 +2893,23 @@ async def test_update_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_attestor - ] = mock_object + ] = mock_rpc request = {} await client.update_attestor(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3253,22 +3258,23 @@ async def test_list_attestors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_attestors - ] = mock_object + ] = mock_rpc request = {} await client.list_attestors(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_attestors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3804,22 +3810,23 @@ async def test_delete_attestor_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_attestor - ] = mock_object + ] = mock_rpc request = {} await client.delete_attestor(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_attestor(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py index cc0892c0e8dd..3a97b5f5de3f 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py @@ -1368,22 +1368,23 @@ async def test_get_system_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_system_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_system_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_system_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-build/CHANGELOG.md b/packages/google-cloud-build/CHANGELOG.md index 619b570d55f6..fb07ad800441 100644 --- a/packages/google-cloud-build/CHANGELOG.md +++ b/packages/google-cloud-build/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-build/#history +## [3.25.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.24.2...google-cloud-build-v3.25.0) (2024-09-23) + + +### Features + +* Add LEGACY_BUCKET option to DefaultLogsBucketBehavior ([e889809](https://github.com/googleapis/google-cloud-python/commit/e889809389c5b194ec77955664eb2859cde28d73)) + + +### Documentation + +* Sanitize docs ([e889809](https://github.com/googleapis/google-cloud-python/commit/e889809389c5b194ec77955664eb2859cde28d73)) + ## [3.24.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.24.1...google-cloud-build-v3.24.2) (2024-07-30) diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py index 8f1bcb93f69d..8adcea73e25d 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.24.2" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py index 8f1bcb93f69d..8adcea73e25d 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.24.2" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py index 647a6f3c5819..45b787724a85 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -222,9 +221,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudBuildClient).get_transport_class, type(CloudBuildClient) - ) + get_transport_class = CloudBuildClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py index 88ac1c97bcbc..2a69fbe66407 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -870,7 +870,7 @@ def __init__( transport_init: Union[ Type[CloudBuildTransport], Callable[..., CloudBuildTransport] ] = ( - type(self).get_transport_class(transport) + CloudBuildClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudBuildTransport], transport) ) diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py index a10715d0ffdf..995ae202614c 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -3278,7 +3278,7 @@ class LoggingMode(proto.Enum): NONE = 4 class DefaultLogsBucketBehavior(proto.Enum): - r"""Default GCS log bucket behavior options. + r"""Default Cloud Storage log bucket behavior options. Values: DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED (0): @@ -3287,10 +3287,15 @@ class DefaultLogsBucketBehavior(proto.Enum): Bucket is located in user-owned project in the same region as the build. 
The builder service account must have access to create and - write to GCS buckets in the build project. + write to Cloud Storage buckets in the build + project. + LEGACY_BUCKET (2): + Bucket is located in a Google-owned project + and is not regionalized. """ DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED = 0 REGIONAL_USER_OWNED_BUCKET = 1 + LEGACY_BUCKET = 2 class PoolOption(proto.Message): r"""Details about how a build should be executed on a ``WorkerPool``. diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py index 8f1bcb93f69d..8adcea73e25d 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.24.2" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py index 2c8a90b23aa1..58ba51b37193 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RepositoryManagerClient).get_transport_class, type(RepositoryManagerClient) - ) + get_transport_class = RepositoryManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py index ed93324a78ed..8fb9cdfea9af 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py @@ -747,7 +747,7 @@ def __init__( Type[RepositoryManagerTransport], Callable[..., RepositoryManagerTransport], ] = ( - type(self).get_transport_class(transport) + RepositoryManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RepositoryManagerTransport], transport) ) diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json index 7a83c7c8d4e3..66e42a84ba95 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "3.24.2" + "version": "3.25.0" }, "snippets": [ { diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json 
b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json index 3c49b3ef58be..f4891e033575 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "3.24.2" + "version": "3.25.0" }, "snippets": [ { diff --git a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index 87a888133dd9..1135f9395b81 100644 --- a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -1202,8 +1202,9 @@ def test_create_build_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_build(request) @@ -1257,26 +1258,28 @@ async def test_create_build_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_build - ] = mock_object + ] = mock_rpc request = {} await client.create_build(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_build(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1629,22 +1632,23 @@ async def test_get_build_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_build - ] = mock_object + ] = mock_rpc request = {} await client.get_build(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_build(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1992,22 +1996,23 @@ async def test_list_builds_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_builds - ] = mock_object + ] = mock_rpc request = {} await client.list_builds(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_builds(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2556,22 +2561,23 @@ async def test_cancel_build_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_build - ] = mock_object + ] = mock_rpc request = {} await client.cancel_build(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_build(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2861,8 +2867,9 @@ def test_retry_build_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.retry_build(request) @@ -2916,26 +2923,28 @@ async def test_retry_build_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.retry_build - ] = mock_object + ] = mock_rpc request = {} await client.retry_build(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.retry_build(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3200,8 +3209,9 @@ def test_approve_build_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.approve_build(request) @@ -3255,26 +3265,28 @@ async def test_approve_build_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.approve_build - ] = mock_object + ] = mock_rpc request = {} await client.approve_build(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.approve_build(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3645,22 +3657,23 @@ async def test_create_build_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_build_trigger - ] = mock_object + ] = mock_rpc request = {} await client.create_build_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_build_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4052,22 +4065,23 @@ async def test_get_build_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_build_trigger - ] = mock_object + ] = mock_rpc request = {} await client.get_build_trigger(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_build_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4433,22 +4447,23 @@ async def test_list_build_triggers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_build_triggers - ] = mock_object + ] = mock_rpc request = {} await client.list_build_triggers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_build_triggers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4978,22 +4993,23 @@ async def test_delete_build_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_build_trigger - ] = mock_object + ] = mock_rpc request = {} await client.delete_build_trigger(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_build_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5360,22 +5376,23 @@ async def test_update_build_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_build_trigger - ] = mock_object + ] = mock_rpc request = {} await client.update_build_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_build_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5695,8 +5712,9 @@ def test_run_build_trigger_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_build_trigger(request) @@ -5752,26 +5770,28 @@ async def test_run_build_trigger_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_build_trigger - ] = mock_object + ] = mock_rpc request = {} await client.run_build_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_build_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6124,22 +6144,23 @@ async def test_receive_trigger_webhook_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.receive_trigger_webhook - ] = mock_object + ] = mock_rpc request = {} await client.receive_trigger_webhook(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.receive_trigger_webhook(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6368,8 +6389,9 @@ def test_create_worker_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_worker_pool(request) @@ -6425,26 +6447,28 @@ async def test_create_worker_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_worker_pool - ] = mock_object + ] = mock_rpc request = {} await client.create_worker_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_worker_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6795,22 +6819,23 @@ async def test_get_worker_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_worker_pool - ] = mock_object + ] = mock_rpc request = {} await client.get_worker_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_worker_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7088,8 +7113,9 @@ def test_delete_worker_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_worker_pool(request) @@ -7145,26 +7171,28 @@ async def test_delete_worker_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_worker_pool - ] = mock_object + ] = mock_rpc request = {} await client.delete_worker_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_worker_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7433,8 +7461,9 @@ def test_update_worker_pool_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_worker_pool(request) @@ -7490,26 +7519,28 @@ async def test_update_worker_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_worker_pool - ] = mock_object + ] = mock_rpc request = {} await client.update_worker_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_worker_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7854,22 +7885,23 @@ async def test_list_worker_pools_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_worker_pools - ] = mock_object + ] = mock_rpc request = {} await client.list_worker_pools(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_worker_pools(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py index bf87b1ff1fd3..16354f791f6f 100644 --- a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py +++ b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py @@ -1290,8 +1290,9 @@ def test_create_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_connection(request) @@ -1347,26 +1348,28 @@ async def test_create_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_connection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1751,22 +1754,23 @@ async def test_get_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2125,22 +2129,23 @@ async def test_list_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_connections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2635,8 +2640,9 @@ def test_update_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_connection(request) @@ -2692,26 +2698,28 @@ async def test_update_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_connection - ] = mock_object + ] = mock_rpc request = {} await client.update_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3031,8 +3039,9 @@ def test_delete_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_connection(request) @@ -3088,26 +3097,28 @@ async def test_delete_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3417,8 +3428,9 @@ def test_create_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_repository(request) @@ -3474,26 +3486,28 @@ async def test_create_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_repository - ] = mock_object + ] = mock_rpc request = {} await client.create_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3824,8 +3838,9 @@ def test_batch_create_repositories_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_create_repositories(request) @@ -3881,26 +3896,28 @@ async def test_batch_create_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_repositories - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_repositories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_create_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4276,22 +4293,23 @@ async def test_get_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_repository - ] = mock_object + ] = mock_rpc request = {} await client.get_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4660,22 +4678,23 @@ async def test_list_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_repositories - ] = mock_object + ] = mock_rpc request = {} await client.list_repositories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5190,8 +5209,9 @@ def test_delete_repository_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_repository(request) @@ -5247,26 +5267,28 @@ async def test_delete_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_repository - ] = mock_object + ] = mock_rpc request = {} await client.delete_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5635,22 +5657,23 @@ async def test_fetch_read_write_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_read_write_token - ] = mock_object + ] = mock_rpc request = {} await client.fetch_read_write_token(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_read_write_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6012,22 +6035,23 @@ async def test_fetch_read_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_read_token - ] = mock_object + ] = mock_rpc request = {} await client.fetch_read_token(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_read_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6391,22 +6415,23 @@ async def test_fetch_linkable_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_linkable_repositories - ] = mock_object + ] = mock_rpc request = {} await client.fetch_linkable_repositories(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_linkable_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6884,22 +6909,23 @@ async def test_fetch_git_refs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_git_refs - ] = mock_object + ] = mock_rpc request = {} await client.fetch_git_refs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_git_refs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py index f1e9cb0c0d05..558c8aab67c5 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.7.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py index f1e9cb0c0d05..558c8aab67c5 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/async_client.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/async_client.py index 3872daefc9ff..5384984359aa 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/async_client.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -260,10 +259,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CertificateManagerClient).get_transport_class, - type(CertificateManagerClient), - ) + get_transport_class = CertificateManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py index 0f1213ccd73a..958164c19471 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py @@ -844,7 +844,7 @@ def __init__( Type[CertificateManagerTransport], Callable[..., CertificateManagerTransport], ] = ( - type(self).get_transport_class(transport) + CertificateManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CertificateManagerTransport], transport) ) diff --git a/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json b/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json index d66436431ed1..578413176930 100644 --- a/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json +++ b/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-certificate-manager", - "version": "1.7.2" + "version": "0.1.0" }, "snippets": 
[ { diff --git a/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py b/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py index 2cea96180b25..1eaad9a4dfa1 100644 --- a/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py +++ b/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py @@ -1363,22 +1363,23 @@ async def test_list_certificates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificates - ] = mock_object + ] = mock_rpc request = {} await client.list_certificates(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1954,22 +1955,23 @@ async def test_get_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2283,8 +2285,9 @@ def test_create_certificate_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_certificate(request) @@ -2340,26 +2343,28 @@ async def test_create_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_certificate - ] = mock_object + ] = mock_rpc request = {} await client.create_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2686,8 +2691,9 @@ def test_update_certificate_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_certificate(request) @@ -2743,26 +2749,28 @@ async def test_update_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_certificate - ] = mock_object + ] = mock_rpc request = {} await client.update_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3083,8 +3091,9 @@ def test_delete_certificate_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_certificate(request) @@ -3140,26 +3149,28 @@ async def test_delete_certificate_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_certificate - ] = mock_object + ] = mock_rpc request = {} await client.delete_certificate(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3538,22 +3549,23 @@ async def test_list_certificate_maps_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificate_maps - ] = mock_object + ] = mock_rpc request = {} await client.list_certificate_maps(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificate_maps(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4132,22 +4144,23 @@ async def test_get_certificate_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate_map - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate_map(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4466,8 +4479,9 @@ def test_create_certificate_map_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_certificate_map(request) @@ -4523,26 +4537,28 @@ async def test_create_certificate_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_certificate_map - ] = mock_object + ] = mock_rpc request = {} await client.create_certificate_map(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_certificate_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4870,8 +4886,9 @@ def test_update_certificate_map_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_certificate_map(request) @@ -4927,26 +4944,28 @@ async def test_update_certificate_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_certificate_map - ] = mock_object + ] = mock_rpc request = {} await client.update_certificate_map(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_certificate_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5268,8 +5287,9 @@ def test_delete_certificate_map_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_certificate_map(request) @@ -5325,26 +5345,28 @@ async def test_delete_certificate_map_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_certificate_map - ] = mock_object + ] = mock_rpc request = {} await client.delete_certificate_map(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_certificate_map(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5723,22 +5745,23 @@ async def test_list_certificate_map_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificate_map_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_certificate_map_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificate_map_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6331,22 +6354,23 @@ async def test_get_certificate_map_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate_map_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate_map_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate_map_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6669,8 +6693,9 @@ def test_create_certificate_map_entry_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_certificate_map_entry(request) @@ -6726,26 +6751,28 @@ async def test_create_certificate_map_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_certificate_map_entry - ] = mock_object + ] = mock_rpc request = {} await client.create_certificate_map_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_certificate_map_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7081,8 +7108,9 @@ def test_update_certificate_map_entry_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_certificate_map_entry(request) @@ -7138,26 +7166,28 @@ async def test_update_certificate_map_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_certificate_map_entry - ] = mock_object + ] = mock_rpc request = {} await client.update_certificate_map_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_certificate_map_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7487,8 +7517,9 @@ def test_delete_certificate_map_entry_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_certificate_map_entry(request) @@ -7544,26 +7575,28 @@ async def test_delete_certificate_map_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_certificate_map_entry - ] = mock_object + ] = mock_rpc request = {} await client.delete_certificate_map_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_certificate_map_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7942,22 +7975,23 @@ async def test_list_dns_authorizations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_dns_authorizations - ] = mock_object + ] = mock_rpc request = {} await client.list_dns_authorizations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_dns_authorizations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8545,22 +8579,23 @@ async def test_get_dns_authorization_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dns_authorization - ] = mock_object + ] = mock_rpc request = {} await client.get_dns_authorization(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dns_authorization(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8883,8 +8918,9 @@ def test_create_dns_authorization_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_dns_authorization(request) @@ -8940,26 +8976,28 @@ async def test_create_dns_authorization_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_dns_authorization - ] = mock_object + ] = mock_rpc request = {} await client.create_dns_authorization(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_dns_authorization(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9287,8 +9325,9 @@ def test_update_dns_authorization_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_dns_authorization(request) @@ -9344,26 +9383,28 @@ async def test_update_dns_authorization_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_dns_authorization - ] = mock_object + ] = mock_rpc request = {} await client.update_dns_authorization(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_dns_authorization(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9685,8 +9726,9 @@ def test_delete_dns_authorization_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_dns_authorization(request) @@ -9742,26 +9784,28 @@ async def test_delete_dns_authorization_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_dns_authorization - ] = mock_object + ] = mock_rpc request = {} await client.delete_dns_authorization(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_dns_authorization(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10150,22 +10194,23 @@ async def test_list_certificate_issuance_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_certificate_issuance_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_certificate_issuance_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_certificate_issuance_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10774,22 +10819,23 @@ async def test_get_certificate_issuance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_certificate_issuance_config - ] = mock_object + ] = mock_rpc request = {} await client.get_certificate_issuance_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_certificate_issuance_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11122,8 +11168,9 @@ def test_create_certificate_issuance_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_certificate_issuance_config(request) @@ -11182,26 +11229,28 @@ async def test_create_certificate_issuance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_certificate_issuance_config - ] = mock_object + ] = mock_rpc request = {} await client.create_certificate_issuance_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_certificate_issuance_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11552,8 +11601,9 @@ def test_delete_certificate_issuance_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_certificate_issuance_config(request) @@ -11612,26 +11662,28 @@ async def test_delete_certificate_issuance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_certificate_issuance_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_certificate_issuance_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_certificate_issuance_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12009,22 +12061,23 @@ async def test_list_trust_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_trust_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_trust_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_trust_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12595,22 +12648,23 @@ async def test_get_trust_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_trust_config - ] = mock_object + ] = mock_rpc request = {} await client.get_trust_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_trust_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12919,8 +12973,9 @@ def test_create_trust_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_trust_config(request) @@ -12976,26 +13031,28 @@ async def test_create_trust_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_trust_config - ] = mock_object + ] = mock_rpc request = {} await client.create_trust_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_trust_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13322,8 +13379,9 @@ def test_update_trust_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_trust_config(request) @@ -13379,26 +13437,28 @@ async def test_update_trust_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_trust_config - ] = mock_object + ] = mock_rpc request = {} await client.update_trust_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_trust_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13721,8 +13781,9 @@ def test_delete_trust_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_trust_config(request) @@ -13778,26 +13839,28 @@ async def test_delete_trust_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_trust_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_trust_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_trust_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-channel/CHANGELOG.md b/packages/google-cloud-channel/CHANGELOG.md index da482c90412d..252ea1ffe6f5 100644 --- a/packages/google-cloud-channel/CHANGELOG.md +++ b/packages/google-cloud-channel/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [1.19.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-channel-v1.18.5...google-cloud-channel-v1.19.0) (2024-10-08) + + +### Features + +* Add support for importing team customer from a different reseller ([c38431b](https://github.com/googleapis/google-cloud-python/commit/c38431b363fd4f18bb692593f401e3ac3759637c)) +* Add support for primary_admin_email as customer_identity for ImportCustomer 
([c38431b](https://github.com/googleapis/google-cloud-python/commit/c38431b363fd4f18bb692593f401e3ac3759637c)) +* Add support to look up team customer Cloud Identity information ([c38431b](https://github.com/googleapis/google-cloud-python/commit/c38431b363fd4f18bb692593f401e3ac3759637c)) + + +### Documentation + +* Clarify the expected value of the domain field for team type customers ([c38431b](https://github.com/googleapis/google-cloud-python/commit/c38431b363fd4f18bb692593f401e3ac3759637c)) + ## [1.18.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-channel-v1.18.4...google-cloud-channel-v1.18.5) (2024-07-30) diff --git a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py index d413e1807c55..f1337c609ff8 100644 --- a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.18.5" # {x-release-please-version} +__version__ = "1.19.0" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py index d413e1807c55..f1337c609ff8 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.18.5" # {x-release-please-version} +__version__ = "1.19.0" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py index 3276a8a353e2..85b64f3501fe 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -213,10 +212,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudChannelReportsServiceClient).get_transport_class, - type(CloudChannelReportsServiceClient), - ) + get_transport_class = CloudChannelReportsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py index 1abb1598ec2b..94d7c58c7094 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py @@ -694,7 +694,7 @@ def __init__( Type[CloudChannelReportsServiceTransport], Callable[..., CloudChannelReportsServiceTransport], ] = ( - type(self).get_transport_class(transport) + CloudChannelReportsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudChannelReportsServiceTransport], transport) ) diff --git 
a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py index 525d24935ee8..62e7e214d94b 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -262,10 +261,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudChannelServiceClient).get_transport_class, - type(CloudChannelServiceClient), - ) + get_transport_class = CloudChannelServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py index 9a7f970d80fa..1edd0dbbd8a9 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py @@ -873,7 +873,7 @@ def __init__( Type[CloudChannelServiceTransport], Callable[..., CloudChannelServiceTransport], ] = ( - type(self).get_transport_class(transport) + CloudChannelServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudChannelServiceTransport], transport) ) diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py index c8ae1f8d383f..9738f170a601 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py +++ 
b/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py @@ -127,7 +127,12 @@ class CheckCloudIdentityAccountsExistRequest(proto.Message): the format: accounts/{account_id} domain (str): Required. Domain to fetch for Cloud Identity - account customer. + account customers, including domain and team + customers. For team customers, please use the + domain for their emails. + primary_admin_email (str): + Optional. Primary admin email to fetch for + Cloud Identity account team customer. """ parent: str = proto.Field( @@ -138,6 +143,10 @@ class CheckCloudIdentityAccountsExistRequest(proto.Message): proto.STRING, number=2, ) + primary_admin_email: str = proto.Field( + proto.STRING, + number=4, + ) class CloudIdentityCustomerAccount(proto.Message): @@ -159,6 +168,11 @@ class CloudIdentityCustomerAccount(proto.Message): customer_cloud_identity_id (str): If existing = true, the Cloud Identity ID of the customer. + customer_type (google.cloud.channel_v1.types.CloudIdentityInfo.CustomerType): + If existing = true, the type of the customer. + channel_partner_cloud_identity_id (str): + If existing = true, and is 2-tier customer, + the channel partner of the customer. """ existing: bool = proto.Field( @@ -177,6 +191,15 @@ class CloudIdentityCustomerAccount(proto.Message): proto.STRING, number=4, ) + customer_type: common.CloudIdentityInfo.CustomerType = proto.Field( + proto.ENUM, + number=5, + enum=common.CloudIdentityInfo.CustomerType, + ) + channel_partner_cloud_identity_id: str = proto.Field( + proto.STRING, + number=6, + ) class CheckCloudIdentityAccountsExistResponse(proto.Message): @@ -373,6 +396,10 @@ class ImportCustomerRequest(proto.Message): cloud_identity_id (str): Required. Customer's Cloud Identity ID + This field is a member of `oneof`_ ``customer_identity``. + primary_admin_email (str): + Required. Customer's primary admin email. + This field is a member of `oneof`_ ``customer_identity``. parent (str): Required. 
The resource name of the reseller's account. @@ -413,6 +440,11 @@ class ImportCustomerRequest(proto.Message): number=3, oneof="customer_identity", ) + primary_admin_email: str = proto.Field( + proto.STRING, + number=8, + oneof="customer_identity", + ) parent: str = proto.Field( proto.STRING, number=1, diff --git a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json index 0d2a795960e5..075cb1b96e00 100644 --- a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json +++ b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-channel", - "version": "1.18.5" + "version": "1.19.0" }, "snippets": [ { diff --git a/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py b/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py index 7c3e175a35d5..a7022924a590 100644 --- a/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py +++ b/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py @@ -44,7 +44,7 @@ class channelCallTransformer(cst.CSTTransformer): 'change_offer': ('name', 'offer', 'parameters', 'purchase_order_id', 'request_id', 'billing_account', ), 'change_parameters': ('name', 'parameters', 'request_id', 'purchase_order_id', ), 'change_renewal_settings': ('name', 'renewal_settings', 'request_id', ), - 'check_cloud_identity_accounts_exist': ('parent', 'domain', ), + 'check_cloud_identity_accounts_exist': ('parent', 'domain', 'primary_admin_email', ), 'create_channel_partner_link': ('parent', 'channel_partner_link', ), 'create_channel_partner_repricing_config': ('parent', 'channel_partner_repricing_config', ), 'create_customer': ('parent', 'customer', ), @@ -59,7 +59,7 @@ class 
channelCallTransformer(cst.CSTTransformer): 'get_customer': ('name', ), 'get_customer_repricing_config': ('name', ), 'get_entitlement': ('name', ), - 'import_customer': ('domain', 'cloud_identity_id', 'parent', 'overwrite_if_exists', 'auth_token', 'channel_partner_id', 'customer', ), + 'import_customer': ('domain', 'cloud_identity_id', 'primary_admin_email', 'parent', 'overwrite_if_exists', 'auth_token', 'channel_partner_id', 'customer', ), 'list_channel_partner_links': ('parent', 'page_size', 'page_token', 'view', ), 'list_channel_partner_repricing_configs': ('parent', 'page_size', 'page_token', 'filter', ), 'list_customer_repricing_configs': ('parent', 'page_size', 'page_token', 'filter', ), diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py index 2f704293a112..623d643f7351 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py @@ -1278,8 +1278,9 @@ def test_run_report_job_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_report_job(request) @@ -1333,26 +1334,28 @@ async def test_run_report_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_report_job - ] = mock_object + ] = mock_rpc request = {} await client.run_report_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_report_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1630,22 +1633,23 @@ async def test_fetch_report_results_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_report_results - ] = mock_object + ] = mock_rpc request = {} await client.fetch_report_results(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_report_results(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2211,22 +2215,23 @@ async def test_list_reports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_reports - ] = mock_object + ] = mock_rpc request = {} await client.list_reports(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_reports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py index a55d6ca6a8f1..b6ab34645279 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py @@ -1331,22 +1331,23 @@ async def test_list_customers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_customers - ] = mock_object + ] = mock_rpc request = {} await client.list_customers(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_customers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1828,22 +1829,23 @@ async def test_get_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_customer - ] = mock_object + ] = mock_rpc request = {} await client.get_customer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2108,6 +2110,7 @@ def test_check_cloud_identity_accounts_exist_non_empty_request_with_auto_populat request = service.CheckCloudIdentityAccountsExistRequest( parent="parent_value", domain="domain_value", + primary_admin_email="primary_admin_email_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2123,6 +2126,7 @@ def test_check_cloud_identity_accounts_exist_non_empty_request_with_auto_populat assert args[0] == service.CheckCloudIdentityAccountsExistRequest( parent="parent_value", domain="domain_value", + primary_admin_email="primary_admin_email_value", ) @@ -2212,22 +2216,23 @@ async def test_check_cloud_identity_accounts_exist_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_cloud_identity_accounts_exist - ] = mock_object + ] = mock_rpc request = {} await client.check_cloud_identity_accounts_exist(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_cloud_identity_accounts_exist(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2519,22 +2524,23 @@ async def test_create_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_customer - ] = mock_object + ] = mock_rpc request = {} await client.create_customer(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2830,22 +2836,23 @@ async def test_update_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_customer - ] = mock_object + ] = mock_rpc request = {} await client.update_customer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3117,22 +3124,23 @@ async def test_delete_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_customer - ] = mock_object + ] = mock_rpc request = {} await client.delete_customer(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3391,6 +3399,7 @@ def test_import_customer_non_empty_request_with_auto_populated_field(): request = service.ImportCustomerRequest( domain="domain_value", cloud_identity_id="cloud_identity_id_value", + primary_admin_email="primary_admin_email_value", parent="parent_value", auth_token="auth_token_value", channel_partner_id="channel_partner_id_value", @@ -3408,6 +3417,7 @@ def test_import_customer_non_empty_request_with_auto_populated_field(): assert args[0] == service.ImportCustomerRequest( domain="domain_value", cloud_identity_id="cloud_identity_id_value", + primary_admin_email="primary_admin_email_value", parent="parent_value", auth_token="auth_token_value", channel_partner_id="channel_partner_id_value", @@ -3503,22 +3513,23 @@ async def test_import_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_customer - ] = mock_object + ] = mock_rpc request = {} await client.import_customer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.import_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3752,8 +3763,9 @@ def test_provision_cloud_identity_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.provision_cloud_identity(request) @@ -3809,26 +3821,28 @@ async def test_provision_cloud_identity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.provision_cloud_identity - ] = mock_object + ] = mock_rpc request = {} await client.provision_cloud_identity(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.provision_cloud_identity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4110,22 +4124,23 @@ async def test_list_entitlements_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entitlements - ] = mock_object + ] = mock_rpc request = {} await client.list_entitlements(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entitlements(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4623,22 +4638,23 @@ async def test_list_transferable_skus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transferable_skus - ] = mock_object + ] = mock_rpc request = {} await client.list_transferable_skus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transferable_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5138,22 +5154,23 @@ async def test_list_transferable_offers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transferable_offers - ] = mock_object + ] = mock_rpc request = {} await client.list_transferable_offers(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transferable_offers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5653,22 +5670,23 @@ async def test_get_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.get_entitlement(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5907,8 +5925,9 @@ def test_create_entitlement_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_entitlement(request) @@ -5964,26 +5983,28 @@ async def test_create_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.create_entitlement(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6209,8 +6230,9 @@ def test_change_parameters_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.change_parameters(request) @@ -6266,26 +6288,28 @@ async def test_change_parameters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.change_parameters - ] = mock_object + ] = mock_rpc request = {} await client.change_parameters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.change_parameters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6512,8 +6536,9 @@ def test_change_renewal_settings_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.change_renewal_settings(request) @@ -6569,26 +6594,28 @@ async def test_change_renewal_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.change_renewal_settings - ] = mock_object + ] = mock_rpc request = {} await client.change_renewal_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.change_renewal_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6810,8 +6837,9 @@ def test_change_offer_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.change_offer(request) @@ -6865,26 +6893,28 @@ async def test_change_offer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.change_offer - ] = mock_object + ] = mock_rpc request = {} await client.change_offer(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.change_offer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7104,8 +7134,9 @@ def test_start_paid_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_paid_service(request) @@ -7161,26 +7192,28 @@ async def test_start_paid_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_paid_service - ] = mock_object + ] = mock_rpc request = {} await client.start_paid_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_paid_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7406,8 +7439,9 @@ def test_suspend_entitlement_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.suspend_entitlement(request) @@ -7463,26 +7497,28 @@ async def test_suspend_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suspend_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.suspend_entitlement(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.suspend_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7708,8 +7744,9 @@ def test_cancel_entitlement_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.cancel_entitlement(request) @@ -7765,26 +7802,28 @@ async def test_cancel_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.cancel_entitlement(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.cancel_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8010,8 +8049,9 @@ def test_activate_entitlement_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.activate_entitlement(request) @@ -8067,26 +8107,28 @@ async def test_activate_entitlement_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.activate_entitlement - ] = mock_object + ] = mock_rpc request = {} await client.activate_entitlement(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.activate_entitlement(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8315,8 +8357,9 @@ def test_transfer_entitlements_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.transfer_entitlements(request) @@ -8372,26 +8415,28 @@ async def test_transfer_entitlements_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.transfer_entitlements - ] = mock_object + ] = mock_rpc request = {} await client.transfer_entitlements(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.transfer_entitlements(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8618,8 +8663,9 @@ def test_transfer_entitlements_to_google_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.transfer_entitlements_to_google(request) @@ -8675,26 +8721,28 @@ async def test_transfer_entitlements_to_google_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.transfer_entitlements_to_google - ] = mock_object + ] = mock_rpc request = {} await client.transfer_entitlements_to_google(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.transfer_entitlements_to_google(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8980,22 +9028,23 @@ async def test_list_channel_partner_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_channel_partner_links - ] = mock_object + ] = mock_rpc request = {} await client.list_channel_partner_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_channel_partner_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9501,22 +9550,23 @@ async def test_get_channel_partner_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_channel_partner_link - ] = mock_object + ] = mock_rpc request = {} await client.get_channel_partner_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_channel_partner_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9822,22 +9872,23 @@ async def test_create_channel_partner_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_channel_partner_link - ] = mock_object + ] = mock_rpc request = {} await client.create_channel_partner_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_channel_partner_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10144,22 +10195,23 @@ async def test_update_channel_partner_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_channel_partner_link - ] = mock_object + ] = mock_rpc request = {} await client.update_channel_partner_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_channel_partner_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10454,22 +10506,23 @@ async def test_get_customer_repricing_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_customer_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.get_customer_repricing_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_customer_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10846,22 +10899,23 @@ async def test_list_customer_repricing_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_customer_repricing_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_customer_repricing_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_customer_repricing_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11438,22 +11492,23 @@ async def test_create_customer_repricing_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_customer_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.create_customer_repricing_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_customer_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11840,22 +11895,23 @@ async def test_update_customer_repricing_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_customer_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.update_customer_repricing_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_customer_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12229,22 +12285,23 @@ async def test_delete_customer_repricing_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_customer_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_customer_repricing_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_customer_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12608,22 +12665,23 @@ async def test_get_channel_partner_repricing_config_async_use_cached_wrapped_rpc ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_channel_partner_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.get_channel_partner_repricing_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_channel_partner_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13000,22 +13058,23 @@ async def test_list_channel_partner_repricing_configs_async_use_cached_wrapped_r ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_channel_partner_repricing_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_channel_partner_repricing_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_channel_partner_repricing_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13596,22 +13655,23 @@ async def test_create_channel_partner_repricing_config_async_use_cached_wrapped_ ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_channel_partner_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.create_channel_partner_repricing_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_channel_partner_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13998,22 +14058,23 @@ async def test_update_channel_partner_repricing_config_async_use_cached_wrapped_ ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_channel_partner_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.update_channel_partner_repricing_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_channel_partner_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14387,22 +14448,23 @@ async def test_delete_channel_partner_repricing_config_async_use_cached_wrapped_ ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_channel_partner_repricing_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_channel_partner_repricing_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_channel_partner_repricing_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14755,22 +14817,23 @@ async def test_list_sku_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sku_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_sku_groups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sku_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15328,22 +15391,23 @@ async def test_list_sku_group_billable_skus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sku_group_billable_skus - ] = mock_object + ] = mock_rpc request = {} await client.list_sku_group_billable_skus(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sku_group_billable_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15910,22 +15974,23 @@ async def test_lookup_offer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_offer - ] = mock_object + ] = mock_rpc request = {} await client.lookup_offer(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_offer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16196,22 +16261,23 @@ async def test_list_products_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_products - ] = mock_object + ] = mock_rpc request = {} await client.list_products(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_products(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16612,22 +16678,23 @@ async def test_list_skus_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_skus - ] = mock_object + ] = mock_rpc request = {} await client.list_skus(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17094,22 +17161,23 @@ async def test_list_offers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_offers - ] = mock_object + ] = mock_rpc request = {} await client.list_offers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_offers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17587,22 +17655,23 @@ async def test_list_purchasable_skus_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_purchasable_skus - ] = mock_object + ] = mock_rpc request = {} await client.list_purchasable_skus(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_purchasable_skus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18094,22 +18163,23 @@ async def test_list_purchasable_offers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_purchasable_offers - ] = mock_object + ] = mock_rpc request = {} await client.list_purchasable_offers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_purchasable_offers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18592,22 +18662,23 @@ async def test_query_eligible_billing_accounts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_eligible_billing_accounts - ] = mock_object + ] = mock_rpc request = {} await client.query_eligible_billing_accounts(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_eligible_billing_accounts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18892,22 +18963,23 @@ async def test_register_subscriber_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.register_subscriber - ] = mock_object + ] = mock_rpc request = {} await client.register_subscriber(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.register_subscriber(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19195,22 +19267,23 @@ async def test_unregister_subscriber_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.unregister_subscriber - ] = mock_object + ] = mock_rpc request = {} await client.unregister_subscriber(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.unregister_subscriber(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19493,22 +19566,23 @@ async def test_list_subscribers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_subscribers - ] = mock_object + ] = mock_rpc request = {} await client.list_subscribers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_subscribers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19990,22 +20064,23 @@ async def test_list_entitlement_changes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entitlement_changes - ] = mock_object + ] = mock_rpc request = {} await client.list_entitlement_changes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entitlement_changes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md index b2bd23a8caac..275e18ff132a 100644 --- a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md +++ b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md @@ -1,5 +1,28 @@ # Changelog +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.1.3...google-cloud-cloudcontrolspartner-v0.2.0) (2024-09-16) + + +### ⚠ BREAKING CHANGES + +* [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed + +### Features + +* A new value `ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER` is added to enum `.google.cloud.cloudcontrolspartner.v1beta.PartnerPermissions.Permission` ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) +* Field behavior for field `customer_onboarding_state` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) +* Field behavior for field `is_onboarded` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) + + +### Bug Fixes + +* [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed 
([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) + + +### Documentation + +* A comment for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.1.2...google-cloud-cloudcontrolspartner-v0.1.3) (2024-07-30) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py index 114e40645800..364164ddb134 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py index 114e40645800..364164ddb134 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/async_client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/async_client.py index 7ba8aef8e738..569e84b46bce 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/async_client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -231,10 +230,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudControlsPartnerCoreClient).get_transport_class, - type(CloudControlsPartnerCoreClient), - ) + get_transport_class = CloudControlsPartnerCoreClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py index e18ed6347c58..82b86d2f3332 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py @@ -798,7 +798,7 @@ def __init__( Type[CloudControlsPartnerCoreTransport], Callable[..., CloudControlsPartnerCoreTransport], ] = ( - type(self).get_transport_class(transport) + 
CloudControlsPartnerCoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudControlsPartnerCoreTransport], transport) ) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/async_client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/async_client.py index 35290d8f376f..c5de6440a7da 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/async_client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,10 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudControlsPartnerMonitoringClient).get_transport_class, - type(CloudControlsPartnerMonitoringClient), - ) + get_transport_class = CloudControlsPartnerMonitoringClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py index ef2665fd6df2..0f7d9f952054 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py @@ -688,7 +688,7 @@ def __init__( Type[CloudControlsPartnerMonitoringTransport], 
Callable[..., CloudControlsPartnerMonitoringTransport], ] = ( - type(self).get_transport_class(transport) + CloudControlsPartnerMonitoringClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., CloudControlsPartnerMonitoringTransport], transport diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py index 62eee778fdaf..917b2c256294 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py @@ -45,13 +45,13 @@ class Customer(proto.Message): Identifier. Format: ``organizations/{organization}/locations/{location}/customers/{customer}`` display_name (str): - The customer organization's display name. - E.g. "google.com". + Required. Display name for the customer customer_onboarding_state (google.cloud.cloudcontrolspartner_v1.types.CustomerOnboardingState): - Container for customer onboarding steps + Output only. Container for customer + onboarding steps is_onboarded (bool): - Indicates whether a customer is fully - onboarded + Output only. 
Indicates whether a customer is + fully onboarded """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py index 072b279e0861..a9c4f2513124 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py @@ -57,12 +57,16 @@ class Permission(proto.Enum): ASSURED_WORKLOADS_EKM_CONNECTION_STATUS (4): Permission for External Key Manager connection status + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER (5): + Permission for support case details for + Access Transparency log entries """ PERMISSION_UNSPECIFIED = 0 ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS = 1 ASSURED_WORKLOADS_MONITORING = 2 ACCESS_APPROVAL_REQUESTS = 3 ASSURED_WORKLOADS_EKM_CONNECTION_STATUS = 4 + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER = 5 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py index 114e40645800..364164ddb134 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py index e42485a45d1f..89159b48c9e5 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -231,10 +230,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudControlsPartnerCoreClient).get_transport_class, - type(CloudControlsPartnerCoreClient), - ) + get_transport_class = CloudControlsPartnerCoreClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py index 039ce6aecbd7..1896fa7459b7 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py @@ -798,7 +798,7 @@ def __init__( Type[CloudControlsPartnerCoreTransport], Callable[..., CloudControlsPartnerCoreTransport], ] = ( - type(self).get_transport_class(transport) + 
CloudControlsPartnerCoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudControlsPartnerCoreTransport], transport) ) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/async_client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/async_client.py index 869bf976e6e1..47889c59fb46 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/async_client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,10 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudControlsPartnerMonitoringClient).get_transport_class, - type(CloudControlsPartnerMonitoringClient), - ) + get_transport_class = CloudControlsPartnerMonitoringClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py index 71d16cc912b8..edde6d0911ee 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py @@ -688,7 +688,7 @@ def __init__( 
Type[CloudControlsPartnerMonitoringTransport], Callable[..., CloudControlsPartnerMonitoringTransport], ] = ( - type(self).get_transport_class(transport) + CloudControlsPartnerMonitoringClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., CloudControlsPartnerMonitoringTransport], transport diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py index dae25231d4f0..2237867d884f 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py @@ -45,13 +45,13 @@ class Customer(proto.Message): Identifier. Format: ``organizations/{organization}/locations/{location}/customers/{customer}`` display_name (str): - The customer organization's display name. - E.g. "google.com". + Required. Display name for the customer customer_onboarding_state (google.cloud.cloudcontrolspartner_v1beta.types.CustomerOnboardingState): - Container for customer onboarding steps + Output only. Container for customer + onboarding steps is_onboarded (bool): - Indicates whether a customer is fully - onboarded + Output only. 
Indicates whether a customer is + fully onboarded """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py index d94dff633d35..eddc0cf9ab95 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py @@ -57,12 +57,16 @@ class Permission(proto.Enum): ASSURED_WORKLOADS_EKM_CONNECTION_STATUS (4): Permission for External Key Manager connection status + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER (5): + Permission for support case details for + Access Transparency log entries """ PERMISSION_UNSPECIFIED = 0 ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS = 1 ASSURED_WORKLOADS_MONITORING = 2 ACCESS_APPROVAL_REQUESTS = 3 ASSURED_WORKLOADS_EKM_CONNECTION_STATUS = 4 + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER = 5 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json index 37f6e017d9a3..606c14b81f01 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.1.3" + "version": "0.2.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json index d3df2e6d1061..9c0039bf1f65 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.1.3" + "version": "0.2.0" }, "snippets": [ { diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py index 2746ec5e2e42..7401a18fff9e 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py @@ -1388,22 +1388,23 @@ async def test_get_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workload - ] = mock_object + ] = mock_rpc request = {} await client.get_workload(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1776,22 +1777,23 @@ async def test_list_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_workloads(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2343,22 +2345,23 @@ async def test_get_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_customer - ] = mock_object + ] = mock_rpc request = {} await client.get_customer(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2716,22 +2719,23 @@ async def test_list_customers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_customers - ] = mock_object + ] = mock_rpc request = {} await client.list_customers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_customers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3288,22 +3292,23 @@ async def test_get_ekm_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_ekm_connections - ] = mock_object + ] = mock_rpc request = {} await client.get_ekm_connections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_ekm_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3685,22 +3690,23 @@ async def test_get_partner_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_partner_permissions - ] = mock_object + ] = mock_rpc request = {} await client.get_partner_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_partner_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4088,22 +4094,23 @@ async def test_list_access_approval_requests_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_access_approval_requests - ] = mock_object + ] = mock_rpc request = {} await client.list_access_approval_requests(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_access_approval_requests(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4687,22 +4694,23 @@ async def test_get_partner_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_partner - ] = mock_object + ] = mock_rpc request = {} await client.get_partner(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_partner(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py index 7258a6619007..2f28ff3c4f6b 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py @@ -1389,22 +1389,23 @@ async def test_list_violations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_violations - ] = mock_object + ] = mock_rpc request = {} await 
client.list_violations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_violations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1964,22 +1965,23 @@ async def test_get_violation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_violation - ] = mock_object + ] = mock_rpc request = {} await client.get_violation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_violation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py index 8370f8fbd38d..012e5a4033b2 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py @@ -1388,22 +1388,23 @@ async def test_get_workload_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ 
client._client._transport.get_workload - ] = mock_object + ] = mock_rpc request = {} await client.get_workload(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workload(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1776,22 +1777,23 @@ async def test_list_workloads_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workloads - ] = mock_object + ] = mock_rpc request = {} await client.list_workloads(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2343,22 +2345,23 @@ async def test_get_customer_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_customer - ] = mock_object + ] = mock_rpc request = {} await client.get_customer(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2716,22 +2719,23 @@ async def test_list_customers_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_customers - ] = mock_object + ] = mock_rpc request = {} await client.list_customers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_customers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3288,22 +3292,23 @@ async def test_get_ekm_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_ekm_connections - ] = mock_object + ] = mock_rpc request = {} await client.get_ekm_connections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_ekm_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3685,22 +3690,23 @@ async def test_get_partner_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_partner_permissions - ] = mock_object + ] = mock_rpc request = {} await client.get_partner_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_partner_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4088,22 +4094,23 @@ async def test_list_access_approval_requests_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_access_approval_requests - ] = mock_object + ] = mock_rpc request = {} await client.list_access_approval_requests(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_access_approval_requests(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4687,22 +4694,23 @@ async def test_get_partner_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_partner - ] = mock_object + ] = mock_rpc request = {} await client.get_partner(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_partner(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py index 5a1de7e85211..7d6ab9fce47f 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py @@ -1389,22 +1389,23 @@ async def test_list_violations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_violations - ] = mock_object + ] = mock_rpc request 
= {} await client.list_violations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_violations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1964,22 +1965,23 @@ async def test_get_violation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_violation - ] = mock_object + ] = mock_rpc request = {} await client.get_violation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_violation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/gapic_version.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/gapic_version.py index 9413c3341313..558c8aab67c5 100644 --- a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/gapic_version.py +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_version.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_version.py index 9413c3341313..558c8aab67c5 100644 --- a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_version.py +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.10" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py index fef354c48230..80a6fb9730a7 100644 --- a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudQuotasClient).get_transport_class, type(CloudQuotasClient) - ) + get_transport_class = CloudQuotasClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py index 376a965778b5..81a992db5567 100644 --- a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py @@ -692,7 +692,7 @@ def __init__( transport_init: Union[ Type[CloudQuotasTransport], Callable[..., CloudQuotasTransport] ] = ( - type(self).get_transport_class(transport) + CloudQuotasClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudQuotasTransport], transport) ) diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json b/packages/google-cloud-cloudquotas/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json index 33a66763fc72..5c9889ca79cf 100644 --- a/packages/google-cloud-cloudquotas/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json +++ b/packages/google-cloud-cloudquotas/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-quotas", - "version": "0.1.10" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py b/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py index 
fceb1992e9ae..4988c6aec8cf 100644 --- a/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py +++ b/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py @@ -1251,22 +1251,23 @@ async def test_list_quota_infos_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_quota_infos - ] = mock_object + ] = mock_rpc request = {} await client.list_quota_infos(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_quota_infos(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1848,22 +1849,23 @@ async def test_get_quota_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_quota_info - ] = mock_object + ] = mock_rpc request = {} await client.get_quota_info(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_quota_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2256,22 +2258,23 @@ async def test_list_quota_preferences_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_quota_preferences - ] = mock_object + ] = mock_rpc request = {} await client.list_quota_preferences(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_quota_preferences(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2865,22 +2868,23 @@ async def test_get_quota_preference_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_quota_preference - ] = mock_object + ] = mock_rpc request = {} await client.get_quota_preference(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_quota_preference(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3284,22 +3288,23 @@ async def test_create_quota_preference_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_quota_preference - ] = mock_object + ] = mock_rpc request = {} await client.create_quota_preference(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_quota_preference(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3718,22 +3723,23 @@ async def test_update_quota_preference_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_quota_preference - ] = mock_object + ] = mock_rpc request = {} await client.update_quota_preference(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_quota_preference(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md b/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md index 8e7cbe49e394..8bed09396aee 100644 --- a/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md +++ b/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [0.1.8](https://github.com/googleapis/google-cloud-python/compare/google-cloud-commerce-consumer-procurement-v0.1.7...google-cloud-commerce-consumer-procurement-v0.1.8) (2024-10-08) + + +### Features + +* add Order modification RPCs and License Management Service ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) + + +### Documentation + +* A comment for enum value `LINE_ITEM_CHANGE_STATE_ABANDONED` in enum `LineItemChangeState` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for enum value `LINE_ITEM_CHANGE_STATE_ACTIVATING` in enum `LineItemChangeState` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for enum value `LINE_ITEM_CHANGE_STATE_APPROVED` in enum `LineItemChangeState` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for enum value `LINE_ITEM_CHANGE_STATE_COMPLETED` in enum `LineItemChangeState` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for enum value `LINE_ITEM_CHANGE_STATE_PENDING_APPROVAL` in 
enum `LineItemChangeState` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for enum value `LINE_ITEM_CHANGE_STATE_REJECTED` in enum `LineItemChangeState` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for field `filter` in message `.google.cloud.commerce.consumer.procurement.v1.ListOrdersRequest` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for field `request_id` in message `.google.cloud.commerce.consumer.procurement.v1.PlaceOrderRequest` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) + ## [0.1.7](https://github.com/googleapis/google-cloud-python/compare/google-cloud-commerce-consumer-procurement-v0.1.6...google-cloud-commerce-consumer-procurement-v0.1.7) (2024-07-30) diff --git a/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst new file mode 100644 index 000000000000..d08a71e7aec0 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst @@ -0,0 +1,10 @@ +LicenseManagementService +------------------------------------------ + +.. automodule:: google.cloud.commerce_consumer_procurement_v1.services.license_management_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst index d5e8b5f12ecb..8d66166cebbe 100644 --- a/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst +++ b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst @@ -4,3 +4,4 @@ Services for Google Cloud Commerce Consumer Procurement v1 API :maxdepth: 2 consumer_procurement_service + license_management_service diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py index f271433c727b..bba3576813f8 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py @@ -24,6 +24,25 @@ from google.cloud.commerce_consumer_procurement_v1.services.consumer_procurement_service.client import ( ConsumerProcurementServiceClient, ) +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service.async_client import ( + LicenseManagementServiceAsyncClient, +) +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service.client import ( + LicenseManagementServiceClient, +) +from google.cloud.commerce_consumer_procurement_v1.types.license_management_service import ( + AssignmentProtocol, + AssignRequest, + AssignResponse, + EnumerateLicensedUsersRequest, + EnumerateLicensedUsersResponse, + GetLicensePoolRequest, + LicensedUser, + LicensePool, + 
UnassignRequest, + UnassignResponse, + UpdateLicensePoolRequest, +) from google.cloud.commerce_consumer_procurement_v1.types.order import ( LineItem, LineItemChange, @@ -36,9 +55,14 @@ Subscription, ) from google.cloud.commerce_consumer_procurement_v1.types.procurement_service import ( + AutoRenewalBehavior, + CancelOrderMetadata, + CancelOrderRequest, GetOrderRequest, ListOrdersRequest, ListOrdersResponse, + ModifyOrderMetadata, + ModifyOrderRequest, PlaceOrderMetadata, PlaceOrderRequest, ) @@ -46,6 +70,19 @@ __all__ = ( "ConsumerProcurementServiceClient", "ConsumerProcurementServiceAsyncClient", + "LicenseManagementServiceClient", + "LicenseManagementServiceAsyncClient", + "AssignmentProtocol", + "AssignRequest", + "AssignResponse", + "EnumerateLicensedUsersRequest", + "EnumerateLicensedUsersResponse", + "GetLicensePoolRequest", + "LicensedUser", + "LicensePool", + "UnassignRequest", + "UnassignResponse", + "UpdateLicensePoolRequest", "LineItem", "LineItemChange", "LineItemInfo", @@ -55,9 +92,14 @@ "LineItemChangeState", "LineItemChangeStateReasonType", "LineItemChangeType", + "CancelOrderMetadata", + "CancelOrderRequest", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderMetadata", + "ModifyOrderRequest", "PlaceOrderMetadata", "PlaceOrderRequest", + "AutoRenewalBehavior", ) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py index cf5493b86bbc..ec8d212c9160 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.1.8" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py index d1a4fa34b7d8..2a6c2e07c4ab 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py @@ -24,6 +24,23 @@ ConsumerProcurementServiceAsyncClient, ConsumerProcurementServiceClient, ) +from .services.license_management_service import ( + LicenseManagementServiceAsyncClient, + LicenseManagementServiceClient, +) +from .types.license_management_service import ( + AssignmentProtocol, + AssignRequest, + AssignResponse, + EnumerateLicensedUsersRequest, + EnumerateLicensedUsersResponse, + GetLicensePoolRequest, + LicensedUser, + LicensePool, + UnassignRequest, + UnassignResponse, + UpdateLicensePoolRequest, +) from .types.order import ( LineItem, LineItemChange, @@ -36,17 +53,35 @@ Subscription, ) from .types.procurement_service import ( + AutoRenewalBehavior, + CancelOrderMetadata, + CancelOrderRequest, GetOrderRequest, ListOrdersRequest, ListOrdersResponse, + ModifyOrderMetadata, + ModifyOrderRequest, PlaceOrderMetadata, PlaceOrderRequest, ) __all__ = ( "ConsumerProcurementServiceAsyncClient", + "LicenseManagementServiceAsyncClient", + "AssignRequest", + "AssignResponse", + "AssignmentProtocol", + "AutoRenewalBehavior", + "CancelOrderMetadata", + "CancelOrderRequest", "ConsumerProcurementServiceClient", + "EnumerateLicensedUsersRequest", + "EnumerateLicensedUsersResponse", + "GetLicensePoolRequest", "GetOrderRequest", + "LicenseManagementServiceClient", + "LicensePool", + "LicensedUser", "LineItem", "LineItemChange", "LineItemChangeState", @@ -55,9 +90,14 
@@ "LineItemInfo", "ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderMetadata", + "ModifyOrderRequest", "Order", "Parameter", "PlaceOrderMetadata", "PlaceOrderRequest", "Subscription", + "UnassignRequest", + "UnassignResponse", + "UpdateLicensePoolRequest", ) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json index 638c161ad386..e11a84a7c315 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "ConsumerProcurementServiceClient", "rpcs": { + "CancelOrder": { + "methods": [ + "cancel_order" + ] + }, "GetOrder": { "methods": [ "get_order" @@ -20,6 +25,11 @@ "list_orders" ] }, + "ModifyOrder": { + "methods": [ + "modify_order" + ] + }, "PlaceOrder": { "methods": [ "place_order" @@ -30,6 +40,11 @@ "grpc-async": { "libraryClient": "ConsumerProcurementServiceAsyncClient", "rpcs": { + "CancelOrder": { + "methods": [ + "cancel_order" + ] + }, "GetOrder": { "methods": [ "get_order" @@ -40,6 +55,11 @@ "list_orders" ] }, + "ModifyOrder": { + "methods": [ + "modify_order" + ] + }, "PlaceOrder": { "methods": [ "place_order" @@ -50,6 +70,11 @@ "rest": { "libraryClient": "ConsumerProcurementServiceClient", "rpcs": { + "CancelOrder": { + "methods": [ + "cancel_order" + ] + }, "GetOrder": { "methods": [ "get_order" @@ -60,6 +85,11 @@ "list_orders" ] }, + "ModifyOrder": { + "methods": [ + "modify_order" + ] + }, "PlaceOrder": { "methods": [ "place_order" @@ -68,6 +98,100 @@ } } } + }, + "LicenseManagementService": { + "clients": { + "grpc": { + "libraryClient": "LicenseManagementServiceClient", + "rpcs": { + "Assign": { + "methods": [ + 
"assign" + ] + }, + "EnumerateLicensedUsers": { + "methods": [ + "enumerate_licensed_users" + ] + }, + "GetLicensePool": { + "methods": [ + "get_license_pool" + ] + }, + "Unassign": { + "methods": [ + "unassign" + ] + }, + "UpdateLicensePool": { + "methods": [ + "update_license_pool" + ] + } + } + }, + "grpc-async": { + "libraryClient": "LicenseManagementServiceAsyncClient", + "rpcs": { + "Assign": { + "methods": [ + "assign" + ] + }, + "EnumerateLicensedUsers": { + "methods": [ + "enumerate_licensed_users" + ] + }, + "GetLicensePool": { + "methods": [ + "get_license_pool" + ] + }, + "Unassign": { + "methods": [ + "unassign" + ] + }, + "UpdateLicensePool": { + "methods": [ + "update_license_pool" + ] + } + } + }, + "rest": { + "libraryClient": "LicenseManagementServiceClient", + "rpcs": { + "Assign": { + "methods": [ + "assign" + ] + }, + "EnumerateLicensedUsers": { + "methods": [ + "enumerate_licensed_users" + ] + }, + "GetLicensePool": { + "methods": [ + "get_license_pool" + ] + }, + "Unassign": { + "methods": [ + "unassign" + ] + }, + "UpdateLicensePool": { + "methods": [ + "update_license_pool" + ] + } + } + } + } } } } diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py index cf5493b86bbc..ec8d212c9160 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.1.8" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py index 34170fd7ad29..4b4132787111 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,10 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConsumerProcurementServiceClient).get_transport_class, - type(ConsumerProcurementServiceClient), - ) + get_transport_class = ConsumerProcurementServiceClient.get_transport_class def __init__( self, @@ -662,6 +658,230 @@ async def sample_list_orders(): # Done; return the response. return response + async def modify_order( + self, + request: Optional[Union[procurement_service.ModifyOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest, dict]]): + The request object. Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. 
+ + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. + + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.ModifyOrderRequest): + request = procurement_service.ModifyOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.modify_order + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + order.Order, + metadata_type=procurement_service.ModifyOrderMetadata, + ) + + # Done; return the response. + return response + + async def cancel_order( + self, + request: Optional[Union[procurement_service.CancelOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest, dict]]): + The request object. Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. 
+ Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. + + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. + + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.CancelOrderRequest): + request = procurement_service.CancelOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.cancel_order + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + order.Order, + metadata_type=procurement_service.CancelOrderMetadata, + ) + + # Done; return the response. 
+ return response + async def get_operation( self, request: Optional[operations_pb2.GetOperationRequest] = None, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py index 3999e726c4b8..525ad9877370 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py @@ -703,7 +703,7 @@ def __init__( Type[ConsumerProcurementServiceTransport], Callable[..., ConsumerProcurementServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConsumerProcurementServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConsumerProcurementServiceTransport], transport) ) @@ -1082,6 +1082,226 @@ def sample_list_orders(): # Done; return the response. return response + def modify_order( + self, + request: Optional[Union[procurement_service.ModifyOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest, dict]): + The request object. Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. + + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. 
+ + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.ModifyOrderRequest): + request = procurement_service.ModifyOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.modify_order] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + order.Order, + metadata_type=procurement_service.ModifyOrderMetadata, + ) + + # Done; return the response. + return response + + def cancel_order( + self, + request: Optional[Union[procurement_service.CancelOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest, dict]): + The request object. Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. + + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. 
+ + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.CancelOrderRequest): + request = procurement_service.CancelOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_order] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + order.Order, + metadata_type=procurement_service.CancelOrderMetadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "ConsumerProcurementServiceClient": return self diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py index 405ae9789b33..b7df5231e9d5 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py @@ -167,6 +167,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.modify_order: gapic_v1.method.wrap_method( + self.modify_order, + default_timeout=None, + client_info=client_info, + ), + self.cancel_order: gapic_v1.method.wrap_method( + self.cancel_order, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -213,6 +223,24 @@ def list_orders( ]: raise NotImplementedError() + @property + def modify_order( + self, + ) -> Callable[ + [procurement_service.ModifyOrderRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_order( + self, + ) -> Callable[ + [procurement_service.CancelOrderRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_operation( self, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py 
b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py index 25e976eeed36..307d720364fa 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py @@ -353,6 +353,62 @@ def list_orders( ) return self._stubs["list_orders"] + @property + def modify_order( + self, + ) -> Callable[[procurement_service.ModifyOrderRequest], operations_pb2.Operation]: + r"""Return a callable for the modify order method over gRPC. + + Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. + + Returns: + Callable[[~.ModifyOrderRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_order" not in self._stubs: + self._stubs["modify_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/ModifyOrder", + request_serializer=procurement_service.ModifyOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["modify_order"] + + @property + def cancel_order( + self, + ) -> Callable[[procurement_service.CancelOrderRequest], operations_pb2.Operation]: + r"""Return a callable for the cancel order method over gRPC. + + Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. 
+ + Returns: + Callable[[~.CancelOrderRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_order" not in self._stubs: + self._stubs["cancel_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/CancelOrder", + request_serializer=procurement_service.CancelOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_order"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py index fb34a2b76187..0cdba90bcd6f 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py @@ -366,6 +366,66 @@ def list_orders( ) return self._stubs["list_orders"] + @property + def modify_order( + self, + ) -> Callable[ + [procurement_service.ModifyOrderRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the modify order method over gRPC. + + Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. 
+ + Returns: + Callable[[~.ModifyOrderRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_order" not in self._stubs: + self._stubs["modify_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/ModifyOrder", + request_serializer=procurement_service.ModifyOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["modify_order"] + + @property + def cancel_order( + self, + ) -> Callable[ + [procurement_service.CancelOrderRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the cancel order method over gRPC. + + Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. + + Returns: + Callable[[~.CancelOrderRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_order" not in self._stubs: + self._stubs["cancel_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/CancelOrder", + request_serializer=procurement_service.CancelOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_order"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -402,6 +462,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.modify_order: gapic_v1.method_async.wrap_method( + self.modify_order, + default_timeout=None, + client_info=client_info, + ), + self.cancel_order: gapic_v1.method_async.wrap_method( + self.cancel_order, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py index 353e9db69b53..16459934f854 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py @@ -74,6 +74,14 @@ class ConsumerProcurementServiceRestInterceptor: .. 
code-block:: python class MyCustomConsumerProcurementServiceInterceptor(ConsumerProcurementServiceRestInterceptor): + def pre_cancel_order(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_order(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_order(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -90,6 +98,14 @@ def post_list_orders(self, response): logging.log(f"Received response: {response}") return response + def pre_modify_order(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_modify_order(self, response): + logging.log(f"Received response: {response}") + return response + def pre_place_order(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -104,6 +120,29 @@ def post_place_order(self, response): """ + def pre_cancel_order( + self, + request: procurement_service.CancelOrderRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[procurement_service.CancelOrderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_order + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConsumerProcurementService server. + """ + return request, metadata + + def post_cancel_order( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for cancel_order + + Override in a subclass to manipulate the response + after it is returned by the ConsumerProcurementService server but before + it is returned to user code. 
+ """ + return response + def pre_get_order( self, request: procurement_service.GetOrderRequest, @@ -148,6 +187,29 @@ def post_list_orders( """ return response + def pre_modify_order( + self, + request: procurement_service.ModifyOrderRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[procurement_service.ModifyOrderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for modify_order + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConsumerProcurementService server. + """ + return request, metadata + + def post_modify_order( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for modify_order + + Override in a subclass to manipulate the response + after it is returned by the ConsumerProcurementService server but before + it is returned to user code. + """ + return response + def pre_place_order( self, request: procurement_service.PlaceOrderRequest, @@ -333,6 +395,100 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _CancelOrder(ConsumerProcurementServiceRestStub): + def __hash__(self): + return hash("CancelOrder") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: procurement_service.CancelOrderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the cancel order method over HTTP. + + Args: + request (~.procurement_service.CancelOrderRequest): + The request object. 
Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=billingAccounts/*/orders/*}:cancel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_cancel_order(request, metadata) + pb_request = procurement_service.CancelOrderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_order(resp) + return resp + class _GetOrder(ConsumerProcurementServiceRestStub): def __hash__(self): return hash("GetOrder") @@ -525,6 +681,100 @@ def __call__( resp = self._interceptor.post_list_orders(resp) return resp + class _ModifyOrder(ConsumerProcurementServiceRestStub): + def __hash__(self): + return hash("ModifyOrder") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: procurement_service.ModifyOrderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the modify order method over HTTP. + + Args: + request (~.procurement_service.ModifyOrderRequest): + The request object. Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=billingAccounts/*/orders/*}:modify", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_modify_order(request, metadata) + pb_request = procurement_service.ModifyOrderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_modify_order(resp) + return resp + class _PlaceOrder(ConsumerProcurementServiceRestStub): def __hash__(self): return hash("PlaceOrder") @@ -619,6 +869,14 @@ def __call__( resp = self._interceptor.post_place_order(resp) return resp + @property + def cancel_order( + self, + ) -> Callable[[procurement_service.CancelOrderRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CancelOrder(self._session, self._host, self._interceptor) # type: ignore + @property def get_order(self) -> Callable[[procurement_service.GetOrderRequest], order.Order]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. @@ -635,6 +893,14 @@ def list_orders( # In C++ this would require a dynamic_cast return self._ListOrders(self._session, self._host, self._interceptor) # type: ignore + @property + def modify_order( + self, + ) -> Callable[[procurement_service.ModifyOrderRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ModifyOrder(self._session, self._host, self._interceptor) # type: ignore + @property def place_order( self, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py new file mode 100644 index 000000000000..d1b19f8f83bc --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import LicenseManagementServiceAsyncClient +from .client import LicenseManagementServiceClient + +__all__ = ( + "LicenseManagementServiceClient", + "LicenseManagementServiceAsyncClient", +) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py new file mode 100644 index 000000000000..015a76c88aef --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py @@ -0,0 +1,925 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.commerce_consumer_procurement_v1 import ( + gapic_version as package_version, +) + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service import ( + pagers, +) +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +from .client import LicenseManagementServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport +from .transports.grpc_asyncio import LicenseManagementServiceGrpcAsyncIOTransport + + +class LicenseManagementServiceAsyncClient: + """Service for managing licenses.""" + + _client: LicenseManagementServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = LicenseManagementServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = LicenseManagementServiceClient._DEFAULT_UNIVERSE + + license_pool_path = staticmethod(LicenseManagementServiceClient.license_pool_path) + parse_license_pool_path = staticmethod( + LicenseManagementServiceClient.parse_license_pool_path + ) + common_billing_account_path = staticmethod( + LicenseManagementServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + LicenseManagementServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(LicenseManagementServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + LicenseManagementServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + LicenseManagementServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + LicenseManagementServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + LicenseManagementServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + LicenseManagementServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + LicenseManagementServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + LicenseManagementServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LicenseManagementServiceAsyncClient: The constructed client. 
+ """ + return LicenseManagementServiceClient.from_service_account_info.__func__(LicenseManagementServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LicenseManagementServiceAsyncClient: The constructed client. + """ + return LicenseManagementServiceClient.from_service_account_file.__func__(LicenseManagementServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return LicenseManagementServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> LicenseManagementServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LicenseManagementServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = LicenseManagementServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + LicenseManagementServiceTransport, + Callable[..., LicenseManagementServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the license management service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,LicenseManagementServiceTransport,Callable[..., LicenseManagementServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LicenseManagementServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = LicenseManagementServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_license_pool( + self, + request: Optional[ + Union[license_management_service.GetLicensePoolRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Gets the license pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_get_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.GetLicensePoolRequest( + name="name_value", + ) + + # Make the request + response = await client.get_license_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.GetLicensePoolRequest, dict]]): + The request object. Request message for getting a license + pool. + name (:class:`str`): + Required. The name of the license pool to get. 
Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.GetLicensePoolRequest): + request = license_management_service.GetLicensePoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_license_pool + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_license_pool( + self, + request: Optional[ + Union[license_management_service.UpdateLicensePoolRequest, dict] + ] = None, + *, + license_pool: Optional[license_management_service.LicensePool] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Updates the license pool if one exists for this + Order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_update_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UpdateLicensePoolRequest( + ) + + # Make the request + response = await client.update_license_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.UpdateLicensePoolRequest, dict]]): + The request object. Request message for updating a + license pool. + license_pool (:class:`google.cloud.commerce_consumer_procurement_v1.types.LicensePool`): + Required. The license pool to update. + + The license pool's name field is used to identify the + license pool to update. 
Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool``. + + This corresponds to the ``license_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([license_pool, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.UpdateLicensePoolRequest): + request = license_management_service.UpdateLicensePoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if license_pool is not None: + request.license_pool = license_pool + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_license_pool + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("license_pool.name", request.license_pool.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def assign( + self, + request: Optional[Union[license_management_service.AssignRequest, dict]] = None, + *, + parent: Optional[str] = None, + usernames: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.AssignResponse: + r"""Assigns a license to a user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_assign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.AssignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.assign(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.AssignRequest, dict]]): + The request object. Request message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + parent (:class:`str`): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + usernames (:class:`MutableSequence[str]`): + Required. Username. Format: ``name@domain.com``. + This corresponds to the ``usernames`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.AssignResponse: + Response message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, usernames]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.AssignRequest): + request = license_management_service.AssignRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if usernames: + request.usernames.extend(usernames) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.assign] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def unassign( + self, + request: Optional[ + Union[license_management_service.UnassignRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + usernames: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.UnassignResponse: + r"""Unassigns a license from a user. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_unassign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UnassignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.unassign(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.UnassignRequest, dict]]): + The request object. Request message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + parent (:class:`str`): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + usernames (:class:`MutableSequence[str]`): + Required. Username. Format: ``name@domain.com``. + This corresponds to the ``usernames`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.commerce_consumer_procurement_v1.types.UnassignResponse: + Response message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, usernames]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.UnassignRequest): + request = license_management_service.UnassignRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if usernames: + request.usernames.extend(usernames) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.unassign] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def enumerate_licensed_users( + self, + request: Optional[ + Union[license_management_service.EnumerateLicensedUsersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.EnumerateLicensedUsersAsyncPager: + r"""Enumerates all users assigned a license. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_enumerate_licensed_users(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.EnumerateLicensedUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.enumerate_licensed_users(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest, dict]]): + The request object. Request message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + parent (:class:`str`): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers.EnumerateLicensedUsersAsyncPager: + Response message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, license_management_service.EnumerateLicensedUsersRequest + ): + request = license_management_service.EnumerateLicensedUsersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.enumerate_licensed_users + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.EnumerateLicensedUsersAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "LicenseManagementServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("LicenseManagementServiceAsyncClient",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py new file mode 100644 index 000000000000..5c8aee6b83e5 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py @@ -0,0 +1,1339 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.commerce_consumer_procurement_v1 import ( + gapic_version as package_version, +) + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service import ( + pagers, +) +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +from .transports.base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport +from .transports.grpc import LicenseManagementServiceGrpcTransport +from .transports.grpc_asyncio import LicenseManagementServiceGrpcAsyncIOTransport +from .transports.rest import LicenseManagementServiceRestTransport + + +class LicenseManagementServiceClientMeta(type): + """Metaclass for the LicenseManagementService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[LicenseManagementServiceTransport]] + _transport_registry["grpc"] = LicenseManagementServiceGrpcTransport + _transport_registry["grpc_asyncio"] = LicenseManagementServiceGrpcAsyncIOTransport + _transport_registry["rest"] = LicenseManagementServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[LicenseManagementServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class LicenseManagementServiceClient(metaclass=LicenseManagementServiceClientMeta): + """Service for managing licenses.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. 
# Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
DEFAULT_ENDPOINT = "cloudcommerceconsumerprocurement.googleapis.com"
# Computed at class-definition time; ``__func__`` unwraps the staticmethod so
# it can be called before the class object itself exists.
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
    DEFAULT_ENDPOINT
)

# Endpoint template parameterized by universe domain (see _get_api_endpoint).
_DEFAULT_ENDPOINT_TEMPLATE = "cloudcommerceconsumerprocurement.{UNIVERSE_DOMAIN}"
_DEFAULT_UNIVERSE = "googleapis.com"

@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
    """Creates an instance of this client using the provided credentials
    info.

    Args:
        info (dict): The service account private key info.
        args: Additional arguments to pass to the constructor.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        LicenseManagementServiceClient: The constructed client.
    """
    # Any caller-supplied ``credentials`` kwarg is overwritten by the
    # service-account credentials built from ``info``.
    credentials = service_account.Credentials.from_service_account_info(info)
    kwargs["credentials"] = credentials
    return cls(*args, **kwargs)

@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
    """Creates an instance of this client using the provided credentials
    file.

    Args:
        filename (str): The path to the service account private key json
            file.
        args: Additional arguments to pass to the constructor.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        LicenseManagementServiceClient: The constructed client.
    """
    credentials = service_account.Credentials.from_service_account_file(filename)
    kwargs["credentials"] = credentials
    return cls(*args, **kwargs)

# Alias kept for backward compatibility with older client surfaces.
from_service_account_json = from_service_account_file
# Resource-path helpers. FIX applied throughout: the extracted source read
# "(?P.+?)" in every parse_* pattern — the angle-bracketed group names had
# been stripped, leaving invalid regexes. Named groups restored.

@staticmethod
def license_pool_path(
    billing_account: str,
    order: str,
) -> str:
    """Returns a fully-qualified license_pool string."""
    return "billingAccounts/{billing_account}/orders/{order}/licensePool".format(
        billing_account=billing_account,
        order=order,
    )

@staticmethod
def parse_license_pool_path(path: str) -> Dict[str, str]:
    """Parses a license_pool path into its component segments."""
    m = re.match(
        r"^billingAccounts/(?P<billing_account>.+?)/orders/(?P<order>.+?)/licensePool$",
        path,
    )
    # Non-matching paths yield an empty dict rather than raising.
    return m.groupdict() if m else {}

@staticmethod
def common_billing_account_path(
    billing_account: str,
) -> str:
    """Returns a fully-qualified billing_account string."""
    return "billingAccounts/{billing_account}".format(
        billing_account=billing_account,
    )

@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
    """Parse a billing_account path into its component segments."""
    m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
    return m.groupdict() if m else {}

@staticmethod
def common_folder_path(
    folder: str,
) -> str:
    """Returns a fully-qualified folder string."""
    return "folders/{folder}".format(
        folder=folder,
    )

@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
    """Parse a folder path into its component segments."""
    m = re.match(r"^folders/(?P<folder>.+?)$", path)
    return m.groupdict() if m else {}

@staticmethod
def common_organization_path(
    organization: str,
) -> str:
    """Returns a fully-qualified organization string."""
    return "organizations/{organization}".format(
        organization=organization,
    )

@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
    """Parse a organization path into its component segments."""
    m = re.match(r"^organizations/(?P<organization>.+?)$", path)
    return m.groupdict() if m else {}
@staticmethod
def common_project_path(
    project: str,
) -> str:
    """Returns a fully-qualified project string."""
    return "projects/{project}".format(
        project=project,
    )

@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
    """Parse a project path into its component segments."""
    # FIX: extracted source read "(?P.+?)" — named group restored; the
    # stripped form is an invalid regex.
    m = re.match(r"^projects/(?P<project>.+?)$", path)
    # Non-matching paths yield an empty dict rather than raising.
    return m.groupdict() if m else {}

@staticmethod
def common_location_path(
    project: str,
    location: str,
) -> str:
    """Returns a fully-qualified location string."""
    return "projects/{project}/locations/{location}".format(
        project=project,
        location=location,
    )

@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
    """Parse a location path into its component segments."""
    # FIX: named groups restored (were stripped to "(?P.+?)" on extraction).
    m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
    return m.groupdict() if m else {}
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
@staticmethod
def _get_client_cert_source(provided_cert_source, use_cert_flag):
    """Return the client cert source to be used by the client.

    Args:
        provided_cert_source (bytes): The client certificate source provided.
        use_cert_flag (bool): A flag indicating whether to use the client certificate.

    Returns:
        bytes or None: The client cert source to be used by the client.
    """
    # Client certificates are only considered at all when the flag is set.
    if not use_cert_flag:
        return None
    # An explicitly provided source wins over the ambient default.
    if provided_cert_source:
        return provided_cert_source
    # Otherwise fall back to the default source, when one is available.
    if mtls.has_default_client_cert_source():
        return mtls.default_client_cert_source()
    return None
@staticmethod
def _get_universe_domain(
    client_universe_domain: Optional[str], universe_domain_env: Optional[str]
) -> str:
    """Return the universe domain used by the client.

    Args:
        client_universe_domain (Optional[str]): The universe domain configured via the client options.
        universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.

    Returns:
        str: The universe domain to be used by the client.

    Raises:
        ValueError: If the universe domain is an empty string.
    """
    # Precedence: explicit client option > environment variable > default.
    universe_domain = LicenseManagementServiceClient._DEFAULT_UNIVERSE
    if client_universe_domain is not None:
        universe_domain = client_universe_domain
    elif universe_domain_env is not None:
        universe_domain = universe_domain_env
    if len(universe_domain.strip()) == 0:
        raise ValueError("Universe Domain cannot be an empty string.")
    return universe_domain

@staticmethod
def _compare_universes(
    client_universe: str, credentials: ga_credentials.Credentials
) -> bool:
    """Returns True iff the universe domains used by the client and credentials match.

    Args:
        client_universe (str): The universe domain configured via the client options.
        credentials (ga_credentials.Credentials): The credentials being used in the client.

    Returns:
        bool: True iff client_universe matches the universe in credentials.

    Raises:
        ValueError: when client_universe does not match the universe in credentials.
    """

    default_universe = LicenseManagementServiceClient._DEFAULT_UNIVERSE
    # Credentials that predate universe support expose no ``universe_domain``
    # attribute; they are treated as belonging to the default universe.
    credentials_universe = getattr(credentials, "universe_domain", default_universe)

    if client_universe != credentials_universe:
        raise ValueError(
            "The configured universe domain "
            f"({client_universe}) does not match the universe domain "
            f"found in the credentials ({credentials_universe}). "
            "If you haven't configured the universe domain explicitly, "
            f"`{default_universe}` is the default."
        )
    return True
+ """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or LicenseManagementServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + LicenseManagementServiceTransport, + Callable[..., LicenseManagementServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the license management service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,LicenseManagementServiceTransport,Callable[..., LicenseManagementServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LicenseManagementServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = LicenseManagementServiceClient._read_environment_variables() + self._client_cert_source = ( + LicenseManagementServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = LicenseManagementServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, LicenseManagementServiceTransport) + if transport_provided: + # transport is a LicenseManagementServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
def get_license_pool(
    self,
    request: Optional[
        Union[license_management_service.GetLicensePoolRequest, dict]
    ] = None,
    *,
    name: Optional[str] = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, str]] = (),
) -> license_management_service.LicensePool:
    r"""Gets the license pool.

    .. code-block:: python

        # This snippet has been automatically generated and should be regarded as a
        # code template only.
        # It will require modifications to work:
        # - It may require correct/in-range values for request initialization.
        # - It may require specifying regional endpoints when creating the service
        #   client as shown in:
        #   https://googleapis.dev/python/google-api-core/latest/client_options.html
        from google.cloud import commerce_consumer_procurement_v1

        def sample_get_license_pool():
            # Create a client
            client = commerce_consumer_procurement_v1.LicenseManagementServiceClient()

            # Initialize request argument(s)
            request = commerce_consumer_procurement_v1.GetLicensePoolRequest(
                name="name_value",
            )

            # Make the request
            response = client.get_license_pool(request=request)

            # Handle the response
            print(response)

    Args:
        request (Union[google.cloud.commerce_consumer_procurement_v1.types.GetLicensePoolRequest, dict]):
            The request object. Request message for getting a license
            pool.
        name (str):
            Required. The name of the license pool to get. Format:
            ``billingAccounts/{billing_account}/orders/{order}/licensePool``

            This corresponds to the ``name`` field
            on the ``request`` instance; if ``request`` is provided, this
            should not be set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.cloud.commerce_consumer_procurement_v1.types.LicensePool:
            A license pool represents a pool of
            licenses that can be assigned to users.

    """
    # Create or coerce a protobuf request object.
    # - Quick check: If we got a request object, we should *not* have
    #   gotten any keyword arguments that map to the request.
    has_flattened_params = any([name])
    if request is not None and has_flattened_params:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # - Use the request object if provided (there's no risk of modifying the input as
    #   there are no flattened fields), or create one.
    if not isinstance(request, license_management_service.GetLicensePoolRequest):
        request = license_management_service.GetLicensePoolRequest(request)
    # If we have keyword arguments corresponding to fields on the
    # request, apply these.
    if name is not None:
        request.name = name

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.get_license_pool]

    # Certain fields should be provided within the metadata header;
    # add these here (the resource name is sent as a routing header).
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
    )

    # Validate the universe domain before any network traffic.
    self._validate_universe_domain()

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Done; return the response.
    return response
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_update_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UpdateLicensePoolRequest( + ) + + # Make the request + response = client.update_license_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.UpdateLicensePoolRequest, dict]): + The request object. Request message for updating a + license pool. + license_pool (google.cloud.commerce_consumer_procurement_v1.types.LicensePool): + Required. The license pool to update. + + The license pool's name field is used to identify the + license pool to update. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool``. + + This corresponds to the ``license_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([license_pool, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.UpdateLicensePoolRequest): + request = license_management_service.UpdateLicensePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if license_pool is not None: + request.license_pool = license_pool + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_license_pool] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("license_pool.name", request.license_pool.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def assign( + self, + request: Optional[Union[license_management_service.AssignRequest, dict]] = None, + *, + parent: Optional[str] = None, + usernames: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.AssignResponse: + r"""Assigns a license to a user. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_assign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.AssignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = client.assign(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.AssignRequest, dict]): + The request object. Request message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + parent (str): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + usernames (MutableSequence[str]): + Required. Username. Format: ``name@domain.com``. + This corresponds to the ``usernames`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.commerce_consumer_procurement_v1.types.AssignResponse: + Response message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, usernames]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.AssignRequest): + request = license_management_service.AssignRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if usernames is not None: + request.usernames = usernames + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.assign] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def unassign( + self, + request: Optional[ + Union[license_management_service.UnassignRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + usernames: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.UnassignResponse: + r"""Unassigns a license from a user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_unassign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UnassignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = client.unassign(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.UnassignRequest, dict]): + The request object. Request message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + parent (str): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + usernames (MutableSequence[str]): + Required. Username. Format: ``name@domain.com``. 
+ This corresponds to the ``usernames`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.UnassignResponse: + Response message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, usernames]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.UnassignRequest): + request = license_management_service.UnassignRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if usernames is not None: + request.usernames = usernames + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.unassign] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def enumerate_licensed_users( + self, + request: Optional[ + Union[license_management_service.EnumerateLicensedUsersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.EnumerateLicensedUsersPager: + r"""Enumerates all users assigned a license. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_enumerate_licensed_users(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.EnumerateLicensedUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.enumerate_licensed_users(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest, dict]): + The request object. Request message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + parent (str): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers.EnumerateLicensedUsersPager: + Response message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, license_management_service.EnumerateLicensedUsersRequest + ): + request = license_management_service.EnumerateLicensedUsersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.enumerate_licensed_users] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.EnumerateLicensedUsersPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "LicenseManagementServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("LicenseManagementServiceClient",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/pagers.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/pagers.py new file mode 100644 index 000000000000..c2d5464940a4 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/pagers.py @@ -0,0 +1,207 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + + +class EnumerateLicensedUsersPager: + """A pager for iterating through ``enumerate_licensed_users`` requests. + + This class thinly wraps an initial + :class:`google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``licensed_users`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``EnumerateLicensedUsers`` requests and continue to iterate + through the ``licensed_users`` field on the + corresponding responses. + + All the usual :class:`google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., license_management_service.EnumerateLicensedUsersResponse + ], + request: license_management_service.EnumerateLicensedUsersRequest, + response: license_management_service.EnumerateLicensedUsersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest): + The initial request object. + response (google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = license_management_service.EnumerateLicensedUsersRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[license_management_service.EnumerateLicensedUsersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[license_management_service.LicensedUser]: + for page in self.pages: + yield from page.licensed_users + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class EnumerateLicensedUsersAsyncPager: + """A pager for iterating through ``enumerate_licensed_users`` requests. + + This class thinly wraps an initial + :class:`google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``licensed_users`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``EnumerateLicensedUsers`` requests and continue to iterate + through the ``licensed_users`` field on the + corresponding responses. + + All the usual :class:`google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[license_management_service.EnumerateLicensedUsersResponse] + ], + request: license_management_service.EnumerateLicensedUsersRequest, + response: license_management_service.EnumerateLicensedUsersResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest): + The initial request object. + response (google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = license_management_service.EnumerateLicensedUsersRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[license_management_service.EnumerateLicensedUsersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[license_management_service.LicensedUser]: + async def async_generator(): + async for page in self.pages: + for response in page.licensed_users: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/__init__.py new file mode 100644 index 000000000000..cdaddef08466 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import LicenseManagementServiceTransport +from .grpc import LicenseManagementServiceGrpcTransport +from .grpc_asyncio import LicenseManagementServiceGrpcAsyncIOTransport +from .rest import ( + LicenseManagementServiceRestInterceptor, + LicenseManagementServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[LicenseManagementServiceTransport]] +_transport_registry["grpc"] = LicenseManagementServiceGrpcTransport +_transport_registry["grpc_asyncio"] = LicenseManagementServiceGrpcAsyncIOTransport +_transport_registry["rest"] = LicenseManagementServiceRestTransport + +__all__ = ( + "LicenseManagementServiceTransport", + "LicenseManagementServiceGrpcTransport", + "LicenseManagementServiceGrpcAsyncIOTransport", + "LicenseManagementServiceRestTransport", + "LicenseManagementServiceRestInterceptor", +) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/base.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/base.py similarity index 72% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/base.py rename to packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/base.py 
index 232b9f043b8c..416fb9788d4c 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/base.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/base.py @@ -16,28 +16,33 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.api import httpbody_pb2 # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore -from google.cloud.gkeconnect.gateway_v1beta1 import gapic_version as package_version +from google.cloud.commerce_consumer_procurement_v1 import ( + gapic_version as package_version, +) +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) -class GatewayServiceTransport(abc.ABC): - """Abstract transport class for GatewayService.""" +class LicenseManagementServiceTransport(abc.ABC): + """Abstract transport class for LicenseManagementService.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - DEFAULT_HOST: str = "connectgateway.googleapis.com" + DEFAULT_HOST: str = "cloudcommerceconsumerprocurement.googleapis.com" def __init__( self, @@ -56,7 +61,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to (default: 'connectgateway.googleapis.com'). + The hostname to connect to (default: 'cloudcommerceconsumerprocurement.googleapis.com'). 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -128,28 +133,28 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { - self.get_resource: gapic_v1.method.wrap_method( - self.get_resource, + self.get_license_pool: gapic_v1.method.wrap_method( + self.get_license_pool, default_timeout=None, client_info=client_info, ), - self.post_resource: gapic_v1.method.wrap_method( - self.post_resource, + self.update_license_pool: gapic_v1.method.wrap_method( + self.update_license_pool, default_timeout=None, client_info=client_info, ), - self.delete_resource: gapic_v1.method.wrap_method( - self.delete_resource, + self.assign: gapic_v1.method.wrap_method( + self.assign, default_timeout=None, client_info=client_info, ), - self.put_resource: gapic_v1.method.wrap_method( - self.put_resource, + self.unassign: gapic_v1.method.wrap_method( + self.unassign, default_timeout=None, client_info=client_info, ), - self.patch_resource: gapic_v1.method.wrap_method( - self.patch_resource, + self.enumerate_licensed_users: gapic_v1.method.wrap_method( + self.enumerate_licensed_users, default_timeout=None, client_info=client_info, ), @@ -165,47 +170,71 @@ def close(self): raise NotImplementedError() @property - def get_resource( + def get_license_pool( + self, + ) -> Callable[ + [license_management_service.GetLicensePoolRequest], + Union[ + license_management_service.LicensePool, + Awaitable[license_management_service.LicensePool], + ], + ]: + raise NotImplementedError() + + @property + def update_license_pool( self, ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], + [license_management_service.UpdateLicensePoolRequest], + Union[ + license_management_service.LicensePool, + Awaitable[license_management_service.LicensePool], + ], ]: raise 
NotImplementedError() @property - def post_resource( + def assign( self, ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], + [license_management_service.AssignRequest], + Union[ + license_management_service.AssignResponse, + Awaitable[license_management_service.AssignResponse], + ], ]: raise NotImplementedError() @property - def delete_resource( + def unassign( self, ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], + [license_management_service.UnassignRequest], + Union[ + license_management_service.UnassignResponse, + Awaitable[license_management_service.UnassignResponse], + ], ]: raise NotImplementedError() @property - def put_resource( + def enumerate_licensed_users( self, ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], + [license_management_service.EnumerateLicensedUsersRequest], + Union[ + license_management_service.EnumerateLicensedUsersResponse, + Awaitable[license_management_service.EnumerateLicensedUsersResponse], + ], ]: raise NotImplementedError() @property - def patch_resource( + def get_operation( self, ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @@ -214,4 +243,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ("GatewayServiceTransport",) +__all__ = ("LicenseManagementServiceTransport",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc.py new file mode 100644 index 000000000000..ff5a937a6e70 --- 
/dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +from .base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport + + +class LicenseManagementServiceGrpcTransport(LicenseManagementServiceTransport): + """gRPC backend transport for LicenseManagementService. + + Service for managing licenses. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "cloudcommerceconsumerprocurement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudcommerceconsumerprocurement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "cloudcommerceconsumerprocurement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_license_pool( + self, + ) -> Callable[ + [license_management_service.GetLicensePoolRequest], + license_management_service.LicensePool, + ]: + r"""Return a callable for the get license pool method over gRPC. + + Gets the license pool. + + Returns: + Callable[[~.GetLicensePoolRequest], + ~.LicensePool]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_license_pool" not in self._stubs: + self._stubs["get_license_pool"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/GetLicensePool", + request_serializer=license_management_service.GetLicensePoolRequest.serialize, + response_deserializer=license_management_service.LicensePool.deserialize, + ) + return self._stubs["get_license_pool"] + + @property + def update_license_pool( + self, + ) -> Callable[ + [license_management_service.UpdateLicensePoolRequest], + license_management_service.LicensePool, + ]: + r"""Return a callable for the update license pool method over gRPC. + + Updates the license pool if one exists for this + Order. + + Returns: + Callable[[~.UpdateLicensePoolRequest], + ~.LicensePool]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_license_pool" not in self._stubs: + self._stubs["update_license_pool"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/UpdateLicensePool", + request_serializer=license_management_service.UpdateLicensePoolRequest.serialize, + response_deserializer=license_management_service.LicensePool.deserialize, + ) + return self._stubs["update_license_pool"] + + @property + def assign( + self, + ) -> Callable[ + [license_management_service.AssignRequest], + license_management_service.AssignResponse, + ]: + r"""Return a callable for the assign method over gRPC. + + Assigns a license to a user. + + Returns: + Callable[[~.AssignRequest], + ~.AssignResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "assign" not in self._stubs: + self._stubs["assign"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/Assign", + request_serializer=license_management_service.AssignRequest.serialize, + response_deserializer=license_management_service.AssignResponse.deserialize, + ) + return self._stubs["assign"] + + @property + def unassign( + self, + ) -> Callable[ + [license_management_service.UnassignRequest], + license_management_service.UnassignResponse, + ]: + r"""Return a callable for the unassign method over gRPC. + + Unassigns a license from a user. + + Returns: + Callable[[~.UnassignRequest], + ~.UnassignResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "unassign" not in self._stubs: + self._stubs["unassign"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/Unassign", + request_serializer=license_management_service.UnassignRequest.serialize, + response_deserializer=license_management_service.UnassignResponse.deserialize, + ) + return self._stubs["unassign"] + + @property + def enumerate_licensed_users( + self, + ) -> Callable[ + [license_management_service.EnumerateLicensedUsersRequest], + license_management_service.EnumerateLicensedUsersResponse, + ]: + r"""Return a callable for the enumerate licensed users method over gRPC. + + Enumerates all users assigned a license. + + Returns: + Callable[[~.EnumerateLicensedUsersRequest], + ~.EnumerateLicensedUsersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enumerate_licensed_users" not in self._stubs: + self._stubs["enumerate_licensed_users"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/EnumerateLicensedUsers", + request_serializer=license_management_service.EnumerateLicensedUsersRequest.serialize, + response_deserializer=license_management_service.EnumerateLicensedUsersResponse.deserialize, + ) + return self._stubs["enumerate_licensed_users"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("LicenseManagementServiceGrpcTransport",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc_asyncio.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..77f1f9a02681 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/grpc_asyncio.py @@ -0,0 +1,441 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +from .base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport +from .grpc import LicenseManagementServiceGrpcTransport + + +class LicenseManagementServiceGrpcAsyncIOTransport(LicenseManagementServiceTransport): + """gRPC AsyncIO backend transport for LicenseManagementService. + + Service for managing licenses. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "cloudcommerceconsumerprocurement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "cloudcommerceconsumerprocurement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudcommerceconsumerprocurement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def get_license_pool( + self, + ) -> Callable[ + [license_management_service.GetLicensePoolRequest], + Awaitable[license_management_service.LicensePool], + ]: + r"""Return a callable for the get license pool method over gRPC. + + Gets the license pool. + + Returns: + Callable[[~.GetLicensePoolRequest], + Awaitable[~.LicensePool]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_license_pool" not in self._stubs: + self._stubs["get_license_pool"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/GetLicensePool", + request_serializer=license_management_service.GetLicensePoolRequest.serialize, + response_deserializer=license_management_service.LicensePool.deserialize, + ) + return self._stubs["get_license_pool"] + + @property + def update_license_pool( + self, + ) -> Callable[ + [license_management_service.UpdateLicensePoolRequest], + Awaitable[license_management_service.LicensePool], + ]: + r"""Return a callable for the update license pool method over gRPC. + + Updates the license pool if one exists for this + Order. + + Returns: + Callable[[~.UpdateLicensePoolRequest], + Awaitable[~.LicensePool]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_license_pool" not in self._stubs: + self._stubs["update_license_pool"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/UpdateLicensePool", + request_serializer=license_management_service.UpdateLicensePoolRequest.serialize, + response_deserializer=license_management_service.LicensePool.deserialize, + ) + return self._stubs["update_license_pool"] + + @property + def assign( + self, + ) -> Callable[ + [license_management_service.AssignRequest], + Awaitable[license_management_service.AssignResponse], + ]: + r"""Return a callable for the assign method over gRPC. + + Assigns a license to a user. + + Returns: + Callable[[~.AssignRequest], + Awaitable[~.AssignResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "assign" not in self._stubs: + self._stubs["assign"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/Assign", + request_serializer=license_management_service.AssignRequest.serialize, + response_deserializer=license_management_service.AssignResponse.deserialize, + ) + return self._stubs["assign"] + + @property + def unassign( + self, + ) -> Callable[ + [license_management_service.UnassignRequest], + Awaitable[license_management_service.UnassignResponse], + ]: + r"""Return a callable for the unassign method over gRPC. + + Unassigns a license from a user. + + Returns: + Callable[[~.UnassignRequest], + Awaitable[~.UnassignResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "unassign" not in self._stubs: + self._stubs["unassign"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/Unassign", + request_serializer=license_management_service.UnassignRequest.serialize, + response_deserializer=license_management_service.UnassignResponse.deserialize, + ) + return self._stubs["unassign"] + + @property + def enumerate_licensed_users( + self, + ) -> Callable[ + [license_management_service.EnumerateLicensedUsersRequest], + Awaitable[license_management_service.EnumerateLicensedUsersResponse], + ]: + r"""Return a callable for the enumerate licensed users method over gRPC. + + Enumerates all users assigned a license. + + Returns: + Callable[[~.EnumerateLicensedUsersRequest], + Awaitable[~.EnumerateLicensedUsersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "enumerate_licensed_users" not in self._stubs: + self._stubs["enumerate_licensed_users"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.LicenseManagementService/EnumerateLicensedUsers", + request_serializer=license_management_service.EnumerateLicensedUsersRequest.serialize, + response_deserializer=license_management_service.EnumerateLicensedUsersResponse.deserialize, + ) + return self._stubs["enumerate_licensed_users"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_license_pool: gapic_v1.method_async.wrap_method( + self.get_license_pool, + default_timeout=None, + client_info=client_info, + ), + self.update_license_pool: gapic_v1.method_async.wrap_method( + self.update_license_pool, + default_timeout=None, + client_info=client_info, + ), + self.assign: gapic_v1.method_async.wrap_method( + self.assign, + default_timeout=None, + client_info=client_info, + ), + self.unassign: gapic_v1.method_async.wrap_method( + self.unassign, + default_timeout=None, + client_info=client_info, + ), + self.enumerate_licensed_users: gapic_v1.method_async.wrap_method( + self.enumerate_licensed_users, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + +__all__ = ("LicenseManagementServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/rest.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/rest.py new file mode 100644 index 000000000000..e2e5093c1b3a --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/rest.py @@ -0,0 +1,958 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import LicenseManagementServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class LicenseManagementServiceRestInterceptor: + """Interceptor for LicenseManagementService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the LicenseManagementServiceRestTransport. + + .. 
code-block:: python + class MyCustomLicenseManagementServiceInterceptor(LicenseManagementServiceRestInterceptor): + def pre_assign(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_assign(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enumerate_licensed_users(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enumerate_licensed_users(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_license_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_license_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_unassign(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_unassign(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_license_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_license_pool(self, response): + logging.log(f"Received response: {response}") + return response + + transport = LicenseManagementServiceRestTransport(interceptor=MyCustomLicenseManagementServiceInterceptor()) + client = LicenseManagementServiceClient(transport=transport) + + + """ + + def pre_assign( + self, + request: license_management_service.AssignRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[license_management_service.AssignRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for assign + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. 
+ """ + return request, metadata + + def post_assign( + self, response: license_management_service.AssignResponse + ) -> license_management_service.AssignResponse: + """Post-rpc interceptor for assign + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + def pre_enumerate_licensed_users( + self, + request: license_management_service.EnumerateLicensedUsersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + license_management_service.EnumerateLicensedUsersRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for enumerate_licensed_users + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. + """ + return request, metadata + + def post_enumerate_licensed_users( + self, response: license_management_service.EnumerateLicensedUsersResponse + ) -> license_management_service.EnumerateLicensedUsersResponse: + """Post-rpc interceptor for enumerate_licensed_users + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + def pre_get_license_pool( + self, + request: license_management_service.GetLicensePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + license_management_service.GetLicensePoolRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_license_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. 
+ """ + return request, metadata + + def post_get_license_pool( + self, response: license_management_service.LicensePool + ) -> license_management_service.LicensePool: + """Post-rpc interceptor for get_license_pool + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + def pre_unassign( + self, + request: license_management_service.UnassignRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[license_management_service.UnassignRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for unassign + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. + """ + return request, metadata + + def post_unassign( + self, response: license_management_service.UnassignResponse + ) -> license_management_service.UnassignResponse: + """Post-rpc interceptor for unassign + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + def pre_update_license_pool( + self, + request: license_management_service.UpdateLicensePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + license_management_service.UpdateLicensePoolRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_license_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. + """ + return request, metadata + + def post_update_license_pool( + self, response: license_management_service.LicensePool + ) -> license_management_service.LicensePool: + """Post-rpc interceptor for update_license_pool + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseManagementService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the LicenseManagementService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class LicenseManagementServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: LicenseManagementServiceRestInterceptor + + +class LicenseManagementServiceRestTransport(LicenseManagementServiceTransport): + """REST backend transport for LicenseManagementService. + + Service for managing licenses. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "cloudcommerceconsumerprocurement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[LicenseManagementServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudcommerceconsumerprocurement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or LicenseManagementServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _Assign(LicenseManagementServiceRestStub):
+        def __hash__(self):
+            return hash("Assign")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: license_management_service.AssignRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> license_management_service.AssignResponse:
+            r"""Call the assign method over HTTP.
+
+            Args:
+                request (~.license_management_service.AssignRequest):
+                    The request object. Request message for
+                [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.AssignResponse: + Response message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=billingAccounts/*/orders/*/licensePool}:assign", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_assign(request, metadata) + pb_request = license_management_service.AssignRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.AssignResponse() + pb_resp = license_management_service.AssignResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_assign(resp) + return resp + + class _EnumerateLicensedUsers(LicenseManagementServiceRestStub): + def __hash__(self): + return hash("EnumerateLicensedUsers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: license_management_service.EnumerateLicensedUsersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.EnumerateLicensedUsersResponse: + r"""Call the enumerate licensed users method over HTTP. + + Args: + request (~.license_management_service.EnumerateLicensedUsersRequest): + The request object. Request message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.EnumerateLicensedUsersResponse: + Response message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=billingAccounts/*/orders/*/licensePool}:enumerateLicensedUsers", + }, + ] + request, metadata = self._interceptor.pre_enumerate_licensed_users( + request, metadata + ) + pb_request = license_management_service.EnumerateLicensedUsersRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.EnumerateLicensedUsersResponse() + pb_resp = license_management_service.EnumerateLicensedUsersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enumerate_licensed_users(resp) + return resp + + class _GetLicensePool(LicenseManagementServiceRestStub): + def __hash__(self): + return hash("GetLicensePool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: license_management_service.GetLicensePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Call the get license pool method over HTTP. + + Args: + request (~.license_management_service.GetLicensePoolRequest): + The request object. Request message for getting a license + pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=billingAccounts/*/orders/*/licensePool}", + }, + ] + request, metadata = self._interceptor.pre_get_license_pool( + request, metadata + ) + pb_request = license_management_service.GetLicensePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.LicensePool() + pb_resp = license_management_service.LicensePool.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_license_pool(resp) + return resp + + class _Unassign(LicenseManagementServiceRestStub): + def __hash__(self): + return hash("Unassign") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: license_management_service.UnassignRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.UnassignResponse: + r"""Call the unassign method over HTTP. + + Args: + request (~.license_management_service.UnassignRequest): + The request object. Request message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.UnassignResponse: + Response message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=billingAccounts/*/orders/*/licensePool}:unassign", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_unassign(request, metadata) + pb_request = license_management_service.UnassignRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.UnassignResponse() + pb_resp = license_management_service.UnassignResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_unassign(resp) + return resp + + class _UpdateLicensePool(LicenseManagementServiceRestStub): + def __hash__(self): + return hash("UpdateLicensePool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: license_management_service.UpdateLicensePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Call the update license pool method over HTTP. + + Args: + request (~.license_management_service.UpdateLicensePoolRequest): + The request object. Request message for updating a + license pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.license_management_service.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{license_pool.name=billingAccounts/*/orders/*/licensePool/*}", + "body": "license_pool", + }, + ] + request, metadata = self._interceptor.pre_update_license_pool( + request, metadata + ) + pb_request = license_management_service.UpdateLicensePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = license_management_service.LicensePool() + pb_resp = license_management_service.LicensePool.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_license_pool(resp) + return resp + + @property + def assign( + self, + ) -> Callable[ + [license_management_service.AssignRequest], + license_management_service.AssignResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Assign(self._session, self._host, self._interceptor) # type: ignore + + @property + def enumerate_licensed_users( + self, + ) -> Callable[ + [license_management_service.EnumerateLicensedUsersRequest], + license_management_service.EnumerateLicensedUsersResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnumerateLicensedUsers(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_license_pool( + self, + ) -> Callable[ + [license_management_service.GetLicensePoolRequest], + license_management_service.LicensePool, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetLicensePool(self._session, self._host, self._interceptor) # type: ignore + + @property + def unassign( + self, + ) -> Callable[ + [license_management_service.UnassignRequest], + license_management_service.UnassignResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Unassign(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_license_pool( + self, + ) -> Callable[ + [license_management_service.UpdateLicensePoolRequest], + license_management_service.LicensePool, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateLicensePool(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(LicenseManagementServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=billingAccounts/*/orders/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("LicenseManagementServiceRestTransport",) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/__init__.py index cc4943418819..4e35259c31e1 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/__init__.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/__init__.py @@ -13,6 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from .license_management_service import ( + AssignmentProtocol, + AssignRequest, + AssignResponse, + EnumerateLicensedUsersRequest, + EnumerateLicensedUsersResponse, + GetLicensePoolRequest, + LicensedUser, + LicensePool, + UnassignRequest, + UnassignResponse, + UpdateLicensePoolRequest, +) from .order import ( LineItem, LineItemChange, @@ -25,14 +38,30 @@ Subscription, ) from .procurement_service import ( + AutoRenewalBehavior, + CancelOrderMetadata, + CancelOrderRequest, GetOrderRequest, ListOrdersRequest, ListOrdersResponse, + ModifyOrderMetadata, + ModifyOrderRequest, PlaceOrderMetadata, PlaceOrderRequest, ) __all__ = ( + "AssignmentProtocol", + "AssignRequest", + "AssignResponse", + "EnumerateLicensedUsersRequest", + "EnumerateLicensedUsersResponse", + "GetLicensePoolRequest", + "LicensedUser", + "LicensePool", + "UnassignRequest", + "UnassignResponse", + "UpdateLicensePoolRequest", "LineItem", "LineItemChange", "LineItemInfo", @@ -42,9 +71,14 @@ "LineItemChangeState", "LineItemChangeStateReasonType", "LineItemChangeType", + "CancelOrderMetadata", + "CancelOrderRequest", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderMetadata", + "ModifyOrderRequest", "PlaceOrderMetadata", "PlaceOrderRequest", + "AutoRenewalBehavior", ) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/license_management_service.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/license_management_service.py new file mode 100644 index 000000000000..4b13230f4ea8 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/license_management_service.py @@ -0,0 +1,331 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.commerce.consumer.procurement.v1", + manifest={ + "AssignmentProtocol", + "LicensePool", + "GetLicensePoolRequest", + "UpdateLicensePoolRequest", + "AssignRequest", + "AssignResponse", + "UnassignRequest", + "UnassignResponse", + "EnumerateLicensedUsersRequest", + "LicensedUser", + "EnumerateLicensedUsersResponse", + }, +) + + +class AssignmentProtocol(proto.Message): + r"""Assignment protocol for a license pool. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + manual_assignment_type (google.cloud.commerce_consumer_procurement_v1.types.AssignmentProtocol.ManualAssignmentType): + Allow manual assignments triggered by + administrative operations only. + + This field is a member of `oneof`_ ``assignment_type``. + auto_assignment_type (google.cloud.commerce_consumer_procurement_v1.types.AssignmentProtocol.AutoAssignmentType): + Allow automatic assignments triggered by data + plane operations. 
+ + This field is a member of `oneof`_ ``assignment_type``. + """ + + class ManualAssignmentType(proto.Message): + r"""Allow manual assignments triggered by administrative + operations only. + + """ + + class AutoAssignmentType(proto.Message): + r"""Configuration for automatic assignments handled by data plane + operations. + + Attributes: + inactive_license_ttl (google.protobuf.duration_pb2.Duration): + Optional. The time to live for an inactive + license. After this time has passed, the license + will be automatically unassigned from the user. + Must be at least 7 days, if set. If unset, the + license will never expire. + """ + + inactive_license_ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + manual_assignment_type: ManualAssignmentType = proto.Field( + proto.MESSAGE, + number=2, + oneof="assignment_type", + message=ManualAssignmentType, + ) + auto_assignment_type: AutoAssignmentType = proto.Field( + proto.MESSAGE, + number=3, + oneof="assignment_type", + message=AutoAssignmentType, + ) + + +class LicensePool(proto.Message): + r"""A license pool represents a pool of licenses that can be + assigned to users. + + Attributes: + name (str): + Identifier. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool`` + license_assignment_protocol (google.cloud.commerce_consumer_procurement_v1.types.AssignmentProtocol): + Required. Assignment protocol for the license + pool. + available_license_count (int): + Output only. Licenses count that are + available to be assigned. + total_license_count (int): + Output only. Total number of licenses in the + pool. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + license_assignment_protocol: "AssignmentProtocol" = proto.Field( + proto.MESSAGE, + number=2, + message="AssignmentProtocol", + ) + available_license_count: int = proto.Field( + proto.INT32, + number=3, + ) + total_license_count: int = proto.Field( + proto.INT32, + number=4, + ) + + +class GetLicensePoolRequest(proto.Message): + r"""Request message for getting a license pool. + + Attributes: + name (str): + Required. The name of the license pool to get. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateLicensePoolRequest(proto.Message): + r"""Request message for updating a license pool. + + Attributes: + license_pool (google.cloud.commerce_consumer_procurement_v1.types.LicensePool): + Required. The license pool to update. + + The license pool's name field is used to identify the + license pool to update. Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool``. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + license_pool: "LicensePool" = proto.Field( + proto.MESSAGE, + number=1, + message="LicensePool", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class AssignRequest(proto.Message): + r"""Request message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + Attributes: + parent (str): + Required. License pool name. + usernames (MutableSequence[str]): + Required. Username. Format: ``name@domain.com``. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + usernames: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class AssignResponse(proto.Message): + r"""Response message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + """ + + +class UnassignRequest(proto.Message): + r"""Request message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + + Attributes: + parent (str): + Required. License pool name. + usernames (MutableSequence[str]): + Required. Username. Format: ``name@domain.com``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + usernames: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class UnassignResponse(proto.Message): + r"""Response message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + + """ + + +class EnumerateLicensedUsersRequest(proto.Message): + r"""Request message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + + Attributes: + parent (str): + Required. License pool name. + page_size (int): + Optional. The maximum number of users to + return. The service may return fewer than this + value. + page_token (str): + Optional. A page token, received from a previous + ``EnumerateLicensedUsers`` call. Provide this to retrieve + the subsequent page. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class LicensedUser(proto.Message): + r"""A licensed user. + + Attributes: + username (str): + Username. Format: ``name@domain.com``. 
+ assign_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the license was + assigned. + recent_usage_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the license was + recently used. This may not be the most recent + usage time, and will be updated regularly + (within 24 hours). + """ + + username: str = proto.Field( + proto.STRING, + number=1, + ) + assign_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + recent_usage_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class EnumerateLicensedUsersResponse(proto.Message): + r"""Response message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + + Attributes: + licensed_users (MutableSequence[google.cloud.commerce_consumer_procurement_v1.types.LicensedUser]): + The list of licensed users. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + licensed_users: MutableSequence["LicensedUser"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LicensedUser", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/order.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/order.py index 70bce4301daf..f22c139cd202 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/order.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/order.py @@ -68,7 +68,8 @@ class LineItemChangeState(proto.Enum): Sentinel value. Do not use. LINE_ITEM_CHANGE_STATE_PENDING_APPROVAL (1): Change is in this state when a change is - initiated and waiting for partner approval. + initiated and waiting for partner approval. This + state is only applicable for pending change. LINE_ITEM_CHANGE_STATE_APPROVED (2): Change is in this state after it's approved by the partner or auto-approved but before it @@ -76,21 +77,26 @@ class LineItemChangeState(proto.Enum): cancelled depending on the new line item info property (pending Private Offer change cannot be cancelled and can only be overwritten by another - Private Offer). + Private Offer). This state is only applicable + for pending change. LINE_ITEM_CHANGE_STATE_COMPLETED (3): Change is in this state after it's been - activated. + activated. This state is only applicable for + change in history. LINE_ITEM_CHANGE_STATE_REJECTED (4): Change is in this state if it was rejected by - the partner. + the partner. This state is only applicable for + change in history. LINE_ITEM_CHANGE_STATE_ABANDONED (5): Change is in this state if it was abandoned - by the user. 
+ by the user. This state is only applicable for + change in history. LINE_ITEM_CHANGE_STATE_ACTIVATING (6): Change is in this state if it's currently being provisioned downstream. The change can't be overwritten or cancelled when it's in this - state. + state. This state is only applicable for pending + change. """ LINE_ITEM_CHANGE_STATE_UNSPECIFIED = 0 LINE_ITEM_CHANGE_STATE_PENDING_APPROVAL = 1 diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/procurement_service.py index 3a9de30378b8..93c2b0b600ab 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/types/procurement_service.py @@ -24,15 +24,39 @@ __protobuf__ = proto.module( package="google.cloud.commerce.consumer.procurement.v1", manifest={ + "AutoRenewalBehavior", "PlaceOrderRequest", "PlaceOrderMetadata", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderRequest", + "ModifyOrderMetadata", + "CancelOrderRequest", + "CancelOrderMetadata", }, ) +class AutoRenewalBehavior(proto.Enum): + r"""Indicates the auto renewal behavior customer specifies on + subscription. + + Values: + AUTO_RENEWAL_BEHAVIOR_UNSPECIFIED (0): + If unspecified, the auto renewal behavior + will follow the default config. + AUTO_RENEWAL_BEHAVIOR_ENABLE (1): + Auto Renewal will be enabled on subscription. + AUTO_RENEWAL_BEHAVIOR_DISABLE (2): + Auto Renewal will be disabled on + subscription. 
+ """ + AUTO_RENEWAL_BEHAVIOR_UNSPECIFIED = 0 + AUTO_RENEWAL_BEHAVIOR_ENABLE = 1 + AUTO_RENEWAL_BEHAVIOR_DISABLE = 2 + + class PlaceOrderRequest(proto.Message): r"""Request message for [ConsumerProcurementService.PlaceOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.PlaceOrder]. @@ -50,7 +74,7 @@ class PlaceOrderRequest(proto.Message): request_id (str): Optional. A unique identifier for this request. The server will ignore subsequent requests that provide a duplicate - request ID for at least 120 minutes after the first request. + request ID for at least 24 hours after the first request. The request ID must be a valid `UUID `__. @@ -176,4 +200,164 @@ def raw_page(self): ) +class ModifyOrderRequest(proto.Message): + r"""Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + + Attributes: + name (str): + Required. Name of the order to update. + modifications (MutableSequence[google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest.Modification]): + Optional. Modifications for an existing Order + created by an Offer. Required when Offer based + Order is being modified, except for when going + from an offer to a public plan. + display_name (str): + Optional. Updated display name of the order, + leave as empty if you do not want to update + current display name. + etag (str): + Optional. The weak etag, which can be + optionally populated, of the order that this + modify request is based on. Validation checking + will only happen if the invoker supplies this + field. + """ + + class Modification(proto.Message): + r"""Modifications to make on the order. + + Attributes: + line_item_id (str): + Required. ID of the existing line item to make change to. + Required when change type is + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_UPDATE] or + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_CANCEL]. 
+ change_type (google.cloud.commerce_consumer_procurement_v1.types.LineItemChangeType): + Required. Type of change to make. + new_line_item_info (google.cloud.commerce_consumer_procurement_v1.types.LineItemInfo): + Optional. The line item to update to. Required when + change_type is + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_CREATE] or + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_UPDATE]. + auto_renewal_behavior (google.cloud.commerce_consumer_procurement_v1.types.AutoRenewalBehavior): + Optional. Auto renewal behavior of the subscription for the + update. Applied when change_type is + [LineItemChangeType.LINE_ITEM_CHANGE_TYPE_UPDATE]. Follows + plan default config when this field is not specified. + """ + + line_item_id: str = proto.Field( + proto.STRING, + number=1, + ) + change_type: order.LineItemChangeType = proto.Field( + proto.ENUM, + number=2, + enum=order.LineItemChangeType, + ) + new_line_item_info: order.LineItemInfo = proto.Field( + proto.MESSAGE, + number=3, + message=order.LineItemInfo, + ) + auto_renewal_behavior: "AutoRenewalBehavior" = proto.Field( + proto.ENUM, + number=4, + enum="AutoRenewalBehavior", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + modifications: MutableSequence[Modification] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=Modification, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ModifyOrderMetadata(proto.Message): + r"""Message stored in the metadata field of the Operation returned by + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + + """ + + +class CancelOrderRequest(proto.Message): + r"""Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + + Attributes: + name (str): + Required. The resource name of the order. 
+ etag (str): + Optional. The weak etag, which can be + optionally populated, of the order that this + cancel request is based on. Validation checking + will only happen if the invoker supplies this + field. + cancellation_policy (google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest.CancellationPolicy): + Optional. Cancellation policy of this + request. + """ + + class CancellationPolicy(proto.Enum): + r"""Indicates the cancellation policy the customer uses to cancel + the order. + + Values: + CANCELLATION_POLICY_UNSPECIFIED (0): + If unspecified, cancellation will try to + cancel the order, if order cannot be immediately + cancelled, auto renewal will be turned off. + However, caller should avoid using the value as + it will yield a non-deterministic result. This + is still supported mainly to maintain existing + integrated usages and ensure backwards + compatibility. + CANCELLATION_POLICY_CANCEL_IMMEDIATELY (1): + Request will cancel the whole order + immediately, if order cannot be immediately + cancelled, the request will fail. + CANCELLATION_POLICY_CANCEL_AT_TERM_END (2): + Request will cancel the auto renewal, if + order is not subscription based, the request + will fail. + """ + CANCELLATION_POLICY_UNSPECIFIED = 0 + CANCELLATION_POLICY_CANCEL_IMMEDIATELY = 1 + CANCELLATION_POLICY_CANCEL_AT_TERM_END = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + cancellation_policy: CancellationPolicy = proto.Field( + proto.ENUM, + number=3, + enum=CancellationPolicy, + ) + + +class CancelOrderMetadata(proto.Message): + r"""Message stored in the metadata field of the Operation returned by + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. 
+ + """ + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py index cf5493b86bbc..ec8d212c9160 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.1.8" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/async_client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/async_client.py index 31816f320933..2c5fdff4be5d 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/async_client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,10 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConsumerProcurementServiceClient).get_transport_class, - type(ConsumerProcurementServiceClient), - ) + get_transport_class = ConsumerProcurementServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py index b9e8b61dc93e..2d84edd718bd 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py @@ -703,7 +703,7 @@ def __init__( Type[ConsumerProcurementServiceTransport], Callable[..., ConsumerProcurementServiceTransport], ] = ( - type(self).get_transport_class(transport) + ConsumerProcurementServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConsumerProcurementServiceTransport], transport) ) diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py new file mode 100644 index 000000000000..6af6267a3dfe --- /dev/null +++ 
b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py new file mode 100644 index 000000000000..d107e654d209 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py new file mode 100644 index 000000000000..d7995349fbc1 
--- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py new file mode 100644 index 000000000000..7d2280e2367b --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyOrder +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py new file mode 100644 index 000000000000..2c5e4b37f6e4 --- /dev/null 
+++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Assign +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_assign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.AssignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.assign(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py new file mode 100644 index 000000000000..ec7ea6ea12bf --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for Assign +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_assign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.AssignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = client.assign(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py new file mode 100644 index 000000000000..735a8c855ace --- /dev/null +++ 
b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnumerateLicensedUsers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_enumerate_licensed_users(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.EnumerateLicensedUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.enumerate_licensed_users(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py new file mode 100644 index 000000000000..ae35b74ee487 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnumerateLicensedUsers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_enumerate_licensed_users(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.EnumerateLicensedUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.enumerate_licensed_users(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py new file mode 100644 index 
000000000000..ca51289633c0 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLicensePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_get_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.GetLicensePoolRequest( + name="name_value", + ) + + # Make the request + response = await client.get_license_pool(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py new file mode 100644 index 000000000000..235e9b739fa0 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLicensePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_get_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.GetLicensePoolRequest( + name="name_value", + ) + + # Make the request + response = client.get_license_pool(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py new file mode 100644 index 000000000000..453d0136af27 --- /dev/null +++ 
b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Unassign +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_unassign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UnassignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.unassign(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py new file mode 100644 index 000000000000..22d3cfb4d955 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for Unassign +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_unassign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UnassignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = client.unassign(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py new file mode 100644 index 000000000000..9f6375daf58e --- /dev/null +++ 
b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLicensePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +async def sample_update_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UpdateLicensePoolRequest( + ) + + # Make the request + response = await client.update_license_pool(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_async] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py new file mode 100644 index 000000000000..8eff8c5b8d83 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLicensePool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-commerce-consumer-procurement + + +# [START cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import commerce_consumer_procurement_v1 + + +def sample_update_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UpdateLicensePoolRequest( + ) + + # Make the request + response = client.update_license_pool(request=request) + + # Handle the response + print(response) + +# [END cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_sync] diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json index a8da078691cb..acfa8fd2c13d 100644 --- a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json +++ 
b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json @@ -8,9 +8,162 @@ ], "language": "PYTHON", "name": "google-cloud-commerce-consumer-procurement", - "version": "0.1.7" + "version": "0.1.8" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient", + "shortName": "ConsumerProcurementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient.cancel_order", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", + "shortName": "ConsumerProcurementService" + }, + "shortName": "CancelOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "cancel_order" + }, + "description": "Sample for CancelOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient", + "shortName": "ConsumerProcurementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient.cancel_order", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", + "shortName": "ConsumerProcurementService" + }, + "shortName": "CancelOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "cancel_order" + }, + "description": "Sample for CancelOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_CancelOrder_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_cancel_order_sync.py" + }, { "canonical": true, "clientMethod": { @@ -341,19 +494,19 @@ "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient", "shortName": "ConsumerProcurementServiceAsyncClient" }, - "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient.place_order", + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient.modify_order", "method": { - "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.PlaceOrder", + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder", "service": { "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", "shortName": "ConsumerProcurementService" }, - "shortName": "PlaceOrder" + "shortName": "ModifyOrder" }, "parameters": [ { "name": "request", - "type": "google.cloud.commerce_consumer_procurement_v1.types.PlaceOrderRequest" + "type": "google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest" }, { "name": "retry", @@ -369,21 +522,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "place_order" + "shortName": "modify_order" }, - "description": "Sample for PlaceOrder", - "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_async.py", + "description": "Sample for ModifyOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_PlaceOrder_async", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_async", "segments": [ { - "end": 56, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 
56, + "end": 55, "start": 27, "type": "SHORT" }, @@ -393,22 +546,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_async.py" + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_async.py" }, { "canonical": true, @@ -417,7 +570,84 @@ "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient", "shortName": "ConsumerProcurementServiceClient" }, - "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient.place_order", + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient.modify_order", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", + "shortName": "ConsumerProcurementService" + }, + "shortName": "ModifyOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "modify_order" + }, + "description": "Sample for ModifyOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_ModifyOrder_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_modify_order_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient", + "shortName": "ConsumerProcurementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient.place_order", "method": { "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.PlaceOrder", "service": { @@ -444,14 +674,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", + "resultType": "google.api_core.operation_async.AsyncOperation", "shortName": "place_order" }, "description": "Sample for PlaceOrder", - "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_sync.py", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_PlaceOrder_sync", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_PlaceOrder_async", "segments": [ { "end": 56, @@ -484,7 +714,912 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_sync.py" + "title": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient", + "shortName": "ConsumerProcurementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.ConsumerProcurementServiceClient.place_order", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.PlaceOrder", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService", + "shortName": "ConsumerProcurementService" + }, + "shortName": "PlaceOrder" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.PlaceOrderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "place_order" + }, + "description": "Sample for PlaceOrder", + "file": "cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_ConsumerProcurementService_PlaceOrder_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"cloudcommerceconsumerprocurement_v1_generated_consumer_procurement_service_place_order_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.assign", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "Assign" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.AssignRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "usernames", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.AssignResponse", + "shortName": "assign" + }, + "description": "Sample for Assign", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.assign", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "Assign" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.AssignRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "usernames", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.AssignResponse", + "shortName": "assign" + }, + "description": "Sample for Assign", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Assign_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"cloudcommerceconsumerprocurement_v1_generated_license_management_service_assign_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.enumerate_licensed_users", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "EnumerateLicensedUsers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers.EnumerateLicensedUsersAsyncPager", + "shortName": "enumerate_licensed_users" + }, + "description": "Sample for EnumerateLicensedUsers", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.enumerate_licensed_users", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "EnumerateLicensedUsers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers.EnumerateLicensedUsersPager", + "shortName": "enumerate_licensed_users" + }, + "description": "Sample for EnumerateLicensedUsers", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_EnumerateLicensedUsers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_enumerate_licensed_users_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.get_license_pool", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.GetLicensePool", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "GetLicensePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.GetLicensePoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool", + "shortName": "get_license_pool" + }, + "description": "Sample for GetLicensePool", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.get_license_pool", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.GetLicensePool", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "GetLicensePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.GetLicensePoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool", + "shortName": "get_license_pool" + }, + "description": "Sample for GetLicensePool", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_GetLicensePool_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + 
"start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_get_license_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.unassign", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "Unassign" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.UnassignRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "usernames", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.UnassignResponse", + "shortName": "unassign" + }, + "description": "Sample for Unassign", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, 
+ "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.unassign", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "Unassign" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.UnassignRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "usernames", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.UnassignResponse", + "shortName": "unassign" + }, + "description": "Sample for Unassign", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_Unassign_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_unassign_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient", + "shortName": "LicenseManagementServiceAsyncClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient.update_license_pool", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.UpdateLicensePool", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "UpdateLicensePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.UpdateLicensePoolRequest" + }, + { + "name": "license_pool", + "type": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool", + "shortName": "update_license_pool" + }, + "description": "Sample for UpdateLicensePool", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 
38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient", + "shortName": "LicenseManagementServiceClient" + }, + "fullName": "google.cloud.commerce_consumer_procurement_v1.LicenseManagementServiceClient.update_license_pool", + "method": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.UpdateLicensePool", + "service": { + "fullName": "google.cloud.commerce.consumer.procurement.v1.LicenseManagementService", + "shortName": "LicenseManagementService" + }, + "shortName": "UpdateLicensePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.commerce_consumer_procurement_v1.types.UpdateLicensePoolRequest" + }, + { + "name": "license_pool", + "type": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.commerce_consumer_procurement_v1.types.LicensePool", + "shortName": "update_license_pool" + }, + "description": "Sample for UpdateLicensePool", + "file": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcommerceconsumerprocurement_v1_generated_LicenseManagementService_UpdateLicensePool_sync", + "segments": [ + { 
+ "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcommerceconsumerprocurement_v1_generated_license_management_service_update_license_pool_sync.py" } ] } diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json index 6fe6a56b1e43..2d1466a85369 100644 --- a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-commerce-consumer-procurement", - "version": "0.1.7" + "version": "0.1.8" }, "snippets": [ { diff --git a/packages/google-cloud-commerce-consumer-procurement/scripts/fixup_commerce_consumer_procurement_v1_keywords.py b/packages/google-cloud-commerce-consumer-procurement/scripts/fixup_commerce_consumer_procurement_v1_keywords.py index 940f6a1151c1..dea0ce76511c 100644 --- a/packages/google-cloud-commerce-consumer-procurement/scripts/fixup_commerce_consumer_procurement_v1_keywords.py +++ b/packages/google-cloud-commerce-consumer-procurement/scripts/fixup_commerce_consumer_procurement_v1_keywords.py @@ -39,9 +39,16 @@ def partition( class commerce_consumer_procurementCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 
'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'assign': ('parent', 'usernames', ), + 'cancel_order': ('name', 'etag', 'cancellation_policy', ), + 'enumerate_licensed_users': ('parent', 'page_size', 'page_token', ), + 'get_license_pool': ('name', ), 'get_order': ('name', ), 'list_orders': ('parent', 'page_size', 'page_token', 'filter', ), + 'modify_order': ('name', 'modifications', 'display_name', 'etag', ), 'place_order': ('parent', 'display_name', 'line_item_info', 'request_id', ), + 'unassign': ('parent', 'usernames', ), + 'update_license_pool': ('license_pool', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py index 907df7d5cdfa..0bf79c077912 100644 --- a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py @@ -1327,8 +1327,9 @@ def test_place_order_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.place_order(request) @@ -1382,26 +1383,28 @@ async def test_place_order_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.place_order - ] = mock_object + ] = mock_rpc request = {} await client.place_order(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.place_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1669,22 +1672,23 @@ async def test_get_order_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_order - ] = mock_object + ] = mock_rpc request = {} await client.get_order(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2037,22 +2041,23 @@ async def test_list_orders_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_orders - ] = mock_object + ] = mock_rpc request = {} await client.list_orders(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_orders(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2434,45 +2439,94 @@ async def test_list_orders_async_pages(): @pytest.mark.parametrize( "request_type", [ - procurement_service.PlaceOrderRequest, + procurement_service.ModifyOrderRequest, dict, ], ) -def test_place_order_rest(request_type): +def test_modify_order(request_type, transport: str = "grpc"): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "billingAccounts/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.modify_order(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.place_order(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = procurement_service.ModifyOrderRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, future.Future) -def test_place_order_rest_use_cached_wrapped_rpc(): +def test_modify_order_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.modify_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.ModifyOrderRequest() + + +def test_modify_order_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = procurement_service.ModifyOrderRequest( + name="name_value", + display_name="display_name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.modify_order(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.ModifyOrderRequest( + name="name_value", + display_name="display_name_value", + etag="etag_value", + ) + + +def test_modify_order_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -2480,132 +2534,1227 @@ def test_place_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.place_order in client._transport._wrapped_methods + assert client._transport.modify_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.place_order] = mock_rpc - + client._transport._wrapped_methods[client._transport.modify_order] = mock_rpc request = {} - client.place_order(request) + client.modify_order(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.place_order(request) + client.modify_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_place_order_rest_required_fields( - request_type=procurement_service.PlaceOrderRequest, +@pytest.mark.asyncio +async def test_modify_order_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.modify_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.ModifyOrderRequest() + + +@pytest.mark.asyncio +async def test_modify_order_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ConsumerProcurementServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request_init["display_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) 
+ # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.modify_order + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).place_order._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.modify_order + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.modify_order(request) - jsonified_request["parent"] = "parent_value" - jsonified_request["displayName"] = "display_name_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).place_order._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "displayName" in jsonified_request - assert jsonified_request["displayName"] == "display_name_value" + await client.modify_order(request) - client = ConsumerProcurementServiceClient( + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_modify_order_async( + transport: str = "grpc_asyncio", request_type=procurement_service.ModifyOrderRequest +): + client = ConsumerProcurementServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.modify_order(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = procurement_service.ModifyOrderRequest() + assert args[0] == request - response = client.place_order(request) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_modify_order_async_from_dict(): + await test_modify_order_async(request_type=dict) -def test_place_order_rest_unset_required_fields(): - transport = transports.ConsumerProcurementServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.place_order._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "displayName", - ) - ) +def test_modify_order_field_headers(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = procurement_service.ModifyOrderRequest() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_place_order_rest_interceptors(null_interceptor): - transport = transports.ConsumerProcurementServiceRestTransport( + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_modify_order_field_headers_async(): + client = ConsumerProcurementServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ConsumerProcurementServiceRestInterceptor(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = procurement_service.ModifyOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.modify_order), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.modify_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + procurement_service.CancelOrderRequest, + dict, + ], +) +def test_cancel_order(request_type, transport: str = "grpc"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = procurement_service.CancelOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_cancel_order_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.cancel_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.CancelOrderRequest() + + +def test_cancel_order_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = procurement_service.CancelOrderRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.cancel_order(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.CancelOrderRequest( + name="name_value", + etag="etag_value", + ) + + +def test_cancel_order_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.cancel_order] = mock_rpc + request = {} + client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.cancel_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_order_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.cancel_order() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == procurement_service.CancelOrderRequest() + + +@pytest.mark.asyncio +async def test_cancel_order_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.cancel_order + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.cancel_order + ] = mock_rpc + + request = {} + await client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.cancel_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_order_async( + transport: str = "grpc_asyncio", request_type=procurement_service.CancelOrderRequest +): + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = procurement_service.CancelOrderRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_cancel_order_async_from_dict(): + await test_cancel_order_async(request_type=dict) + + +def test_cancel_order_field_headers(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = procurement_service.CancelOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_order_field_headers_async(): + client = ConsumerProcurementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = procurement_service.CancelOrderRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_order), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.cancel_order(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + procurement_service.PlaceOrderRequest, + dict, + ], +) +def test_place_order_rest(request_type): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.place_order(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_place_order_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.place_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.place_order] = mock_rpc + + request = {} + client.place_order(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.place_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_place_order_rest_required_fields( + request_type=procurement_service.PlaceOrderRequest, +): + transport_class = transports.ConsumerProcurementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["display_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).place_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["displayName"] = "display_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).place_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "displayName" in jsonified_request + assert jsonified_request["displayName"] == "display_name_value" + + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.place_order(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_place_order_rest_unset_required_fields(): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.place_order._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "displayName", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_place_order_rest_interceptors(null_interceptor): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConsumerProcurementServiceRestInterceptor(), + ) + client = ConsumerProcurementServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "post_place_order" + ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "pre_place_order" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = procurement_service.PlaceOrderRequest.pb( + procurement_service.PlaceOrderRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = procurement_service.PlaceOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.place_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_place_order_rest_bad_request( + transport: str = "rest", request_type=procurement_service.PlaceOrderRequest +): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.place_order(request) + + +def test_place_order_rest_error(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + procurement_service.GetOrderRequest, + dict, + ], +) +def test_get_order_rest(request_type): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = order.Order( + name="name_value", + display_name="display_name_value", + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = order.Order.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_order(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, order.Order) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.etag == "etag_value" + + +def test_get_order_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_order in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_order] = mock_rpc + + request = {} + client.get_order(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_order(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_order_rest_required_fields( + request_type=procurement_service.GetOrderRequest, +): + transport_class = transports.ConsumerProcurementServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_order._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = order.Order() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = order.Order.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_order(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_order_rest_unset_required_fields(): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_order._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_order_rest_interceptors(null_interceptor): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConsumerProcurementServiceRestInterceptor(), + ) + client = ConsumerProcurementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "post_get_order" + ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "pre_get_order" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = procurement_service.GetOrderRequest.pb( + procurement_service.GetOrderRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = order.Order.to_json(order.Order()) + + request = procurement_service.GetOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = order.Order() + + client.get_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_order_rest_bad_request( + transport: str = "rest", request_type=procurement_service.GetOrderRequest +): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_order(request) + + +def test_get_order_rest_flattened(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = order.Order() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "billingAccounts/sample1/orders/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = order.Order.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_order(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=billingAccounts/*/orders/*}" % client.transport._host, args[1] + ) + + +def test_get_order_rest_flattened_error(transport: str = "rest"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_order( + procurement_service.GetOrderRequest(), + name="name_value", + ) + + +def test_get_order_rest_error(): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + procurement_service.ListOrdersRequest, + dict, + ], +) +def test_list_orders_rest(request_type): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = procurement_service.ListOrdersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = procurement_service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_orders(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListOrdersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_orders_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_orders in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_orders] = mock_rpc + + request = {} + client.list_orders(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_orders(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_orders_rest_required_fields( + request_type=procurement_service.ListOrdersRequest, +): + transport_class = transports.ConsumerProcurementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_orders._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_orders._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = procurement_service.ListOrdersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = procurement_service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_orders(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_orders_rest_unset_required_fields(): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_orders._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_orders_rest_interceptors(null_interceptor): + transport = transports.ConsumerProcurementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConsumerProcurementServiceRestInterceptor(), ) client = ConsumerProcurementServiceClient(transport=transport) with mock.patch.object( @@ -2613,16 +3762,14 @@ def 
test_place_order_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "post_place_order" + transports.ConsumerProcurementServiceRestInterceptor, "post_list_orders" ) as post, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "pre_place_order" + transports.ConsumerProcurementServiceRestInterceptor, "pre_list_orders" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = procurement_service.PlaceOrderRequest.pb( - procurement_service.PlaceOrderRequest() + pb_message = procurement_service.ListOrdersRequest.pb( + procurement_service.ListOrdersRequest() ) transcode.return_value = { "method": "post", @@ -2634,19 +3781,19 @@ def test_place_order_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = procurement_service.ListOrdersResponse.to_json( + procurement_service.ListOrdersResponse() ) - request = procurement_service.PlaceOrderRequest() + request = procurement_service.ListOrdersRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = procurement_service.ListOrdersResponse() - client.place_order( + client.list_orders( request, metadata=[ ("key", "val"), @@ -2658,8 +3805,8 @@ def test_place_order_rest_interceptors(null_interceptor): post.assert_called_once() -def test_place_order_rest_bad_request( - transport: str = "rest", request_type=procurement_service.PlaceOrderRequest +def test_list_orders_rest_bad_request( + transport: str = "rest", request_type=procurement_service.ListOrdersRequest ): client = 
ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2679,23 +3826,135 @@ def test_place_order_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.place_order(request) + client.list_orders(request) -def test_place_order_rest_error(): +def test_list_orders_rest_flattened(): client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = procurement_service.ListOrdersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "billingAccounts/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = procurement_service.ListOrdersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_orders(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=billingAccounts/*}/orders" % client.transport._host, args[1] + ) + + +def test_list_orders_rest_flattened_error(transport: str = "rest"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_orders( + procurement_service.ListOrdersRequest(), + parent="parent_value", + ) + + +def test_list_orders_rest_pager(transport: str = "rest"): + client = ConsumerProcurementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + procurement_service.ListOrdersResponse( + orders=[ + order.Order(), + order.Order(), + order.Order(), + ], + next_page_token="abc", + ), + procurement_service.ListOrdersResponse( + orders=[], + next_page_token="def", + ), + procurement_service.ListOrdersResponse( + orders=[ + order.Order(), + ], + next_page_token="ghi", + ), + procurement_service.ListOrdersResponse( + orders=[ + order.Order(), + order.Order(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + procurement_service.ListOrdersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + 
sample_request = {"parent": "billingAccounts/sample1"} + + pager = client.list_orders(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, order.Order) for i in results) + + pages = list(client.list_orders(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - procurement_service.GetOrderRequest, + procurement_service.ModifyOrderRequest, dict, ], ) -def test_get_order_rest(request_type): +def test_modify_order_rest(request_type): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2708,31 +3967,22 @@ def test_get_order_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = order.Order( - name="name_value", - display_name="display_name_value", - etag="etag_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = order.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_order(request) + response = client.modify_order(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, order.Order) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.etag == "etag_value" + assert response.operation.name == "operations/spam" -def test_get_order_rest_use_cached_wrapped_rpc(): +def test_modify_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2746,30 +3996,34 @@ def test_get_order_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_order in client._transport._wrapped_methods + assert client._transport.modify_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_order] = mock_rpc + client._transport._wrapped_methods[client._transport.modify_order] = mock_rpc request = {} - client.get_order(request) + client.modify_order(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_order(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.modify_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_order_rest_required_fields( - request_type=procurement_service.GetOrderRequest, +def test_modify_order_rest_required_fields( + request_type=procurement_service.ModifyOrderRequest, ): transport_class = transports.ConsumerProcurementServiceRestTransport @@ -2785,7 +4039,7 @@ def test_get_order_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_order._get_unset_required_fields(jsonified_request) + ).modify_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2794,7 +4048,7 @@ def test_get_order_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_order._get_unset_required_fields(jsonified_request) + ).modify_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2808,7 +4062,7 @@ def test_get_order_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = order.Order() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2820,39 +4074,37 @@ def test_get_order_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = order.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_order(request) + response = client.modify_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_order_rest_unset_required_fields(): +def test_modify_order_rest_unset_required_fields(): transport = transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_order._get_unset_required_fields({}) + unset_fields = transport.modify_order._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_order_rest_interceptors(null_interceptor): +def test_modify_order_rest_interceptors(null_interceptor): transport = transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2865,14 +4117,16 @@ def test_get_order_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "post_get_order" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + 
transports.ConsumerProcurementServiceRestInterceptor, "post_modify_order" ) as post, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "pre_get_order" + transports.ConsumerProcurementServiceRestInterceptor, "pre_modify_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = procurement_service.GetOrderRequest.pb( - procurement_service.GetOrderRequest() + pb_message = procurement_service.ModifyOrderRequest.pb( + procurement_service.ModifyOrderRequest() ) transcode.return_value = { "method": "post", @@ -2884,108 +4138,55 @@ def test_get_order_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = order.Order.to_json(order.Order()) - - request = procurement_service.GetOrderRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = order.Order() - - client.get_order( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_order_rest_bad_request( - transport: str = "rest", request_type=procurement_service.GetOrderRequest -): - client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "billingAccounts/sample1/orders/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_order(request) - - -def test_get_order_rest_flattened(): - client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = order.Order() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "billingAccounts/sample1/orders/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = order.Order.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - client.get_order(**mock_args) + request = procurement_service.ModifyOrderRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=billingAccounts/*/orders/*}" % client.transport._host, args[1] + client.modify_order( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() -def test_get_order_rest_flattened_error(transport: str = "rest"): + +def test_modify_order_rest_bad_request( + transport: str = "rest", request_type=procurement_service.ModifyOrderRequest +): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_order( - procurement_service.GetOrderRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.modify_order(request) -def test_get_order_rest_error(): +def test_modify_order_rest_error(): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2994,44 +4195,39 @@ def test_get_order_rest_error(): @pytest.mark.parametrize( "request_type", [ - procurement_service.ListOrdersRequest, + procurement_service.CancelOrderRequest, dict, ], ) -def test_list_orders_rest(request_type): +def test_cancel_order_rest(request_type): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "billingAccounts/sample1"} + request_init = {"name": "billingAccounts/sample1/orders/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = procurement_service.ListOrdersResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = procurement_service.ListOrdersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_orders(request) + response = client.cancel_order(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListOrdersPager) - assert response.next_page_token == "next_page_token_value" + assert response.operation.name == "operations/spam" -def test_list_orders_rest_use_cached_wrapped_rpc(): +def test_cancel_order_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3045,35 +4241,39 @@ def test_list_orders_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_orders in client._transport._wrapped_methods + assert client._transport.cancel_order in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_orders] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_order] = mock_rpc request = {} - client.list_orders(request) + client.cancel_order(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_orders(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.cancel_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_orders_rest_required_fields( - request_type=procurement_service.ListOrdersRequest, +def test_cancel_order_rest_required_fields( + request_type=procurement_service.CancelOrderRequest, ): transport_class = transports.ConsumerProcurementServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3084,29 +4284,21 @@ def test_list_orders_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_orders._get_unset_required_fields(jsonified_request) + ).cancel_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_orders._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).cancel_order._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3115,7 +4307,7 @@ def test_list_orders_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = procurement_service.ListOrdersResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3127,48 +4319,37 @@ def test_list_orders_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = procurement_service.ListOrdersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_orders(request) + response = client.cancel_order(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_orders_rest_unset_required_fields(): +def test_cancel_order_rest_unset_required_fields(): transport = 
transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_orders._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.cancel_order._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_orders_rest_interceptors(null_interceptor): +def test_cancel_order_rest_interceptors(null_interceptor): transport = transports.ConsumerProcurementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3181,14 +4362,16 @@ def test_list_orders_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "post_list_orders" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, "post_cancel_order" ) as post, mock.patch.object( - transports.ConsumerProcurementServiceRestInterceptor, "pre_list_orders" + transports.ConsumerProcurementServiceRestInterceptor, "pre_cancel_order" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = procurement_service.ListOrdersRequest.pb( - procurement_service.ListOrdersRequest() + pb_message = procurement_service.CancelOrderRequest.pb( + procurement_service.CancelOrderRequest() ) transcode.return_value = { "method": "post", @@ -3200,19 +4383,19 @@ def test_list_orders_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = procurement_service.ListOrdersResponse.to_json( - procurement_service.ListOrdersResponse() + req.return_value._content = json_format.MessageToJson( + 
operations_pb2.Operation() ) - request = procurement_service.ListOrdersRequest() + request = procurement_service.CancelOrderRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = procurement_service.ListOrdersResponse() + post.return_value = operations_pb2.Operation() - client.list_orders( + client.cancel_order( request, metadata=[ ("key", "val"), @@ -3224,8 +4407,8 @@ def test_list_orders_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_orders_rest_bad_request( - transport: str = "rest", request_type=procurement_service.ListOrdersRequest +def test_cancel_order_rest_bad_request( + transport: str = "rest", request_type=procurement_service.CancelOrderRequest ): client = ConsumerProcurementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3233,7 +4416,7 @@ def test_list_orders_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "billingAccounts/sample1"} + request_init = {"name": "billingAccounts/sample1/orders/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3245,126 +4428,14 @@ def test_list_orders_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_orders(request) - - -def test_list_orders_rest_flattened(): - client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = procurement_service.ListOrdersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "billingAccounts/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = procurement_service.ListOrdersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_orders(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=billingAccounts/*}/orders" % client.transport._host, args[1] - ) - - -def test_list_orders_rest_flattened_error(transport: str = "rest"): - client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_orders( - procurement_service.ListOrdersRequest(), - parent="parent_value", - ) + client.cancel_order(request) -def test_list_orders_rest_pager(transport: str = "rest"): +def test_cancel_order_rest_error(): client = ConsumerProcurementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - procurement_service.ListOrdersResponse( - orders=[ - order.Order(), - order.Order(), - order.Order(), - ], - next_page_token="abc", - ), - procurement_service.ListOrdersResponse( - orders=[], - next_page_token="def", - ), - procurement_service.ListOrdersResponse( - orders=[ - order.Order(), - ], - next_page_token="ghi", - ), - procurement_service.ListOrdersResponse( - orders=[ - order.Order(), - order.Order(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - procurement_service.ListOrdersResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "billingAccounts/sample1"} - - pager = client.list_orders(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, order.Order) for i in results) - - pages = list(client.list_orders(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
@@ -3508,6 +4579,8 @@ def test_consumer_procurement_service_base_transport(): "place_order", "get_order", "list_orders", + "modify_order", + "cancel_order", "get_operation", ) for method in methods: @@ -3804,6 +4877,12 @@ def test_consumer_procurement_service_client_transport_session_collision( session1 = client1.transport.list_orders._session session2 = client2.transport.list_orders._session assert session1 != session2 + session1 = client1.transport.modify_order._session + session2 = client2.transport.modify_order._session + assert session1 != session2 + session1 = client1.transport.cancel_order._session + session2 = client2.transport.cancel_order._session + assert session1 != session2 def test_consumer_procurement_service_grpc_transport_channel(): diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py new file mode 100644 index 000000000000..aab21d579a3a --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py @@ -0,0 +1,6057 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service import ( + LicenseManagementServiceAsyncClient, + LicenseManagementServiceClient, + pagers, + transports, +) +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert LicenseManagementServiceClient._get_default_mtls_endpoint(None) is None + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LicenseManagementServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert 
LicenseManagementServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + LicenseManagementServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + LicenseManagementServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert LicenseManagementServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert LicenseManagementServiceClient._get_client_cert_source(None, False) is None + assert ( + LicenseManagementServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + LicenseManagementServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == 
mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + LicenseManagementServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + LicenseManagementServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + LicenseManagementServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = LicenseManagementServiceClient._DEFAULT_UNIVERSE + default_endpoint = LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + LicenseManagementServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + 
LicenseManagementServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + LicenseManagementServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + LicenseManagementServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + LicenseManagementServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + LicenseManagementServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + LicenseManagementServiceClient._get_universe_domain(None, None) + == LicenseManagementServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + LicenseManagementServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. 
+ google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (LicenseManagementServiceClient, "grpc"), + (LicenseManagementServiceAsyncClient, "grpc_asyncio"), + (LicenseManagementServiceClient, "rest"), + ], +) +def test_license_management_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudcommerceconsumerprocurement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudcommerceconsumerprocurement.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.LicenseManagementServiceGrpcTransport, "grpc"), + (transports.LicenseManagementServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.LicenseManagementServiceRestTransport, "rest"), + ], +) +def test_license_management_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = 
transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (LicenseManagementServiceClient, "grpc"), + (LicenseManagementServiceAsyncClient, "grpc_asyncio"), + (LicenseManagementServiceClient, "rest"), + ], +) +def test_license_management_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudcommerceconsumerprocurement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudcommerceconsumerprocurement.googleapis.com" + ) + + +def test_license_management_service_client_get_transport_class(): + transport = LicenseManagementServiceClient.get_transport_class() + available_transports = [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceRestTransport, + ] + assert transport in available_transports + + transport = LicenseManagementServiceClient.get_transport_class("grpc") + assert transport == transports.LicenseManagementServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + 
LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + LicenseManagementServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceAsyncClient), +) +def test_license_management_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + LicenseManagementServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + LicenseManagementServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + "true", + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + 
"grpc_asyncio", + "true", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + "false", + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + "true", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + LicenseManagementServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_license_management_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [LicenseManagementServiceClient, LicenseManagementServiceAsyncClient], +) +@mock.patch.object( + LicenseManagementServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LicenseManagementServiceAsyncClient), +) +def test_license_management_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [LicenseManagementServiceClient, LicenseManagementServiceAsyncClient], +) +@mock.patch.object( + LicenseManagementServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceClient), +) +@mock.patch.object( + LicenseManagementServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LicenseManagementServiceAsyncClient), +) +def test_license_management_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = LicenseManagementServiceClient._DEFAULT_UNIVERSE + default_endpoint = LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + ), + ], +) +def test_license_management_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceRestTransport, + "rest", + None, + ), + ], +) +def test_license_management_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_license_management_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.transports.LicenseManagementServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = LicenseManagementServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_license_management_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudcommerceconsumerprocurement.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="cloudcommerceconsumerprocurement.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.GetLicensePoolRequest, + dict, + ], +) +def test_get_license_pool(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional 
in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + response = client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.GetLicensePoolRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +def test_get_license_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_license_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.GetLicensePoolRequest() + + +def test_get_license_pool_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.GetLicensePoolRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_license_pool(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.GetLicensePoolRequest( + name="name_value", + ) + + +def test_get_license_pool_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_license_pool in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_license_pool + ] = mock_rpc + request = {} + client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_license_pool_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + ) + response = await client.get_license_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.GetLicensePoolRequest() + + +@pytest.mark.asyncio +async def test_get_license_pool_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_license_pool + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + 
client._client._transport._wrapped_methods[ + client._client._transport.get_license_pool + ] = mock_rpc + + request = {} + await client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_license_pool_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.GetLicensePoolRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + ) + response = await client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.GetLicensePoolRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +@pytest.mark.asyncio +async def test_get_license_pool_async_from_dict(): + await test_get_license_pool_async(request_type=dict) + + +def test_get_license_pool_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.GetLicensePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + call.return_value = license_management_service.LicensePool() + client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_license_pool_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.GetLicensePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool() + ) + await client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_license_pool_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_license_pool( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_license_pool_flattened_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_license_pool( + license_management_service.GetLicensePoolRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_license_pool_flattened_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_license_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_license_pool( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_license_pool_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_license_pool( + license_management_service.GetLicensePoolRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.UpdateLicensePoolRequest, + dict, + ], +) +def test_update_license_pool(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + response = client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.UpdateLicensePoolRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +def test_update_license_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_license_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UpdateLicensePoolRequest() + + +def test_update_license_pool_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.UpdateLicensePoolRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_license_pool(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UpdateLicensePoolRequest() + + +def test_update_license_pool_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_license_pool in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_license_pool + ] = mock_rpc + request = {} + client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_license_pool_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + ) + response = await client.update_license_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UpdateLicensePoolRequest() + + +@pytest.mark.asyncio +async def test_update_license_pool_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_license_pool + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_license_pool + ] = mock_rpc + + request = {} + await client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_license_pool_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.UpdateLicensePoolRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + ) + response = await client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.UpdateLicensePoolRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +@pytest.mark.asyncio +async def test_update_license_pool_async_from_dict(): + await test_update_license_pool_async(request_type=dict) + + +def test_update_license_pool_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.UpdateLicensePoolRequest() + + request.license_pool.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + call.return_value = license_management_service.LicensePool() + client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "license_pool.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_license_pool_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.UpdateLicensePoolRequest() + + request.license_pool.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool() + ) + await client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "license_pool.name=name_value", + ) in kw["metadata"] + + +def test_update_license_pool_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_license_pool( + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].license_pool + mock_val = license_management_service.LicensePool(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_license_pool_flattened_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_license_pool( + license_management_service.UpdateLicensePoolRequest(), + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_license_pool_flattened_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_license_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.LicensePool() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.LicensePool() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_license_pool( + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].license_pool + mock_val = license_management_service.LicensePool(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_license_pool_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_license_pool( + license_management_service.UpdateLicensePoolRequest(), + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.AssignRequest, + dict, + ], +) +def test_assign(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.AssignResponse() + response = client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.AssignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.AssignResponse) + + +def test_assign_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.assign() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.AssignRequest() + + +def test_assign_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.AssignRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.assign(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.AssignRequest( + parent="parent_value", + ) + + +def test_assign_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.assign in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.assign] = mock_rpc + request = {} + client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.assign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_assign_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.AssignResponse() + ) + response = await client.assign() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.AssignRequest() + + +@pytest.mark.asyncio +async def test_assign_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.assign + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + 
client._client._transport._wrapped_methods[ + client._client._transport.assign + ] = mock_rpc + + request = {} + await client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.assign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_assign_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.AssignRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.AssignResponse() + ) + response = await client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.AssignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.AssignResponse) + + +@pytest.mark.asyncio +async def test_assign_async_from_dict(): + await test_assign_async(request_type=dict) + + +def test_assign_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = license_management_service.AssignRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + call.return_value = license_management_service.AssignResponse() + client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_assign_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.AssignRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.AssignResponse() + ) + await client.assign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_assign_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = license_management_service.AssignResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.assign( + parent="parent_value", + usernames=["usernames_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].usernames + mock_val = ["usernames_value"] + assert arg == mock_val + + +def test_assign_flattened_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.assign( + license_management_service.AssignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +@pytest.mark.asyncio +async def test_assign_flattened_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.assign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.AssignResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.AssignResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.assign( + parent="parent_value", + usernames=["usernames_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].usernames + mock_val = ["usernames_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_assign_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.assign( + license_management_service.AssignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.UnassignRequest, + dict, + ], +) +def test_unassign(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.UnassignResponse() + response = client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.UnassignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.UnassignResponse) + + +def test_unassign_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.unassign() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UnassignRequest() + + +def test_unassign_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.UnassignRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.unassign(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UnassignRequest( + parent="parent_value", + ) + + +def test_unassign_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.unassign in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.unassign] = mock_rpc + request = {} + client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.unassign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_unassign_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.UnassignResponse() + ) + response = await client.unassign() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.UnassignRequest() + + +@pytest.mark.asyncio +async def test_unassign_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.unassign + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.unassign + ] = mock_rpc + + request = {} + await client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.unassign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_unassign_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.UnassignRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.UnassignResponse() + ) + response = await client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.UnassignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.UnassignResponse) + + +@pytest.mark.asyncio +async def test_unassign_async_from_dict(): + await test_unassign_async(request_type=dict) + + +def test_unassign_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.UnassignRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + call.return_value = license_management_service.UnassignResponse() + client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_unassign_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.UnassignRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.UnassignResponse() + ) + await client.unassign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_unassign_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.UnassignResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.unassign( + parent="parent_value", + usernames=["usernames_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].usernames + mock_val = ["usernames_value"] + assert arg == mock_val + + +def test_unassign_flattened_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.unassign( + license_management_service.UnassignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +@pytest.mark.asyncio +async def test_unassign_flattened_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.unassign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.UnassignResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.UnassignResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.unassign( + parent="parent_value", + usernames=["usernames_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].usernames + mock_val = ["usernames_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_unassign_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.unassign( + license_management_service.UnassignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.EnumerateLicensedUsersRequest, + dict, + ], +) +def test_enumerate_licensed_users(request_type, transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.EnumerateLicensedUsersResponse( + next_page_token="next_page_token_value", + ) + response = client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = license_management_service.EnumerateLicensedUsersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.EnumerateLicensedUsersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_enumerate_licensed_users_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.enumerate_licensed_users() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.EnumerateLicensedUsersRequest() + + +def test_enumerate_licensed_users_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = license_management_service.EnumerateLicensedUsersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.enumerate_licensed_users(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.EnumerateLicensedUsersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_enumerate_licensed_users_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.enumerate_licensed_users + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.enumerate_licensed_users + ] = mock_rpc + request = {} + client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.enumerate_licensed_users(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.EnumerateLicensedUsersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.enumerate_licensed_users() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == license_management_service.EnumerateLicensedUsersRequest() + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.enumerate_licensed_users + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.enumerate_licensed_users + ] = mock_rpc + + request = {} + await client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.enumerate_licensed_users(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async( + transport: str = "grpc_asyncio", + request_type=license_management_service.EnumerateLicensedUsersRequest, +): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.EnumerateLicensedUsersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = license_management_service.EnumerateLicensedUsersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.EnumerateLicensedUsersAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async_from_dict(): + await test_enumerate_licensed_users_async(request_type=dict) + + +def test_enumerate_licensed_users_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = license_management_service.EnumerateLicensedUsersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + call.return_value = license_management_service.EnumerateLicensedUsersResponse() + client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = license_management_service.EnumerateLicensedUsersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.EnumerateLicensedUsersResponse() + ) + await client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_enumerate_licensed_users_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = license_management_service.EnumerateLicensedUsersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.enumerate_licensed_users( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_enumerate_licensed_users_flattened_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.enumerate_licensed_users( + license_management_service.EnumerateLicensedUsersRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_flattened_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = license_management_service.EnumerateLicensedUsersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + license_management_service.EnumerateLicensedUsersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.enumerate_licensed_users( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_flattened_error_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.enumerate_licensed_users( + license_management_service.EnumerateLicensedUsersRequest(), + parent="parent_value", + ) + + +def test_enumerate_licensed_users_pager(transport_name: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.enumerate_licensed_users( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, license_management_service.LicensedUser) for i in results + ) + + +def test_enumerate_licensed_users_pages(transport_name: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + RuntimeError, + ) + pages = list(client.enumerate_licensed_users(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async_pager(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + RuntimeError, + ) + async_pager = await client.enumerate_licensed_users( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, license_management_service.LicensedUser) for i in responses + ) + + +@pytest.mark.asyncio +async def test_enumerate_licensed_users_async_pages(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enumerate_licensed_users), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.enumerate_licensed_users(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.GetLicensePoolRequest, + dict, + ], +) +def test_get_license_pool_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_license_pool(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +def test_get_license_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_license_pool in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_license_pool + ] = mock_rpc + + request = {} + client.get_license_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_license_pool_rest_required_fields( + request_type=license_management_service.GetLicensePoolRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_license_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_license_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_license_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_license_pool_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_license_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_license_pool_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "post_get_license_pool" + ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "pre_get_license_pool" + ) 
as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.GetLicensePoolRequest.pb( + license_management_service.GetLicensePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = license_management_service.LicensePool.to_json( + license_management_service.LicensePool() + ) + + request = license_management_service.GetLicensePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.LicensePool() + + client.get_license_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_license_pool_rest_bad_request( + transport: str = "rest", + request_type=license_management_service.GetLicensePoolRequest, +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_license_pool(request) + + +def test_get_license_pool_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "billingAccounts/sample1/orders/sample2/licensePool"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_license_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=billingAccounts/*/orders/*/licensePool}" + % client.transport._host, + args[1], + ) + + +def test_get_license_pool_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_license_pool( + license_management_service.GetLicensePoolRequest(), + name="name_value", + ) + + +def test_get_license_pool_rest_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.UpdateLicensePoolRequest, + dict, + ], +) +def test_update_license_pool_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "license_pool": { + "name": "billingAccounts/sample1/orders/sample2/licensePool/sample3" + } + } + request_init["license_pool"] = { + "name": "billingAccounts/sample1/orders/sample2/licensePool/sample3", + "license_assignment_protocol": { + "manual_assignment_type": {}, + "auto_assignment_type": { + "inactive_license_ttl": {"seconds": 751, "nanos": 543} + }, + }, + "available_license_count": 2411, + "total_license_count": 2030, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = license_management_service.UpdateLicensePoolRequest.meta.fields[ + "license_pool" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["license_pool"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime 
version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["license_pool"][field])): + del request_init["license_pool"][field][i][subfield] + else: + del request_init["license_pool"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool( + name="name_value", + available_license_count=2411, + total_license_count=2030, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_license_pool(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, license_management_service.LicensePool) + assert response.name == "name_value" + assert response.available_license_count == 2411 + assert response.total_license_count == 2030 + + +def test_update_license_pool_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_license_pool in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_license_pool + ] = mock_rpc + + request = {} + client.update_license_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_license_pool(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_license_pool_rest_required_fields( + request_type=license_management_service.UpdateLicensePoolRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_license_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_license_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_license_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_license_pool_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_license_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "licensePool", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_license_pool_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "post_update_license_pool" 
+ ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "pre_update_license_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.UpdateLicensePoolRequest.pb( + license_management_service.UpdateLicensePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = license_management_service.LicensePool.to_json( + license_management_service.LicensePool() + ) + + request = license_management_service.UpdateLicensePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.LicensePool() + + client.update_license_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_license_pool_rest_bad_request( + transport: str = "rest", + request_type=license_management_service.UpdateLicensePoolRequest, +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "license_pool": { + "name": "billingAccounts/sample1/orders/sample2/licensePool/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_license_pool(request) + + +def test_update_license_pool_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.LicensePool() + + # get arguments that satisfy an http rule for this method + sample_request = { + "license_pool": { + "name": "billingAccounts/sample1/orders/sample2/licensePool/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.LicensePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_license_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{license_pool.name=billingAccounts/*/orders/*/licensePool/*}" + % client.transport._host, + args[1], + ) + + +def test_update_license_pool_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_license_pool( + license_management_service.UpdateLicensePoolRequest(), + license_pool=license_management_service.LicensePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_license_pool_rest_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.AssignRequest, + dict, + ], +) +def test_assign_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = license_management_service.AssignResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.AssignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.assign(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.AssignResponse) + + +def test_assign_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.assign in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.assign] = mock_rpc + + request = {} + client.assign(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.assign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_assign_rest_required_fields( + request_type=license_management_service.AssignRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["usernames"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).assign._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["usernames"] = "usernames_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).assign._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "usernames" in jsonified_request + assert jsonified_request["usernames"] == "usernames_value" + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.AssignResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.AssignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.assign(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_assign_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.assign._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "usernames", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_assign_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "post_assign" + ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "pre_assign" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.AssignRequest.pb( + license_management_service.AssignRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = license_management_service.AssignResponse.to_json( + license_management_service.AssignResponse() + ) + + request = license_management_service.AssignRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.AssignResponse() + + client.assign( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_assign_rest_bad_request( + transport: str = "rest", request_type=license_management_service.AssignRequest +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.assign(request) + + +def test_assign_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.AssignResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "billingAccounts/sample1/orders/sample2/licensePool" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + usernames=["usernames_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.AssignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.assign(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=billingAccounts/*/orders/*/licensePool}:assign" + % client.transport._host, + args[1], + ) + + +def test_assign_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.assign( + license_management_service.AssignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +def test_assign_rest_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.UnassignRequest, + dict, + ], +) +def test_unassign_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = license_management_service.UnassignResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.UnassignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.unassign(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, license_management_service.UnassignResponse) + + +def test_unassign_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.unassign in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.unassign] = mock_rpc + + request = {} + client.unassign(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.unassign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_unassign_rest_required_fields( + request_type=license_management_service.UnassignRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["usernames"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).unassign._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["usernames"] = "usernames_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).unassign._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "usernames" in jsonified_request + assert jsonified_request["usernames"] == "usernames_value" + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.UnassignResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.UnassignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.unassign(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_unassign_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.unassign._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "usernames", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_unassign_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) 
as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "post_unassign" + ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "pre_unassign" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.UnassignRequest.pb( + license_management_service.UnassignRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = license_management_service.UnassignResponse.to_json( + license_management_service.UnassignResponse() + ) + + request = license_management_service.UnassignRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.UnassignResponse() + + client.unassign( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_unassign_rest_bad_request( + transport: str = "rest", request_type=license_management_service.UnassignRequest +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.unassign(request) + + +def test_unassign_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.UnassignResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "billingAccounts/sample1/orders/sample2/licensePool" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + usernames=["usernames_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.UnassignResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.unassign(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=billingAccounts/*/orders/*/licensePool}:unassign" + % client.transport._host, + args[1], + ) + + +def test_unassign_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.unassign( + license_management_service.UnassignRequest(), + parent="parent_value", + usernames=["usernames_value"], + ) + + +def test_unassign_rest_error(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + license_management_service.EnumerateLicensedUsersRequest, + dict, + ], +) +def test_enumerate_licensed_users_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = license_management_service.EnumerateLicensedUsersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.EnumerateLicensedUsersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enumerate_licensed_users(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.EnumerateLicensedUsersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_enumerate_licensed_users_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.enumerate_licensed_users + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.enumerate_licensed_users + ] = mock_rpc + + request = {} + client.enumerate_licensed_users(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.enumerate_licensed_users(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_enumerate_licensed_users_rest_required_fields( + request_type=license_management_service.EnumerateLicensedUsersRequest, +): + transport_class = transports.LicenseManagementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enumerate_licensed_users._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enumerate_licensed_users._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = license_management_service.EnumerateLicensedUsersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = license_management_service.EnumerateLicensedUsersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.enumerate_licensed_users(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_enumerate_licensed_users_rest_unset_required_fields(): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.enumerate_licensed_users._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enumerate_licensed_users_rest_interceptors(null_interceptor): + transport = transports.LicenseManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LicenseManagementServiceRestInterceptor(), + ) + client = LicenseManagementServiceClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, + "post_enumerate_licensed_users", + ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, + "pre_enumerate_licensed_users", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = license_management_service.EnumerateLicensedUsersRequest.pb( + license_management_service.EnumerateLicensedUsersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + license_management_service.EnumerateLicensedUsersResponse.to_json( + license_management_service.EnumerateLicensedUsersResponse() + ) + ) + + request = license_management_service.EnumerateLicensedUsersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = license_management_service.EnumerateLicensedUsersResponse() + + client.enumerate_licensed_users( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enumerate_licensed_users_rest_bad_request( + transport: str = "rest", + request_type=license_management_service.EnumerateLicensedUsersRequest, +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "billingAccounts/sample1/orders/sample2/licensePool"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enumerate_licensed_users(request) + + +def test_enumerate_licensed_users_rest_flattened(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = license_management_service.EnumerateLicensedUsersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "billingAccounts/sample1/orders/sample2/licensePool" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = license_management_service.EnumerateLicensedUsersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.enumerate_licensed_users(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=billingAccounts/*/orders/*/licensePool}:enumerateLicensedUsers" + % client.transport._host, + args[1], + ) + + +def test_enumerate_licensed_users_rest_flattened_error(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.enumerate_licensed_users( + license_management_service.EnumerateLicensedUsersRequest(), + parent="parent_value", + ) + + +def test_enumerate_licensed_users_rest_pager(transport: str = "rest"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + next_page_token="abc", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[], + next_page_token="def", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + ], + next_page_token="ghi", + ), + license_management_service.EnumerateLicensedUsersResponse( + licensed_users=[ + license_management_service.LicensedUser(), + license_management_service.LicensedUser(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + license_management_service.EnumerateLicensedUsersResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "billingAccounts/sample1/orders/sample2/licensePool" + } + + pager = client.enumerate_licensed_users(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, license_management_service.LicensedUser) for i in results + ) + + pages = list(client.enumerate_licensed_users(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicenseManagementServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LicenseManagementServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.LicenseManagementServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LicenseManagementServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + transports.LicenseManagementServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = LicenseManagementServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LicenseManagementServiceGrpcTransport, + ) + + +def test_license_management_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LicenseManagementServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_license_management_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.transports.LicenseManagementServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.LicenseManagementServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_license_pool", + "update_license_pool", + "assign", + "unassign", + "enumerate_licensed_users", + "get_operation", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_license_management_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.transports.LicenseManagementServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LicenseManagementServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_license_management_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.commerce_consumer_procurement_v1.services.license_management_service.transports.LicenseManagementServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LicenseManagementServiceTransport() + adc.assert_called_once() + + +def test_license_management_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LicenseManagementServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ], +) +def test_license_management_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + transports.LicenseManagementServiceRestTransport, + ], +) +def test_license_management_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LicenseManagementServiceGrpcTransport, grpc_helpers), + (transports.LicenseManagementServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_license_management_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "cloudcommerceconsumerprocurement.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="cloudcommerceconsumerprocurement.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ], +) +def test_license_management_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_license_management_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.LicenseManagementServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_license_management_service_host_no_port(transport_name): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudcommerceconsumerprocurement.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudcommerceconsumerprocurement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudcommerceconsumerprocurement.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_license_management_service_host_with_port(transport_name): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudcommerceconsumerprocurement.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + 
"cloudcommerceconsumerprocurement.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudcommerceconsumerprocurement.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_license_management_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = LicenseManagementServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = LicenseManagementServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_license_pool._session + session2 = client2.transport.get_license_pool._session + assert session1 != session2 + session1 = client1.transport.update_license_pool._session + session2 = client2.transport.update_license_pool._session + assert session1 != session2 + session1 = client1.transport.assign._session + session2 = client2.transport.assign._session + assert session1 != session2 + session1 = client1.transport.unassign._session + session2 = client2.transport.unassign._session + assert session1 != session2 + session1 = client1.transport.enumerate_licensed_users._session + session2 = client2.transport.enumerate_licensed_users._session + assert session1 != session2 + + +def test_license_management_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.LicenseManagementServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_license_management_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.LicenseManagementServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ], +) +def test_license_management_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key 
bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.LicenseManagementServiceGrpcTransport, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ], +) +def test_license_management_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_license_pool_path(): + billing_account = "squid" + order = "clam" + expected = "billingAccounts/{billing_account}/orders/{order}/licensePool".format( + 
billing_account=billing_account, + order=order, + ) + actual = LicenseManagementServiceClient.license_pool_path(billing_account, order) + assert expected == actual + + +def test_parse_license_pool_path(): + expected = { + "billing_account": "whelk", + "order": "octopus", + } + path = LicenseManagementServiceClient.license_pool_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseManagementServiceClient.parse_license_pool_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = LicenseManagementServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = LicenseManagementServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseManagementServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = LicenseManagementServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = LicenseManagementServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LicenseManagementServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = LicenseManagementServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = LicenseManagementServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseManagementServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = LicenseManagementServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = LicenseManagementServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseManagementServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = LicenseManagementServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = LicenseManagementServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LicenseManagementServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.LicenseManagementServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.LicenseManagementServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = LicenseManagementServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "billingAccounts/sample1/orders/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "billingAccounts/sample1/orders/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation(transport: str = "grpc"): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = LicenseManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = LicenseManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + LicenseManagementServiceClient, + transports.LicenseManagementServiceGrpcTransport, + ), + ( + LicenseManagementServiceAsyncClient, + transports.LicenseManagementServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py index b1d81f550141..a2b4b10250ac 100644 --- a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py @@ -1327,8 +1327,9 @@ def test_place_order_use_cached_wrapped_rpc(): # Establish that the 
underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.place_order(request) @@ -1382,26 +1383,28 @@ async def test_place_order_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.place_order - ] = mock_object + ] = mock_rpc request = {} await client.place_order(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.place_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1669,22 +1672,23 @@ async def test_get_order_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_order - ] = mock_object + ] = mock_rpc request = {} await client.get_order(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_order(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2037,22 +2041,23 @@ async def test_list_orders_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_orders - ] = mock_object + ] = mock_rpc request = {} await client.list_orders(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_orders(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-common/google/cloud/common/gapic_version.py b/packages/google-cloud-common/google/cloud/common/gapic_version.py index 5dea85083756..558c8aab67c5 100644 --- a/packages/google-cloud-common/google/cloud/common/gapic_version.py +++ b/packages/google-cloud-common/google/cloud/common/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.3.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/google/cloud/compute/gapic_version.py b/packages/google-cloud-compute/google/cloud/compute/gapic_version.py index 7a6b2c884e03..558c8aab67c5 100644 --- a/packages/google-cloud-compute/google/cloud/compute/gapic_version.py +++ b/packages/google-cloud-compute/google/cloud/compute/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.19.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py index 7a6b2c884e03..558c8aab67c5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.19.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py index 3fc7d1c43658..e3c490fde1e6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py @@ -643,7 +643,7 @@ def __init__( Type[AcceleratorTypesTransport], Callable[..., AcceleratorTypesTransport], ] = ( - type(self).get_transport_class(transport) + AcceleratorTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AcceleratorTypesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py index 3f04fe8f8cf4..bd522fdd409c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[AddressesTransport], Callable[..., AddressesTransport] ] = ( - type(self).get_transport_class(transport) + AddressesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AddressesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py index ef8959793963..654de7d186ed 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py 
@@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[AutoscalersTransport], Callable[..., AutoscalersTransport] ] = ( - type(self).get_transport_class(transport) + AutoscalersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AutoscalersTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py index 4de581727f4f..8a244f67edab 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py @@ -640,7 +640,7 @@ def __init__( transport_init: Union[ Type[BackendBucketsTransport], Callable[..., BackendBucketsTransport] ] = ( - type(self).get_transport_class(transport) + BackendBucketsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BackendBucketsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py index 5cd689f386fc..8f6aa412de12 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[BackendServicesTransport], Callable[..., BackendServicesTransport] ] = ( - type(self).get_transport_class(transport) + BackendServicesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BackendServicesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py index 08d1809ce71b..3d2aa700494a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py @@ -632,7 +632,7 @@ def __init__( transport_init: Union[ Type[DiskTypesTransport], Callable[..., DiskTypesTransport] ] = ( - type(self).get_transport_class(transport) + DiskTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DiskTypesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py index b24be76d31c5..d8f3e5164750 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[DisksTransport], Callable[..., DisksTransport] ] = ( - type(self).get_transport_class(transport) + DisksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DisksTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py index 6e4c804f13cf..173f2d4d5e5c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py @@ -645,7 +645,7 @@ def __init__( Type[ExternalVpnGatewaysTransport], Callable[..., ExternalVpnGatewaysTransport], ] = ( - type(self).get_transport_class(transport) + ExternalVpnGatewaysClient.get_transport_class(transport) if 
isinstance(transport, str) or transport is None else cast(Callable[..., ExternalVpnGatewaysTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py index abd41c10a277..f5c93138a301 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py @@ -643,7 +643,7 @@ def __init__( Type[FirewallPoliciesTransport], Callable[..., FirewallPoliciesTransport], ] = ( - type(self).get_transport_class(transport) + FirewallPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FirewallPoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py index a38a0d219b5e..a25be623c57b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[FirewallsTransport], Callable[..., FirewallsTransport] ] = ( - type(self).get_transport_class(transport) + FirewallsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FirewallsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py index 1f5c84f1165d..479f226c6089 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py +++ 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[ForwardingRulesTransport], Callable[..., ForwardingRulesTransport] ] = ( - type(self).get_transport_class(transport) + ForwardingRulesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ForwardingRulesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py index 050317243dfd..aa361a0639af 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[GlobalAddressesTransport], Callable[..., GlobalAddressesTransport] ] = ( - type(self).get_transport_class(transport) + GlobalAddressesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GlobalAddressesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py index 1a410d04c750..73fac29acac7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py @@ -647,7 +647,7 @@ def __init__( Type[GlobalForwardingRulesTransport], Callable[..., GlobalForwardingRulesTransport], ] = ( - type(self).get_transport_class(transport) + GlobalForwardingRulesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., 
GlobalForwardingRulesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py index 35d214e6ba5f..9c3968a6f6d0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py @@ -651,7 +651,7 @@ def __init__( Type[GlobalNetworkEndpointGroupsTransport], Callable[..., GlobalNetworkEndpointGroupsTransport], ] = ( - type(self).get_transport_class(transport) + GlobalNetworkEndpointGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., GlobalNetworkEndpointGroupsTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py index e1c0f53c1b56..582f90608011 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py @@ -640,7 +640,7 @@ def __init__( Type[GlobalOperationsTransport], Callable[..., GlobalOperationsTransport], ] = ( - type(self).get_transport_class(transport) + GlobalOperationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GlobalOperationsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py index a173eec0d1d9..917163e5dde7 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py @@ -650,7 +650,7 @@ def __init__( Type[GlobalOrganizationOperationsTransport], Callable[..., GlobalOrganizationOperationsTransport], ] = ( - type(self).get_transport_class(transport) + GlobalOrganizationOperationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., GlobalOrganizationOperationsTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py index e65f6c3cbeb0..31baf252244e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py @@ -655,7 +655,7 @@ def __init__( Type[GlobalPublicDelegatedPrefixesTransport], Callable[..., GlobalPublicDelegatedPrefixesTransport], ] = ( - type(self).get_transport_class(transport) + GlobalPublicDelegatedPrefixesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., GlobalPublicDelegatedPrefixesTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py index 5fb579f953e1..390ca315950a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[HealthChecksTransport], Callable[..., 
HealthChecksTransport] ] = ( - type(self).get_transport_class(transport) + HealthChecksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., HealthChecksTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py index 29aa1952847b..a92d0d7c5fe2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py @@ -639,7 +639,7 @@ def __init__( Type[ImageFamilyViewsTransport], Callable[..., ImageFamilyViewsTransport], ] = ( - type(self).get_transport_class(transport) + ImageFamilyViewsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ImageFamilyViewsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py index 087baf56acd7..28351f58c4eb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[ImagesTransport], Callable[..., ImagesTransport] ] = ( - type(self).get_transport_class(transport) + ImagesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ImagesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py index d564a2844b4a..721dce6bb8a2 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py @@ -664,7 +664,7 @@ def __init__( Type[InstanceGroupManagerResizeRequestsTransport], Callable[..., InstanceGroupManagerResizeRequestsTransport], ] = ( - type(self).get_transport_class(transport) + InstanceGroupManagerResizeRequestsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., InstanceGroupManagerResizeRequestsTransport], diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py index 3fd4f3f6dafb..d65cb6c12b37 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py @@ -647,7 +647,7 @@ def __init__( Type[InstanceGroupManagersTransport], Callable[..., InstanceGroupManagersTransport], ] = ( - type(self).get_transport_class(transport) + InstanceGroupManagersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstanceGroupManagersTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py index 06a208f662a5..9d4f7dea8bd7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py @@ -640,7 +640,7 @@ def __init__( transport_init: Union[ Type[InstanceGroupsTransport], Callable[..., InstanceGroupsTransport] ] = ( - 
type(self).get_transport_class(transport) + InstanceGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstanceGroupsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py index 20a54c242f81..6b2aa28abee9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py @@ -648,7 +648,7 @@ def __init__( Type[InstanceSettingsServiceTransport], Callable[..., InstanceSettingsServiceTransport], ] = ( - type(self).get_transport_class(transport) + InstanceSettingsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstanceSettingsServiceTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py index 8294ff277b29..aab4c02a23b2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py @@ -645,7 +645,7 @@ def __init__( Type[InstanceTemplatesTransport], Callable[..., InstanceTemplatesTransport], ] = ( - type(self).get_transport_class(transport) + InstanceTemplatesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstanceTemplatesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py index 
57ac5bc40363..489e7e33bbec 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[InstancesTransport], Callable[..., InstancesTransport] ] = ( - type(self).get_transport_class(transport) + InstancesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstancesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py index 5340dce4c40e..64699398706e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py @@ -643,7 +643,7 @@ def __init__( Type[InstantSnapshotsTransport], Callable[..., InstantSnapshotsTransport], ] = ( - type(self).get_transport_class(transport) + InstantSnapshotsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstantSnapshotsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py index 663603537759..253f65fefb4b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py @@ -649,7 +649,7 @@ def __init__( Type[InterconnectAttachmentsTransport], Callable[..., InterconnectAttachmentsTransport], ] = ( - type(self).get_transport_class(transport) + InterconnectAttachmentsClient.get_transport_class(transport) if 
isinstance(transport, str) or transport is None else cast(Callable[..., InterconnectAttachmentsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py index 05372b3bddb9..4c1955436e94 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py @@ -644,7 +644,7 @@ def __init__( Type[InterconnectLocationsTransport], Callable[..., InterconnectLocationsTransport], ] = ( - type(self).get_transport_class(transport) + InterconnectLocationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InterconnectLocationsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py index f4158a224965..de0a24175bb3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py @@ -648,7 +648,7 @@ def __init__( Type[InterconnectRemoteLocationsTransport], Callable[..., InterconnectRemoteLocationsTransport], ] = ( - type(self).get_transport_class(transport) + InterconnectRemoteLocationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., InterconnectRemoteLocationsTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py index fce47fdba9be..aaceb51c97e0 
100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py @@ -638,7 +638,7 @@ def __init__( transport_init: Union[ Type[InterconnectsTransport], Callable[..., InterconnectsTransport] ] = ( - type(self).get_transport_class(transport) + InterconnectsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InterconnectsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py index 6a2003e8c5bd..19791d56e0c1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py @@ -631,7 +631,7 @@ def __init__( transport_init: Union[ Type[LicenseCodesTransport], Callable[..., LicenseCodesTransport] ] = ( - type(self).get_transport_class(transport) + LicenseCodesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LicenseCodesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py index af21a808ff1f..1e391cd879eb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[LicensesTransport], Callable[..., LicensesTransport] ] = ( - type(self).get_transport_class(transport) + LicensesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LicensesTransport], transport) 
) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py index 694ab8c0658f..d8263f56e910 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py @@ -638,7 +638,7 @@ def __init__( transport_init: Union[ Type[MachineImagesTransport], Callable[..., MachineImagesTransport] ] = ( - type(self).get_transport_class(transport) + MachineImagesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MachineImagesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py index 2cc3d99ace0a..76b729111007 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py @@ -632,7 +632,7 @@ def __init__( transport_init: Union[ Type[MachineTypesTransport], Callable[..., MachineTypesTransport] ] = ( - type(self).get_transport_class(transport) + MachineTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MachineTypesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py index afc68a604f24..2e218d73ffe9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py @@ -645,7 +645,7 @@ def __init__( 
Type[NetworkAttachmentsTransport], Callable[..., NetworkAttachmentsTransport], ] = ( - type(self).get_transport_class(transport) + NetworkAttachmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetworkAttachmentsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py index 4619eff25e1c..491d38581d21 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py @@ -651,7 +651,7 @@ def __init__( Type[NetworkEdgeSecurityServicesTransport], Callable[..., NetworkEdgeSecurityServicesTransport], ] = ( - type(self).get_transport_class(transport) + NetworkEdgeSecurityServicesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., NetworkEdgeSecurityServicesTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py index f3fe32270087..5dd8c4475ead 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py @@ -647,7 +647,7 @@ def __init__( Type[NetworkEndpointGroupsTransport], Callable[..., NetworkEndpointGroupsTransport], ] = ( - type(self).get_transport_class(transport) + NetworkEndpointGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetworkEndpointGroupsTransport], transport) ) diff --git 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py index 46d70f359c55..2ce5915f843f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py @@ -649,7 +649,7 @@ def __init__( Type[NetworkFirewallPoliciesTransport], Callable[..., NetworkFirewallPoliciesTransport], ] = ( - type(self).get_transport_class(transport) + NetworkFirewallPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetworkFirewallPoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py index 44729db252f0..2d6c6fdcca83 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[NetworksTransport], Callable[..., NetworksTransport] ] = ( - type(self).get_transport_class(transport) + NetworksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NetworksTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py index b33790a602d8..abfa417c23ea 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ 
Type[NodeGroupsTransport], Callable[..., NodeGroupsTransport] ] = ( - type(self).get_transport_class(transport) + NodeGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NodeGroupsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py index e45c491e2789..a37de9001145 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py @@ -638,7 +638,7 @@ def __init__( transport_init: Union[ Type[NodeTemplatesTransport], Callable[..., NodeTemplatesTransport] ] = ( - type(self).get_transport_class(transport) + NodeTemplatesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NodeTemplatesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py index 6bfc6ff6e2a0..5d20d96caba5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py @@ -632,7 +632,7 @@ def __init__( transport_init: Union[ Type[NodeTypesTransport], Callable[..., NodeTypesTransport] ] = ( - type(self).get_transport_class(transport) + NodeTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NodeTypesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py index 926c95f56744..8f3aae82b89e 100644 
--- a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py @@ -643,7 +643,7 @@ def __init__( Type[PacketMirroringsTransport], Callable[..., PacketMirroringsTransport], ] = ( - type(self).get_transport_class(transport) + PacketMirroringsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PacketMirroringsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py index df3c01084b7f..3cd34365bdb5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[ProjectsTransport], Callable[..., ProjectsTransport] ] = ( - type(self).get_transport_class(transport) + ProjectsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ProjectsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py index 21e6317ca4bc..f988a2b8c93b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py @@ -649,7 +649,7 @@ def __init__( Type[PublicAdvertisedPrefixesTransport], Callable[..., PublicAdvertisedPrefixesTransport], ] = ( - type(self).get_transport_class(transport) + PublicAdvertisedPrefixesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None 
else cast(Callable[..., PublicAdvertisedPrefixesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py index ade92d4e6428..82989f868234 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py @@ -649,7 +649,7 @@ def __init__( Type[PublicDelegatedPrefixesTransport], Callable[..., PublicDelegatedPrefixesTransport], ] = ( - type(self).get_transport_class(transport) + PublicDelegatedPrefixesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PublicDelegatedPrefixesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py index 5ea80bcbbe37..1fc50b748039 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py @@ -645,7 +645,7 @@ def __init__( Type[RegionAutoscalersTransport], Callable[..., RegionAutoscalersTransport], ] = ( - type(self).get_transport_class(transport) + RegionAutoscalersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionAutoscalersTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py index 6e7b7af27151..76de8cad59dc 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py @@ -647,7 +647,7 @@ def __init__( Type[RegionBackendServicesTransport], Callable[..., RegionBackendServicesTransport], ] = ( - type(self).get_transport_class(transport) + RegionBackendServicesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionBackendServicesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py index 351ad29d66c6..1df1123fcdb5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py @@ -645,7 +645,7 @@ def __init__( Type[RegionCommitmentsTransport], Callable[..., RegionCommitmentsTransport], ] = ( - type(self).get_transport_class(transport) + RegionCommitmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionCommitmentsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py index 68f6a78d57f3..e985fbb9e895 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py @@ -639,7 +639,7 @@ def __init__( transport_init: Union[ Type[RegionDiskTypesTransport], Callable[..., RegionDiskTypesTransport] ] = ( - type(self).get_transport_class(transport) + RegionDiskTypesClient.get_transport_class(transport) if 
isinstance(transport, str) or transport is None else cast(Callable[..., RegionDiskTypesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py index c9dbd47f72fd..6f97ae0f0868 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[RegionDisksTransport], Callable[..., RegionDisksTransport] ] = ( - type(self).get_transport_class(transport) + RegionDisksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionDisksTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py index 0a2b95d11b4d..a26bcfd452d9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py @@ -649,7 +649,7 @@ def __init__( Type[RegionHealthCheckServicesTransport], Callable[..., RegionHealthCheckServicesTransport], ] = ( - type(self).get_transport_class(transport) + RegionHealthCheckServicesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionHealthCheckServicesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py index 2fcbea43d2af..30fea9ff210a 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py @@ -645,7 +645,7 @@ def __init__( Type[RegionHealthChecksTransport], Callable[..., RegionHealthChecksTransport], ] = ( - type(self).get_transport_class(transport) + RegionHealthChecksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionHealthChecksTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py index a90d5db315ab..ce6498488bf9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py @@ -651,7 +651,7 @@ def __init__( Type[RegionInstanceGroupManagersTransport], Callable[..., RegionInstanceGroupManagersTransport], ] = ( - type(self).get_transport_class(transport) + RegionInstanceGroupManagersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., RegionInstanceGroupManagersTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py index 5b55c3dea81b..aacea1576000 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py @@ -645,7 +645,7 @@ def __init__( Type[RegionInstanceGroupsTransport], Callable[..., RegionInstanceGroupsTransport], ] = ( - 
type(self).get_transport_class(transport) + RegionInstanceGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionInstanceGroupsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py index 18738fb19cf1..911bd2c6ae22 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py @@ -649,7 +649,7 @@ def __init__( Type[RegionInstanceTemplatesTransport], Callable[..., RegionInstanceTemplatesTransport], ] = ( - type(self).get_transport_class(transport) + RegionInstanceTemplatesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionInstanceTemplatesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py index eccd921bf254..b9838ead41aa 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py @@ -641,7 +641,7 @@ def __init__( transport_init: Union[ Type[RegionInstancesTransport], Callable[..., RegionInstancesTransport] ] = ( - type(self).get_transport_class(transport) + RegionInstancesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionInstancesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py index ef18ad964f96..5f2772163b6a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py @@ -647,7 +647,7 @@ def __init__( Type[RegionInstantSnapshotsTransport], Callable[..., RegionInstantSnapshotsTransport], ] = ( - type(self).get_transport_class(transport) + RegionInstantSnapshotsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionInstantSnapshotsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py index 448c184d7608..ae10133ac5df 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py @@ -651,7 +651,7 @@ def __init__( Type[RegionNetworkEndpointGroupsTransport], Callable[..., RegionNetworkEndpointGroupsTransport], ] = ( - type(self).get_transport_class(transport) + RegionNetworkEndpointGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., RegionNetworkEndpointGroupsTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py index 2ea39512745b..9ef32491766d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py +++ 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py @@ -655,7 +655,7 @@ def __init__( Type[RegionNetworkFirewallPoliciesTransport], Callable[..., RegionNetworkFirewallPoliciesTransport], ] = ( - type(self).get_transport_class(transport) + RegionNetworkFirewallPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., RegionNetworkFirewallPoliciesTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py index eb68db77a265..8f9b353ce1dd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py @@ -651,7 +651,7 @@ def __init__( Type[RegionNotificationEndpointsTransport], Callable[..., RegionNotificationEndpointsTransport], ] = ( - type(self).get_transport_class(transport) + RegionNotificationEndpointsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., RegionNotificationEndpointsTransport], transport diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py index b1650a94e7fe..9afa7640b4ae 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py @@ -640,7 +640,7 @@ def __init__( Type[RegionOperationsTransport], Callable[..., RegionOperationsTransport], ] = ( - type(self).get_transport_class(transport) + RegionOperationsClient.get_transport_class(transport) if 
isinstance(transport, str) or transport is None else cast(Callable[..., RegionOperationsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py index 1384c918a89e..64cd68517767 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py @@ -647,7 +647,7 @@ def __init__( Type[RegionSecurityPoliciesTransport], Callable[..., RegionSecurityPoliciesTransport], ] = ( - type(self).get_transport_class(transport) + RegionSecurityPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionSecurityPoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py index dc4c7c1737f0..601228014ab6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py @@ -647,7 +647,7 @@ def __init__( Type[RegionSslCertificatesTransport], Callable[..., RegionSslCertificatesTransport], ] = ( - type(self).get_transport_class(transport) + RegionSslCertificatesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionSslCertificatesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py index 1ceb3f71ffc3..3dad39ce1e01 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py @@ -645,7 +645,7 @@ def __init__( Type[RegionSslPoliciesTransport], Callable[..., RegionSslPoliciesTransport], ] = ( - type(self).get_transport_class(transport) + RegionSslPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionSslPoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py index 74b34dc7fb94..4fa02950c9ee 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py @@ -649,7 +649,7 @@ def __init__( Type[RegionTargetHttpProxiesTransport], Callable[..., RegionTargetHttpProxiesTransport], ] = ( - type(self).get_transport_class(transport) + RegionTargetHttpProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionTargetHttpProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py index e4ad3653da7c..a0a67bf5464d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py @@ -649,7 +649,7 @@ def __init__( Type[RegionTargetHttpsProxiesTransport], Callable[..., RegionTargetHttpsProxiesTransport], ] = ( - 
type(self).get_transport_class(transport) + RegionTargetHttpsProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionTargetHttpsProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py index b72e1de9e5ff..401ea84bb7f3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py @@ -647,7 +647,7 @@ def __init__( Type[RegionTargetTcpProxiesTransport], Callable[..., RegionTargetTcpProxiesTransport], ] = ( - type(self).get_transport_class(transport) + RegionTargetTcpProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionTargetTcpProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py index 16da5d9623ea..af510a30bb48 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py @@ -638,7 +638,7 @@ def __init__( transport_init: Union[ Type[RegionUrlMapsTransport], Callable[..., RegionUrlMapsTransport] ] = ( - type(self).get_transport_class(transport) + RegionUrlMapsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionUrlMapsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py 
index 8d026c83045d..d011854631b2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py @@ -632,7 +632,7 @@ def __init__( transport_init: Union[ Type[RegionZonesTransport], Callable[..., RegionZonesTransport] ] = ( - type(self).get_transport_class(transport) + RegionZonesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionZonesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py index 4dfd0ed86bad..9502b764063d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py @@ -632,7 +632,7 @@ def __init__( transport_init: Union[ Type[RegionsTransport], Callable[..., RegionsTransport] ] = ( - type(self).get_transport_class(transport) + RegionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RegionsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py index 5c21ebe9f5cf..712408dd11c0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[ReservationsTransport], Callable[..., ReservationsTransport] ] = ( - type(self).get_transport_class(transport) + ReservationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ReservationsTransport], 
transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py index e0450bc23f06..15544bdf9cbd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py @@ -643,7 +643,7 @@ def __init__( Type[ResourcePoliciesTransport], Callable[..., ResourcePoliciesTransport], ] = ( - type(self).get_transport_class(transport) + ResourcePoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ResourcePoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py index 281943a40358..16244812f736 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[RoutersTransport], Callable[..., RoutersTransport] ] = ( - type(self).get_transport_class(transport) + RoutersClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RoutersTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py index 61d007166218..4a180fbf1b7f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[RoutesTransport], Callable[..., RoutesTransport] ] = ( - 
type(self).get_transport_class(transport) + RoutesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RoutesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py index 5a7260c8ce7d..9866a67f1ee7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py @@ -643,7 +643,7 @@ def __init__( Type[SecurityPoliciesTransport], Callable[..., SecurityPoliciesTransport], ] = ( - type(self).get_transport_class(transport) + SecurityPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SecurityPoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py index a62bb7bd1dae..ed32f20eb629 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py @@ -645,7 +645,7 @@ def __init__( Type[ServiceAttachmentsTransport], Callable[..., ServiceAttachmentsTransport], ] = ( - type(self).get_transport_class(transport) + ServiceAttachmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ServiceAttachmentsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py index a25ad4685a7a..96038398783e 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py @@ -648,7 +648,7 @@ def __init__( Type[SnapshotSettingsServiceTransport], Callable[..., SnapshotSettingsServiceTransport], ] = ( - type(self).get_transport_class(transport) + SnapshotSettingsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SnapshotSettingsServiceTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py index d0db20882eb6..48ac2b521d06 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[SnapshotsTransport], Callable[..., SnapshotsTransport] ] = ( - type(self).get_transport_class(transport) + SnapshotsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SnapshotsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py index dae0cbcde444..9f8d79a499c5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[SslCertificatesTransport], Callable[..., SslCertificatesTransport] ] = ( - type(self).get_transport_class(transport) + SslCertificatesClient.get_transport_class(transport) if isinstance(transport, str) or transport is 
None else cast(Callable[..., SslCertificatesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py index e0ee783f6e80..5a0b7f483a54 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[SslPoliciesTransport], Callable[..., SslPoliciesTransport] ] = ( - type(self).get_transport_class(transport) + SslPoliciesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SslPoliciesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py index 6f3717271b10..66c919a57704 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py @@ -640,7 +640,7 @@ def __init__( Type[StoragePoolTypesTransport], Callable[..., StoragePoolTypesTransport], ] = ( - type(self).get_transport_class(transport) + StoragePoolTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., StoragePoolTypesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py index a19b357d97ef..e70dc879e944 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py @@ 
-635,7 +635,7 @@ def __init__( transport_init: Union[ Type[StoragePoolsTransport], Callable[..., StoragePoolsTransport] ] = ( - type(self).get_transport_class(transport) + StoragePoolsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., StoragePoolsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py index b3b2ba865ccd..2ffc259804cf 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[SubnetworksTransport], Callable[..., SubnetworksTransport] ] = ( - type(self).get_transport_class(transport) + SubnetworksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SubnetworksTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py index fe20cd7b20b7..021e2c228f58 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py @@ -645,7 +645,7 @@ def __init__( Type[TargetGrpcProxiesTransport], Callable[..., TargetGrpcProxiesTransport], ] = ( - type(self).get_transport_class(transport) + TargetGrpcProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetGrpcProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py index 2c046c5447d4..524ef81a25da 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py @@ -645,7 +645,7 @@ def __init__( Type[TargetHttpProxiesTransport], Callable[..., TargetHttpProxiesTransport], ] = ( - type(self).get_transport_class(transport) + TargetHttpProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetHttpProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py index 1d471011d9b6..5fc74cea1db9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py @@ -645,7 +645,7 @@ def __init__( Type[TargetHttpsProxiesTransport], Callable[..., TargetHttpsProxiesTransport], ] = ( - type(self).get_transport_class(transport) + TargetHttpsProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetHttpsProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py index c605f9b64f62..2575ea47aa79 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[TargetInstancesTransport], Callable[..., 
TargetInstancesTransport] ] = ( - type(self).get_transport_class(transport) + TargetInstancesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetInstancesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py index c90e842c2f03..b575ba53b5cf 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[TargetPoolsTransport], Callable[..., TargetPoolsTransport] ] = ( - type(self).get_transport_class(transport) + TargetPoolsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetPoolsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py index 223e70bd3023..ea783bb9a2fa 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py @@ -643,7 +643,7 @@ def __init__( Type[TargetSslProxiesTransport], Callable[..., TargetSslProxiesTransport], ] = ( - type(self).get_transport_class(transport) + TargetSslProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetSslProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py index 3b06ab98861b..ed1563d2f327 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py @@ -643,7 +643,7 @@ def __init__( Type[TargetTcpProxiesTransport], Callable[..., TargetTcpProxiesTransport], ] = ( - type(self).get_transport_class(transport) + TargetTcpProxiesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetTcpProxiesTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py index 17c732e6074c..a9c6cb8df839 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py @@ -645,7 +645,7 @@ def __init__( Type[TargetVpnGatewaysTransport], Callable[..., TargetVpnGatewaysTransport], ] = ( - type(self).get_transport_class(transport) + TargetVpnGatewaysClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TargetVpnGatewaysTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py index 453f4be8cdb0..9692ef9e40dd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[UrlMapsTransport], Callable[..., UrlMapsTransport] ] = ( - type(self).get_transport_class(transport) + UrlMapsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., UrlMapsTransport], 
transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py index 4dbea329de10..a3db517a80c0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[VpnGatewaysTransport], Callable[..., VpnGatewaysTransport] ] = ( - type(self).get_transport_class(transport) + VpnGatewaysClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VpnGatewaysTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py index 6fe30460d897..86a982810b11 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py @@ -635,7 +635,7 @@ def __init__( transport_init: Union[ Type[VpnTunnelsTransport], Callable[..., VpnTunnelsTransport] ] = ( - type(self).get_transport_class(transport) + VpnTunnelsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VpnTunnelsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py index 6b139371feaa..8d59625da496 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py @@ -637,7 +637,7 @@ def __init__( transport_init: Union[ 
Type[ZoneOperationsTransport], Callable[..., ZoneOperationsTransport] ] = ( - type(self).get_transport_class(transport) + ZoneOperationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ZoneOperationsTransport], transport) ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py index 22464582ed16..6d712df2de77 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py @@ -632,7 +632,7 @@ def __init__( transport_init: Union[ Type[ZonesTransport], Callable[..., ZonesTransport] ] = ( - type(self).get_transport_class(transport) + ZonesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ZonesTransport], transport) ) diff --git a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json index bdfae0e308c3..8a5a8ea62778 100644 --- a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json +++ b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-compute", - "version": "1.19.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py +++ 
b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py index 5feceb32bedf..558c8aab67c5 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py index a228e2a68655..42eed01a661d 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,10 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConfidentialComputingClient).get_transport_class, - type(ConfidentialComputingClient), - ) + get_transport_class = ConfidentialComputingClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py index 405aa8b04120..2dc8aec9e96a 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py @@ -670,7 +670,7 @@ def __init__( Type[ConfidentialComputingTransport], Callable[..., ConfidentialComputingTransport], ] = ( - type(self).get_transport_class(transport) + ConfidentialComputingClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConfidentialComputingTransport], transport) ) diff --git a/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json b/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json index 1f70f8007495..957efb1f23a6 100644 --- a/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json +++ b/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": 
"google-cloud-confidentialcomputing", - "version": "0.4.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py index d3399c09bd4a..88b58dbe84ec 100644 --- a/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py +++ b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py @@ -1361,22 +1361,23 @@ async def test_create_challenge_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_challenge - ] = mock_object + ] = mock_rpc request = {} await client.create_challenge(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_challenge(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1747,22 +1748,23 @@ async def test_verify_attestation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.verify_attestation - ] = mock_object + ] = mock_rpc request = {} await client.verify_attestation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.verify_attestation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-config/google/cloud/config/gapic_version.py b/packages/google-cloud-config/google/cloud/config/gapic_version.py index 4b834789ba9e..558c8aab67c5 100644 --- a/packages/google-cloud-config/google/cloud/config/gapic_version.py +++ b/packages/google-cloud-config/google/cloud/config/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py index 4b834789ba9e..558c8aab67c5 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py +++ b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py index 65290c0e7ea0..3bd01b92b89d 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,9 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConfigClient).get_transport_class, type(ConfigClient) - ) + get_transport_class = ConfigClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py index 791c5295ea52..2a763afcd0ff 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py @@ -808,7 +808,7 @@ def __init__( transport_init: Union[ Type[ConfigTransport], Callable[..., ConfigTransport] ] = ( - type(self).get_transport_class(transport) + ConfigClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConfigTransport], transport) ) diff --git a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json index f3c226df045e..772dccf47120 100644 --- a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json +++ b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-config", - "version": "0.1.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py index c7134800a41c..c3d4c7bcbe0a 100644 --- a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py +++ 
b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py @@ -1243,22 +1243,23 @@ async def test_list_deployments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_deployments - ] = mock_object + ] = mock_rpc request = {} await client.list_deployments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1848,22 +1849,23 @@ async def test_get_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_deployment - ] = mock_object + ] = mock_rpc request = {} await client.get_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2194,8 +2196,9 @@ def test_create_deployment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_deployment(request) @@ -2251,26 +2254,28 @@ async def test_create_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_deployment - ] = mock_object + ] = mock_rpc request = {} await client.create_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2618,8 +2623,9 @@ def test_update_deployment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_deployment(request) @@ -2675,26 +2681,28 @@ async def test_update_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_deployment - ] = mock_object + ] = mock_rpc request = {} await client.update_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3034,8 +3042,9 @@ def test_delete_deployment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_deployment(request) @@ -3091,26 +3100,28 @@ async def test_delete_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_deployment - ] = mock_object + ] = mock_rpc request = {} await client.delete_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3475,22 +3486,23 @@ async def test_list_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_revisions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4079,22 +4091,23 @@ async def test_get_revision_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_revision - ] = mock_object + ] = mock_rpc request = {} await client.get_revision(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4475,22 +4488,23 @@ async def test_get_resource_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_resource - ] = mock_object + ] = mock_rpc request = {} await client.get_resource(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4848,22 +4862,23 @@ async def test_list_resources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_resources - ] = mock_object + ] = mock_rpc request = {} await client.list_resources(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5421,22 +5436,23 @@ async def test_export_deployment_statefile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_deployment_statefile - ] = mock_object + ] = mock_rpc request = {} await client.export_deployment_statefile(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.export_deployment_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5721,22 +5737,23 @@ async def test_export_revision_statefile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_revision_statefile - ] = mock_object + ] = mock_rpc request = {} await client.export_revision_statefile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.export_revision_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6009,22 +6026,23 @@ async def test_import_statefile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_statefile - ] = mock_object + ] = mock_rpc request = {} await client.import_statefile(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.import_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6374,22 +6392,23 @@ async def test_delete_statefile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_statefile - ] = mock_object + ] = mock_rpc request = {} await client.delete_statefile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6673,8 +6692,9 @@ def test_lock_deployment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.lock_deployment(request) @@ -6728,26 +6748,28 @@ async def test_lock_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lock_deployment - ] = mock_object + ] = mock_rpc request = {} await client.lock_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.lock_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7045,8 +7067,9 @@ def test_unlock_deployment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.unlock_deployment(request) @@ -7102,26 +7125,28 @@ async def test_unlock_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.unlock_deployment - ] = mock_object + ] = mock_rpc request = {} await client.unlock_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.unlock_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7501,22 +7526,23 @@ async def test_export_lock_info_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_lock_info - ] = mock_object + ] = mock_rpc request = {} await client.export_lock_info(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.export_lock_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7815,8 +7841,9 @@ def test_create_preview_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_preview(request) @@ -7870,26 +7897,28 @@ async def test_create_preview_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_preview - ] = mock_object + ] = mock_rpc request = {} await client.create_preview(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_preview(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8301,22 +8330,23 @@ async def test_get_preview_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_preview - ] = mock_object + ] = mock_rpc request = {} await client.get_preview(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_preview(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8694,22 +8724,23 @@ async def test_list_previews_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_previews - ] = mock_object + ] = mock_rpc request = {} await client.list_previews(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_previews(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9198,8 +9229,9 @@ def test_delete_preview_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_preview(request) @@ -9253,26 +9285,28 @@ async def test_delete_preview_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_preview - ] = mock_object + ] = mock_rpc request = {} await client.delete_preview(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_preview(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9626,22 +9660,23 @@ async def test_export_preview_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_preview_result - ] = mock_object + ] = mock_rpc request = {} await client.export_preview_result(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.export_preview_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9933,22 +9968,23 @@ async def test_list_terraform_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_terraform_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_terraform_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_terraform_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10527,22 +10563,23 @@ async def test_get_terraform_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_terraform_version - ] = mock_object + ] = mock_rpc request = {} await client.get_terraform_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_terraform_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contact-center-insights/CHANGELOG.md b/packages/google-cloud-contact-center-insights/CHANGELOG.md index 085c4c34a4fa..6628d33427bc 100644 --- a/packages/google-cloud-contact-center-insights/CHANGELOG.md +++ b/packages/google-cloud-contact-center-insights/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## [1.18.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contact-center-insights-v1.17.5...google-cloud-contact-center-insights-v1.18.0) (2024-10-08) + + +### Features + +* Add CMEK InitializeLroSpec ([e4ac435](https://github.com/googleapis/google-cloud-python/commit/e4ac435aaa9508e33090091232ff35df860bfd37)) +* Add import / export IssueModel ([e4ac435](https://github.com/googleapis/google-cloud-python/commit/e4ac435aaa9508e33090091232ff35df860bfd37)) +* Add metadata import to IngestConversations ([e4ac435](https://github.com/googleapis/google-cloud-python/commit/e4ac435aaa9508e33090091232ff35df860bfd37)) +* Add sampling to IngestConversations ([e4ac435](https://github.com/googleapis/google-cloud-python/commit/e4ac435aaa9508e33090091232ff35df860bfd37)) + + +### Documentation + +* Add a comment for valid `order_by` values in ListConversations ([e4ac435](https://github.com/googleapis/google-cloud-python/commit/e4ac435aaa9508e33090091232ff35df860bfd37)) +* Add a comment for valid `update_mask` values in UpdateConversation ([e4ac435](https://github.com/googleapis/google-cloud-python/commit/e4ac435aaa9508e33090091232ff35df860bfd37)) + ## [1.17.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contact-center-insights-v1.17.4...google-cloud-contact-center-insights-v1.17.5) (2024-07-30) diff --git 
a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py index f19179e38c46..13964e6e628e 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py @@ -56,16 +56,26 @@ ExportInsightsDataMetadata, ExportInsightsDataRequest, ExportInsightsDataResponse, + ExportIssueModelMetadata, + ExportIssueModelRequest, + ExportIssueModelResponse, GetAnalysisRequest, GetConversationRequest, + GetEncryptionSpecRequest, GetIssueModelRequest, GetIssueRequest, GetPhraseMatcherRequest, GetSettingsRequest, GetViewRequest, + ImportIssueModelMetadata, + ImportIssueModelRequest, + ImportIssueModelResponse, IngestConversationsMetadata, IngestConversationsRequest, IngestConversationsResponse, + InitializeEncryptionSpecMetadata, + InitializeEncryptionSpecRequest, + InitializeEncryptionSpecResponse, ListAnalysesRequest, ListAnalysesResponse, ListConversationsRequest, @@ -101,11 +111,13 @@ Conversation, ConversationDataSource, ConversationLevelSentiment, + ConversationLevelSilence, ConversationParticipant, ConversationSummarizationSuggestionData, DialogflowIntent, DialogflowInteractionData, DialogflowSource, + EncryptionSpec, Entity, EntityMentionData, ExactMatchConfig, @@ -170,16 +182,26 @@ "ExportInsightsDataMetadata", "ExportInsightsDataRequest", "ExportInsightsDataResponse", + "ExportIssueModelMetadata", + "ExportIssueModelRequest", + "ExportIssueModelResponse", "GetAnalysisRequest", "GetConversationRequest", + "GetEncryptionSpecRequest", "GetIssueModelRequest", "GetIssueRequest", "GetPhraseMatcherRequest", "GetSettingsRequest", "GetViewRequest", + "ImportIssueModelMetadata", + "ImportIssueModelRequest", + "ImportIssueModelResponse", "IngestConversationsMetadata", "IngestConversationsRequest", 
"IngestConversationsResponse", + "InitializeEncryptionSpecMetadata", + "InitializeEncryptionSpecRequest", + "InitializeEncryptionSpecResponse", "ListAnalysesRequest", "ListAnalysesResponse", "ListConversationsRequest", @@ -214,11 +236,13 @@ "Conversation", "ConversationDataSource", "ConversationLevelSentiment", + "ConversationLevelSilence", "ConversationParticipant", "ConversationSummarizationSuggestionData", "DialogflowIntent", "DialogflowInteractionData", "DialogflowSource", + "EncryptionSpec", "Entity", "EntityMentionData", "ExactMatchConfig", diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py index 1a7fb072f786..8099b154e9b6 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.17.5" # {x-release-please-version} +__version__ = "1.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py index 5f169fc674d2..8548409a4056 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py @@ -54,16 +54,26 @@ ExportInsightsDataMetadata, ExportInsightsDataRequest, ExportInsightsDataResponse, + ExportIssueModelMetadata, + ExportIssueModelRequest, + ExportIssueModelResponse, GetAnalysisRequest, GetConversationRequest, + GetEncryptionSpecRequest, GetIssueModelRequest, GetIssueRequest, GetPhraseMatcherRequest, GetSettingsRequest, GetViewRequest, + ImportIssueModelMetadata, + ImportIssueModelRequest, + ImportIssueModelResponse, IngestConversationsMetadata, IngestConversationsRequest, IngestConversationsResponse, + InitializeEncryptionSpecMetadata, + InitializeEncryptionSpecRequest, + InitializeEncryptionSpecResponse, ListAnalysesRequest, ListAnalysesResponse, ListConversationsRequest, @@ -99,11 +109,13 @@ Conversation, ConversationDataSource, ConversationLevelSentiment, + ConversationLevelSilence, ConversationParticipant, ConversationSummarizationSuggestionData, DialogflowIntent, DialogflowInteractionData, DialogflowSource, + EncryptionSpec, Entity, EntityMentionData, ExactMatchConfig, @@ -158,6 +170,7 @@ "Conversation", "ConversationDataSource", "ConversationLevelSentiment", + "ConversationLevelSilence", "ConversationParticipant", "ConversationSummarizationSuggestionData", "ConversationView", @@ -181,25 +194,36 @@ "DialogflowIntent", "DialogflowInteractionData", "DialogflowSource", + "EncryptionSpec", "Entity", "EntityMentionData", "ExactMatchConfig", "ExportInsightsDataMetadata", 
"ExportInsightsDataRequest", "ExportInsightsDataResponse", + "ExportIssueModelMetadata", + "ExportIssueModelRequest", + "ExportIssueModelResponse", "FaqAnswerData", "GcsSource", "GetAnalysisRequest", "GetConversationRequest", + "GetEncryptionSpecRequest", "GetIssueModelRequest", "GetIssueRequest", "GetPhraseMatcherRequest", "GetSettingsRequest", "GetViewRequest", "HoldData", + "ImportIssueModelMetadata", + "ImportIssueModelRequest", + "ImportIssueModelResponse", "IngestConversationsMetadata", "IngestConversationsRequest", "IngestConversationsResponse", + "InitializeEncryptionSpecMetadata", + "InitializeEncryptionSpecRequest", + "InitializeEncryptionSpecResponse", "Intent", "IntentMatchData", "InterruptionData", diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_metadata.json b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_metadata.json index af5c8ce82e8f..0e9a96c732d4 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_metadata.json +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_metadata.json @@ -95,6 +95,11 @@ "export_insights_data" ] }, + "ExportIssueModel": { + "methods": [ + "export_issue_model" + ] + }, "GetAnalysis": { "methods": [ "get_analysis" @@ -105,6 +110,11 @@ "get_conversation" ] }, + "GetEncryptionSpec": { + "methods": [ + "get_encryption_spec" + ] + }, "GetIssue": { "methods": [ "get_issue" @@ -130,11 +140,21 @@ "get_view" ] }, + "ImportIssueModel": { + "methods": [ + "import_issue_model" + ] + }, "IngestConversations": { "methods": [ "ingest_conversations" ] }, + "InitializeEncryptionSpec": { + "methods": [ + "initialize_encryption_spec" + ] + }, "ListAnalyses": { "methods": [ "list_analyses" @@ -295,6 +315,11 @@ "export_insights_data" ] }, + "ExportIssueModel": { + "methods": [ + "export_issue_model" + ] + }, "GetAnalysis": { "methods": [ 
"get_analysis" @@ -305,6 +330,11 @@ "get_conversation" ] }, + "GetEncryptionSpec": { + "methods": [ + "get_encryption_spec" + ] + }, "GetIssue": { "methods": [ "get_issue" @@ -330,11 +360,21 @@ "get_view" ] }, + "ImportIssueModel": { + "methods": [ + "import_issue_model" + ] + }, "IngestConversations": { "methods": [ "ingest_conversations" ] }, + "InitializeEncryptionSpec": { + "methods": [ + "initialize_encryption_spec" + ] + }, "ListAnalyses": { "methods": [ "list_analyses" @@ -495,6 +535,11 @@ "export_insights_data" ] }, + "ExportIssueModel": { + "methods": [ + "export_issue_model" + ] + }, "GetAnalysis": { "methods": [ "get_analysis" @@ -505,6 +550,11 @@ "get_conversation" ] }, + "GetEncryptionSpec": { + "methods": [ + "get_encryption_spec" + ] + }, "GetIssue": { "methods": [ "get_issue" @@ -530,11 +580,21 @@ "get_view" ] }, + "ImportIssueModel": { + "methods": [ + "import_issue_model" + ] + }, "IngestConversations": { "methods": [ "ingest_conversations" ] }, + "InitializeEncryptionSpec": { + "methods": [ + "initialize_encryption_spec" + ] + }, "ListAnalyses": { "methods": [ "list_analyses" diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py index 1a7fb072f786..8099b154e9b6 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.17.5" # {x-release-please-version} +__version__ = "1.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py index 40795f94b8ff..e792496b0905 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -90,6 +89,12 @@ class ContactCenterInsightsAsyncClient: parse_conversation_profile_path = staticmethod( ContactCenterInsightsClient.parse_conversation_profile_path ) + encryption_spec_path = staticmethod( + ContactCenterInsightsClient.encryption_spec_path + ) + parse_encryption_spec_path = staticmethod( + ContactCenterInsightsClient.parse_encryption_spec_path + ) issue_path = staticmethod(ContactCenterInsightsClient.issue_path) parse_issue_path = staticmethod(ContactCenterInsightsClient.parse_issue_path) issue_model_path = staticmethod(ContactCenterInsightsClient.issue_model_path) @@ -236,10 +241,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ContactCenterInsightsClient).get_transport_class, - type(ContactCenterInsightsClient), - ) + get_transport_class = ContactCenterInsightsClient.get_transport_class def __init__( self, @@ -324,7 +326,9 @@ async def create_conversation( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.Conversation: - r"""Creates a conversation. 
+ r"""Creates a conversation. Note that this method does not support + audio transcription or redaction. Use ``conversations.upload`` + instead. .. code-block:: python @@ -452,9 +456,9 @@ async def upload_conversation( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Create a longrunning conversation upload operation. - This method differs from CreateConversation by allowing - audio transcription and optional DLP redaction. + r"""Create a long-running conversation upload operation. This method + differs from ``CreateConversation`` by allowing audio + transcription and optional DLP redaction. .. code-block:: python @@ -594,7 +598,21 @@ async def sample_update_conversation(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to be updated. + The list of fields to be updated. All possible fields + can be updated by passing ``*``, or a subset of the + following updateable fields can be provided: + + - ``agent_id`` + - ``language_code`` + - ``labels`` + - ``metadata`` + - ``quality_metadata`` + - ``call_metadata`` + - ``start_time`` + - ``expire_time`` or ``ttl`` + - ``data_source.gcs_source.audio_uri`` or + ``data_source.dialogflow_source.audio_uri`` + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2805,6 +2823,256 @@ async def sample_undeploy_issue_model(): # Done; return the response. 
return response + async def export_issue_model( + self, + request: Optional[ + Union[contact_center_insights.ExportIssueModelRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports an issue model to the provided destination. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + async def sample_export_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + gcs_destination = contact_center_insights_v1.GcsDestination() + gcs_destination.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ExportIssueModelRequest( + gcs_destination=gcs_destination, + name="name_value", + ) + + # Make the request + operation = client.export_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest, dict]]): + The request object. Request to export an issue model. + name (:class:`str`): + Required. The issue model to export. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.ExportIssueModelResponse` + Response from export issue model + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.ExportIssueModelRequest): + request = contact_center_insights.ExportIssueModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.export_issue_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + contact_center_insights.ExportIssueModelResponse, + metadata_type=contact_center_insights.ExportIssueModelMetadata, + ) + + # Done; return the response. + return response + + async def import_issue_model( + self, + request: Optional[ + Union[contact_center_insights.ImportIssueModelRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports an issue model from a Cloud Storage bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + async def sample_import_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + gcs_source = contact_center_insights_v1.GcsSource() + gcs_source.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ImportIssueModelRequest( + gcs_source=gcs_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest, dict]]): + The request object. Request to import an issue model. + parent (:class:`str`): + Required. 
The parent resource of the + issue model. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.ImportIssueModelResponse` + Response from import issue model + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.ImportIssueModelRequest): + request = contact_center_insights.ImportIssueModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.import_issue_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + contact_center_insights.ImportIssueModelResponse, + metadata_type=contact_center_insights.ImportIssueModelMetadata, + ) + + # Done; return the response. + return response + async def get_issue( self, request: Optional[Union[contact_center_insights.GetIssueRequest, dict]] = None, @@ -4065,7 +4333,13 @@ async def sample_get_settings(): Returns: google.cloud.contact_center_insights_v1.types.Settings: - The settings resource. + The CCAI Insights project wide settings. + Use these settings to configure the behavior of + Insights. View these settings with + [getsettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/getSettings) + and change the settings with + [updateSettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/updateSettings). + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -4176,7 +4450,13 @@ async def sample_update_settings(): Returns: google.cloud.contact_center_insights_v1.types.Settings: - The settings resource. + The CCAI Insights project wide settings. + Use these settings to configure the behavior of + Insights. View these settings with + [getsettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/getSettings) + and change the settings with + [updateSettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/updateSettings). + """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have @@ -4228,6 +4508,256 @@ async def sample_update_settings(): # Done; return the response. return response + async def get_encryption_spec( + self, + request: Optional[ + Union[contact_center_insights.GetEncryptionSpecRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.EncryptionSpec: + r"""Gets location-level encryption key specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + async def sample_get_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + request = contact_center_insights_v1.GetEncryptionSpecRequest( + name="name_value", + ) + + # Make the request + response = await client.get_encryption_spec(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contact_center_insights_v1.types.GetEncryptionSpecRequest, dict]]): + The request object. The request to get location-level + encryption specification. + name (:class:`str`): + Required. The name of the encryption + spec resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.contact_center_insights_v1.types.EncryptionSpec: + A customer-managed encryption key + specification that can be applied to all + created resources (e.g. Conversation). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.GetEncryptionSpecRequest): + request = contact_center_insights.GetEncryptionSpecRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_encryption_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def initialize_encryption_spec( + self, + request: Optional[ + Union[contact_center_insights.InitializeEncryptionSpecRequest, dict] + ] = None, + *, + encryption_spec: Optional[resources.EncryptionSpec] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Initializes a location-level encryption key + specification. An error will be thrown if the location + has resources already created before the initialization. + Once the encryption specification is initialized at a + location, it is immutable and all newly created + resources under the location will be encrypted with the + existing specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + async def sample_initialize_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + encryption_spec = contact_center_insights_v1.EncryptionSpec() + encryption_spec.kms_key = "kms_key_value" + + request = contact_center_insights_v1.InitializeEncryptionSpecRequest( + encryption_spec=encryption_spec, + ) + + # Make the request + operation = client.initialize_encryption_spec(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest, dict]]): + The request object. The request to initialize a + location-level encryption specification. + encryption_spec (:class:`google.cloud.contact_center_insights_v1.types.EncryptionSpec`): + Required. The encryption spec used for CMEK encryption. + It is required that the kms key is in the same region as + the endpoint. The same key will be used for all + provisioned resources, if encryption is available. If + the kms_key_name is left empty, no encryption will be + enforced. + + This corresponds to the ``encryption_spec`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecResponse` + The response to initialize a location-level encryption + specification. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([encryption_spec]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, contact_center_insights.InitializeEncryptionSpecRequest + ): + request = contact_center_insights.InitializeEncryptionSpecRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if encryption_spec is not None: + request.encryption_spec = encryption_spec + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.initialize_encryption_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("encryption_spec.name", request.encryption_spec.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + contact_center_insights.InitializeEncryptionSpecResponse, + metadata_type=contact_center_insights.InitializeEncryptionSpecMetadata, + ) + + # Done; return the response. + return response + async def create_view( self, request: Optional[ diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py index e1e7b70918e6..a81a8b69ce6f 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py @@ -266,6 +266,26 @@ def parse_conversation_profile_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def encryption_spec_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified encryption_spec string.""" + return "projects/{project}/locations/{location}/encryptionSpec".format( + project=project, + location=location, + ) + + @staticmethod + def parse_encryption_spec_path(path: str) -> Dict[str, str]: + """Parses a encryption_spec path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/encryptionSpec$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def issue_path( project: str, @@ -885,7 +905,7 @@ def __init__( Type[ContactCenterInsightsTransport], Callable[..., ContactCenterInsightsTransport], ] = ( - type(self).get_transport_class(transport) + ContactCenterInsightsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ContactCenterInsightsTransport], transport) ) @@ -915,7 +935,9 @@ def 
create_conversation( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> resources.Conversation: - r"""Creates a conversation. + r"""Creates a conversation. Note that this method does not support + audio transcription or redaction. Use ``conversations.upload`` + instead. .. code-block:: python @@ -1040,9 +1062,9 @@ def upload_conversation( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Create a longrunning conversation upload operation. - This method differs from CreateConversation by allowing - audio transcription and optional DLP redaction. + r"""Create a long-running conversation upload operation. This method + differs from ``CreateConversation`` by allowing audio + transcription and optional DLP redaction. .. code-block:: python @@ -1180,7 +1202,21 @@ def sample_update_conversation(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. + The list of fields to be updated. All possible fields + can be updated by passing ``*``, or a subset of the + following updateable fields can be provided: + + - ``agent_id`` + - ``language_code`` + - ``labels`` + - ``metadata`` + - ``quality_metadata`` + - ``call_metadata`` + - ``start_time`` + - ``expire_time`` or ``ttl`` + - ``data_source.gcs_source.audio_uri`` or + ``data_source.dialogflow_source.audio_uri`` + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -3338,6 +3374,250 @@ def sample_undeploy_issue_model(): # Done; return the response. 
return response + def export_issue_model( + self, + request: Optional[ + Union[contact_center_insights.ExportIssueModelRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports an issue model to the provided destination. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + def sample_export_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + gcs_destination = contact_center_insights_v1.GcsDestination() + gcs_destination.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ExportIssueModelRequest( + gcs_destination=gcs_destination, + name="name_value", + ) + + # Make the request + operation = client.export_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest, dict]): + The request object. Request to export an issue model. + name (str): + Required. The issue model to export. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.ExportIssueModelResponse` + Response from export issue model + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.ExportIssueModelRequest): + request = contact_center_insights.ExportIssueModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_issue_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + contact_center_insights.ExportIssueModelResponse, + metadata_type=contact_center_insights.ExportIssueModelMetadata, + ) + + # Done; return the response. + return response + + def import_issue_model( + self, + request: Optional[ + Union[contact_center_insights.ImportIssueModelRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports an issue model from a Cloud Storage bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + def sample_import_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + gcs_source = contact_center_insights_v1.GcsSource() + gcs_source.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ImportIssueModelRequest( + gcs_source=gcs_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest, dict]): + The request object. Request to import an issue model. + parent (str): + Required. The parent resource of the + issue model. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.ImportIssueModelResponse` + Response from import issue model + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.ImportIssueModelRequest): + request = contact_center_insights.ImportIssueModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_issue_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + contact_center_insights.ImportIssueModelResponse, + metadata_type=contact_center_insights.ImportIssueModelMetadata, + ) + + # Done; return the response. + return response + def get_issue( self, request: Optional[Union[contact_center_insights.GetIssueRequest, dict]] = None, @@ -4567,7 +4847,13 @@ def sample_get_settings(): Returns: google.cloud.contact_center_insights_v1.types.Settings: - The settings resource. + The CCAI Insights project wide settings. + Use these settings to configure the behavior of + Insights. View these settings with + [getsettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/getSettings) + and change the settings with + [updateSettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/updateSettings). + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -4675,7 +4961,13 @@ def sample_update_settings(): Returns: google.cloud.contact_center_insights_v1.types.Settings: - The settings resource. + The CCAI Insights project wide settings. + Use these settings to configure the behavior of + Insights. View these settings with + [getsettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/getSettings) + and change the settings with + [updateSettings](https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations/updateSettings). + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -4724,6 +5016,252 @@ def sample_update_settings(): # Done; return the response. 
return response + def get_encryption_spec( + self, + request: Optional[ + Union[contact_center_insights.GetEncryptionSpecRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.EncryptionSpec: + r"""Gets location-level encryption key specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + def sample_get_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + request = contact_center_insights_v1.GetEncryptionSpecRequest( + name="name_value", + ) + + # Make the request + response = client.get_encryption_spec(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contact_center_insights_v1.types.GetEncryptionSpecRequest, dict]): + The request object. The request to get location-level + encryption specification. + name (str): + Required. The name of the encryption + spec resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.contact_center_insights_v1.types.EncryptionSpec: + A customer-managed encryption key + specification that can be applied to all + created resources (e.g. Conversation). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, contact_center_insights.GetEncryptionSpecRequest): + request = contact_center_insights.GetEncryptionSpecRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_encryption_spec] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def initialize_encryption_spec( + self, + request: Optional[ + Union[contact_center_insights.InitializeEncryptionSpecRequest, dict] + ] = None, + *, + encryption_spec: Optional[resources.EncryptionSpec] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Initializes a location-level encryption key + specification. An error will be thrown if the location + has resources already created before the initialization. + Once the encryption specification is initialized at a + location, it is immutable and all newly created + resources under the location will be encrypted with the + existing specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contact_center_insights_v1 + + def sample_initialize_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + encryption_spec = contact_center_insights_v1.EncryptionSpec() + encryption_spec.kms_key = "kms_key_value" + + request = contact_center_insights_v1.InitializeEncryptionSpecRequest( + encryption_spec=encryption_spec, + ) + + # Make the request + operation = client.initialize_encryption_spec(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest, dict]): + The request object. 
The request to initialize a + location-level encryption specification. + encryption_spec (google.cloud.contact_center_insights_v1.types.EncryptionSpec): + Required. The encryption spec used for CMEK encryption. + It is required that the kms key is in the same region as + the endpoint. The same key will be used for all + provisioned resources, if encryption is available. If + the kms_key_name is left empty, no encryption will be + enforced. + + This corresponds to the ``encryption_spec`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecResponse` + The response to initialize a location-level encryption + specification. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([encryption_spec]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, contact_center_insights.InitializeEncryptionSpecRequest + ): + request = contact_center_insights.InitializeEncryptionSpecRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if encryption_spec is not None: + request.encryption_spec = encryption_spec + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.initialize_encryption_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("encryption_spec.name", request.encryption_spec.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + contact_center_insights.InitializeEncryptionSpecResponse, + metadata_type=contact_center_insights.InitializeEncryptionSpecMetadata, + ) + + # Done; return the response. + return response + def create_view( self, request: Optional[ diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/base.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/base.py index 36ab3e540e1a..1ccd673466bf 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/base.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/base.py @@ -238,6 +238,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.export_issue_model: gapic_v1.method.wrap_method( + self.export_issue_model, + default_timeout=None, + client_info=client_info, + ), + self.import_issue_model: gapic_v1.method.wrap_method( + 
self.import_issue_model, + default_timeout=None, + client_info=client_info, + ), self.get_issue: gapic_v1.method.wrap_method( self.get_issue, default_timeout=None, @@ -303,6 +313,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_encryption_spec: gapic_v1.method.wrap_method( + self.get_encryption_spec, + default_timeout=None, + client_info=client_info, + ), + self.initialize_encryption_spec: gapic_v1.method.wrap_method( + self.initialize_encryption_spec, + default_timeout=None, + client_info=client_info, + ), self.create_view: gapic_v1.method.wrap_method( self.create_view, default_timeout=None, @@ -542,6 +562,24 @@ def undeploy_issue_model( ]: raise NotImplementedError() + @property + def export_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ExportIssueModelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def import_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ImportIssueModelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_issue( self, @@ -671,6 +709,24 @@ def update_settings( ]: raise NotImplementedError() + @property + def get_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.GetEncryptionSpecRequest], + Union[resources.EncryptionSpec, Awaitable[resources.EncryptionSpec]], + ]: + raise NotImplementedError() + + @property + def initialize_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.InitializeEncryptionSpecRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def create_view( self, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc.py 
b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc.py index 9028aa2b9559..adb2ad6f700c 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc.py @@ -263,7 +263,9 @@ def create_conversation( ]: r"""Return a callable for the create conversation method over gRPC. - Creates a conversation. + Creates a conversation. Note that this method does not support + audio transcription or redaction. Use ``conversations.upload`` + instead. Returns: Callable[[~.CreateConversationRequest], @@ -291,9 +293,9 @@ def upload_conversation( ]: r"""Return a callable for the upload conversation method over gRPC. - Create a longrunning conversation upload operation. - This method differs from CreateConversation by allowing - audio transcription and optional DLP redaction. + Create a long-running conversation upload operation. This method + differs from ``CreateConversation`` by allowing audio + transcription and optional DLP redaction. Returns: Callable[[~.UploadConversationRequest], @@ -849,6 +851,62 @@ def undeploy_issue_model( ) return self._stubs["undeploy_issue_model"] + @property + def export_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ExportIssueModelRequest], operations_pb2.Operation + ]: + r"""Return a callable for the export issue model method over gRPC. + + Exports an issue model to the provided destination. + + Returns: + Callable[[~.ExportIssueModelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_issue_model" not in self._stubs: + self._stubs["export_issue_model"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/ExportIssueModel", + request_serializer=contact_center_insights.ExportIssueModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_issue_model"] + + @property + def import_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ImportIssueModelRequest], operations_pb2.Operation + ]: + r"""Return a callable for the import issue model method over gRPC. + + Imports an issue model from a Cloud Storage bucket. + + Returns: + Callable[[~.ImportIssueModelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_issue_model" not in self._stubs: + self._stubs["import_issue_model"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/ImportIssueModel", + request_serializer=contact_center_insights.ImportIssueModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_issue_model"] + @property def get_issue( self, @@ -1207,6 +1265,69 @@ def update_settings( ) return self._stubs["update_settings"] + @property + def get_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.GetEncryptionSpecRequest], resources.EncryptionSpec + ]: + r"""Return a callable for the get encryption spec method over gRPC. + + Gets location-level encryption key specification. 
+ + Returns: + Callable[[~.GetEncryptionSpecRequest], + ~.EncryptionSpec]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_encryption_spec" not in self._stubs: + self._stubs["get_encryption_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/GetEncryptionSpec", + request_serializer=contact_center_insights.GetEncryptionSpecRequest.serialize, + response_deserializer=resources.EncryptionSpec.deserialize, + ) + return self._stubs["get_encryption_spec"] + + @property + def initialize_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.InitializeEncryptionSpecRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the initialize encryption spec method over gRPC. + + Initializes a location-level encryption key + specification. An error will be thrown if the location + has resources already created before the initialization. + Once the encryption specification is initialized at a + location, it is immutable and all newly created + resources under the location will be encrypted with the + existing specification. + + Returns: + Callable[[~.InitializeEncryptionSpecRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "initialize_encryption_spec" not in self._stubs: + self._stubs["initialize_encryption_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/InitializeEncryptionSpec", + request_serializer=contact_center_insights.InitializeEncryptionSpecRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["initialize_encryption_spec"] + @property def create_view( self, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc_asyncio.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc_asyncio.py index f04fd8885c78..abfe06f88262 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc_asyncio.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/grpc_asyncio.py @@ -270,7 +270,9 @@ def create_conversation( ]: r"""Return a callable for the create conversation method over gRPC. - Creates a conversation. + Creates a conversation. Note that this method does not support + audio transcription or redaction. Use ``conversations.upload`` + instead. Returns: Callable[[~.CreateConversationRequest], @@ -299,9 +301,9 @@ def upload_conversation( ]: r"""Return a callable for the upload conversation method over gRPC. - Create a longrunning conversation upload operation. - This method differs from CreateConversation by allowing - audio transcription and optional DLP redaction. + Create a long-running conversation upload operation. This method + differs from ``CreateConversation`` by allowing audio + transcription and optional DLP redaction. 
Returns: Callable[[~.UploadConversationRequest], @@ -875,6 +877,64 @@ def undeploy_issue_model( ) return self._stubs["undeploy_issue_model"] + @property + def export_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ExportIssueModelRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the export issue model method over gRPC. + + Exports an issue model to the provided destination. + + Returns: + Callable[[~.ExportIssueModelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_issue_model" not in self._stubs: + self._stubs["export_issue_model"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/ExportIssueModel", + request_serializer=contact_center_insights.ExportIssueModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_issue_model"] + + @property + def import_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ImportIssueModelRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the import issue model method over gRPC. + + Imports an issue model from a Cloud Storage bucket. + + Returns: + Callable[[~.ImportIssueModelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "import_issue_model" not in self._stubs: + self._stubs["import_issue_model"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/ImportIssueModel", + request_serializer=contact_center_insights.ImportIssueModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_issue_model"] + @property def get_issue( self, @@ -1246,6 +1306,70 @@ def update_settings( ) return self._stubs["update_settings"] + @property + def get_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.GetEncryptionSpecRequest], + Awaitable[resources.EncryptionSpec], + ]: + r"""Return a callable for the get encryption spec method over gRPC. + + Gets location-level encryption key specification. + + Returns: + Callable[[~.GetEncryptionSpecRequest], + Awaitable[~.EncryptionSpec]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_encryption_spec" not in self._stubs: + self._stubs["get_encryption_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/GetEncryptionSpec", + request_serializer=contact_center_insights.GetEncryptionSpecRequest.serialize, + response_deserializer=resources.EncryptionSpec.deserialize, + ) + return self._stubs["get_encryption_spec"] + + @property + def initialize_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.InitializeEncryptionSpecRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the initialize encryption spec method over gRPC. + + Initializes a location-level encryption key + specification. An error will be thrown if the location + has resources already created before the initialization. 
+ Once the encryption specification is initialized at a + location, it is immutable and all newly created + resources under the location will be encrypted with the + existing specification. + + Returns: + Callable[[~.InitializeEncryptionSpecRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "initialize_encryption_spec" not in self._stubs: + self._stubs["initialize_encryption_spec"] = self.grpc_channel.unary_unary( + "/google.cloud.contactcenterinsights.v1.ContactCenterInsights/InitializeEncryptionSpec", + request_serializer=contact_center_insights.InitializeEncryptionSpecRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["initialize_encryption_spec"] + @property def create_view( self, @@ -1493,6 +1617,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.export_issue_model: gapic_v1.method_async.wrap_method( + self.export_issue_model, + default_timeout=None, + client_info=client_info, + ), + self.import_issue_model: gapic_v1.method_async.wrap_method( + self.import_issue_model, + default_timeout=None, + client_info=client_info, + ), self.get_issue: gapic_v1.method_async.wrap_method( self.get_issue, default_timeout=None, @@ -1558,6 +1692,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_encryption_spec: gapic_v1.method_async.wrap_method( + self.get_encryption_spec, + default_timeout=None, + client_info=client_info, + ), + self.initialize_encryption_spec: gapic_v1.method_async.wrap_method( + self.initialize_encryption_spec, + default_timeout=None, + client_info=client_info, + ), self.create_view: gapic_v1.method_async.wrap_method( 
self.create_view, default_timeout=None, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py index ab1260ec3b6c..4c44d1bd3e8a 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py @@ -191,6 +191,14 @@ def post_export_insights_data(self, response): logging.log(f"Received response: {response}") return response + def pre_export_issue_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_issue_model(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_analysis(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -207,6 +215,14 @@ def post_get_conversation(self, response): logging.log(f"Received response: {response}") return response + def pre_get_encryption_spec(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_encryption_spec(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_issue(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -247,6 +263,14 @@ def post_get_view(self, response): logging.log(f"Received response: {response}") return response + def pre_import_issue_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_issue_model(self, response): + logging.log(f"Received response: {response}") + return response + 
def pre_ingest_conversations(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -255,6 +279,14 @@ def post_ingest_conversations(self, response): logging.log(f"Received response: {response}") return response + def pre_initialize_encryption_spec(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_initialize_encryption_spec(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_analyses(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -738,6 +770,31 @@ def post_export_insights_data( """ return response + def pre_export_issue_model( + self, + request: contact_center_insights.ExportIssueModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + contact_center_insights.ExportIssueModelRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for export_issue_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContactCenterInsights server. + """ + return request, metadata + + def post_export_issue_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_issue_model + + Override in a subclass to manipulate the response + after it is returned by the ContactCenterInsights server but before + it is returned to user code. 
+ """ + return response + def pre_get_analysis( self, request: contact_center_insights.GetAnalysisRequest, @@ -784,6 +841,31 @@ def post_get_conversation( """ return response + def pre_get_encryption_spec( + self, + request: contact_center_insights.GetEncryptionSpecRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + contact_center_insights.GetEncryptionSpecRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_encryption_spec + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContactCenterInsights server. + """ + return request, metadata + + def post_get_encryption_spec( + self, response: resources.EncryptionSpec + ) -> resources.EncryptionSpec: + """Post-rpc interceptor for get_encryption_spec + + Override in a subclass to manipulate the response + after it is returned by the ContactCenterInsights server but before + it is returned to user code. + """ + return response + def pre_get_issue( self, request: contact_center_insights.GetIssueRequest, @@ -895,6 +977,31 @@ def post_get_view(self, response: resources.View) -> resources.View: """ return response + def pre_import_issue_model( + self, + request: contact_center_insights.ImportIssueModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + contact_center_insights.ImportIssueModelRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for import_issue_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContactCenterInsights server. + """ + return request, metadata + + def post_import_issue_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_issue_model + + Override in a subclass to manipulate the response + after it is returned by the ContactCenterInsights server but before + it is returned to user code. 
+ """ + return response + def pre_ingest_conversations( self, request: contact_center_insights.IngestConversationsRequest, @@ -920,6 +1027,32 @@ def post_ingest_conversations( """ return response + def pre_initialize_encryption_spec( + self, + request: contact_center_insights.InitializeEncryptionSpecRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + contact_center_insights.InitializeEncryptionSpecRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for initialize_encryption_spec + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContactCenterInsights server. + """ + return request, metadata + + def post_initialize_encryption_spec( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for initialize_encryption_spec + + Override in a subclass to manipulate the response + after it is returned by the ContactCenterInsights server but before + it is returned to user code. + """ + return response + def pre_list_analyses( self, request: contact_center_insights.ListAnalysesRequest, @@ -2960,6 +3093,101 @@ def __call__( resp = self._interceptor.post_export_insights_data(resp) return resp + class _ExportIssueModel(ContactCenterInsightsRestStub): + def __hash__(self): + return hash("ExportIssueModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: contact_center_insights.ExportIssueModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export issue model method over HTTP. + + Args: + request (~.contact_center_insights.ExportIssueModelRequest): + The request object. Request to export an issue model. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/issueModels/*}:export", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_issue_model( + request, metadata + ) + pb_request = contact_center_insights.ExportIssueModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_issue_model(resp) + return resp + class _GetAnalysis(ContactCenterInsightsRestStub): def __hash__(self): return hash("GetAnalysis") @@ -3132,6 +3360,97 @@ def __call__( resp = self._interceptor.post_get_conversation(resp) return resp + class _GetEncryptionSpec(ContactCenterInsightsRestStub): + def __hash__(self): + return hash("GetEncryptionSpec") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: contact_center_insights.GetEncryptionSpecRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.EncryptionSpec: + r"""Call the get encryption spec method over HTTP. + + Args: + request (~.contact_center_insights.GetEncryptionSpecRequest): + The request object. The request to get location-level + encryption specification. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.EncryptionSpec: + A customer-managed encryption key + specification that can be applied to all + created resources (e.g. Conversation). 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/encryptionSpec}", + }, + ] + request, metadata = self._interceptor.pre_get_encryption_spec( + request, metadata + ) + pb_request = contact_center_insights.GetEncryptionSpecRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.EncryptionSpec() + pb_resp = resources.EncryptionSpec.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_encryption_spec(resp) + return resp + class _GetIssue(ContactCenterInsightsRestStub): def __hash__(self): return hash("GetIssue") @@ -3426,7 +3745,13 @@ def __call__( Returns: ~.resources.Settings: - The settings resource. + The CCAI Insights project wide settings. Use these + settings to configure the behavior of Insights. View + these settings with + ```getsettings`` `__ + and change the settings with + ```updateSettings`` `__. 
+ """ http_options: List[Dict[str, str]] = [ @@ -3561,6 +3886,101 @@ def __call__( resp = self._interceptor.post_get_view(resp) return resp + class _ImportIssueModel(ContactCenterInsightsRestStub): + def __hash__(self): + return hash("ImportIssueModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: contact_center_insights.ImportIssueModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import issue model method over HTTP. + + Args: + request (~.contact_center_insights.ImportIssueModelRequest): + The request object. Request to import an issue model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/issueModels:import", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_issue_model( + request, metadata + ) + pb_request = contact_center_insights.ImportIssueModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_issue_model(resp) + return resp + class _IngestConversations(ContactCenterInsightsRestStub): def __hash__(self): return hash("IngestConversations") @@ -3656,6 +4076,105 @@ def __call__( resp = self._interceptor.post_ingest_conversations(resp) return resp + class _InitializeEncryptionSpec(ContactCenterInsightsRestStub): + def __hash__(self): + return hash("InitializeEncryptionSpec") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: contact_center_insights.InitializeEncryptionSpecRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the initialize encryption + spec method over HTTP. + + Args: + request (~.contact_center_insights.InitializeEncryptionSpecRequest): + The request object. The request to initialize a + location-level encryption specification. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{encryption_spec.name=projects/*/locations/*/encryptionSpec}:initialize", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_initialize_encryption_spec( + request, metadata + ) + pb_request = contact_center_insights.InitializeEncryptionSpecRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_initialize_encryption_spec(resp) + return resp + class _ListAnalyses(ContactCenterInsightsRestStub): def __hash__(self): return hash("ListAnalyses") @@ -4685,7 +5204,13 @@ def __call__( Returns: ~.resources.Settings: - The settings resource. + The CCAI Insights project wide settings. Use these + settings to configure the behavior of Insights. 
View + these settings with + ```getsettings`` `__ + and change the settings with + ```updateSettings`` `__. + """ http_options: List[Dict[str, str]] = [ @@ -5093,6 +5618,16 @@ def export_insights_data( # In C++ this would require a dynamic_cast return self._ExportInsightsData(self._session, self._host, self._interceptor) # type: ignore + @property + def export_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ExportIssueModelRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportIssueModel(self._session, self._host, self._interceptor) # type: ignore + @property def get_analysis( self, @@ -5111,6 +5646,16 @@ def get_conversation( # In C++ this would require a dynamic_cast return self._GetConversation(self._session, self._host, self._interceptor) # type: ignore + @property + def get_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.GetEncryptionSpecRequest], resources.EncryptionSpec + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEncryptionSpec(self._session, self._host, self._interceptor) # type: ignore + @property def get_issue( self, @@ -5153,6 +5698,16 @@ def get_view( # In C++ this would require a dynamic_cast return self._GetView(self._session, self._host, self._interceptor) # type: ignore + @property + def import_issue_model( + self, + ) -> Callable[ + [contact_center_insights.ImportIssueModelRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ImportIssueModel(self._session, self._host, self._interceptor) # type: ignore + @property def ingest_conversations( self, @@ -5163,6 +5718,17 @@ def ingest_conversations( # In C++ this would require a dynamic_cast return self._IngestConversations(self._session, self._host, self._interceptor) # type: ignore + @property + def initialize_encryption_spec( + self, + ) -> Callable[ + [contact_center_insights.InitializeEncryptionSpecRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InitializeEncryptionSpec(self._session, self._host, self._interceptor) # type: ignore + @property def list_analyses( self, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py index be8c70ff77d9..30e7ff857b46 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py @@ -45,16 +45,26 @@ ExportInsightsDataMetadata, ExportInsightsDataRequest, ExportInsightsDataResponse, + ExportIssueModelMetadata, + ExportIssueModelRequest, + ExportIssueModelResponse, GetAnalysisRequest, GetConversationRequest, + GetEncryptionSpecRequest, GetIssueModelRequest, GetIssueRequest, GetPhraseMatcherRequest, GetSettingsRequest, GetViewRequest, + ImportIssueModelMetadata, + ImportIssueModelRequest, + ImportIssueModelResponse, IngestConversationsMetadata, IngestConversationsRequest, IngestConversationsResponse, + InitializeEncryptionSpecMetadata, + InitializeEncryptionSpecRequest, + InitializeEncryptionSpecResponse, ListAnalysesRequest, ListAnalysesResponse, 
ListConversationsRequest, @@ -90,11 +100,13 @@ Conversation, ConversationDataSource, ConversationLevelSentiment, + ConversationLevelSilence, ConversationParticipant, ConversationSummarizationSuggestionData, DialogflowIntent, DialogflowInteractionData, DialogflowSource, + EncryptionSpec, Entity, EntityMentionData, ExactMatchConfig, @@ -157,16 +169,26 @@ "ExportInsightsDataMetadata", "ExportInsightsDataRequest", "ExportInsightsDataResponse", + "ExportIssueModelMetadata", + "ExportIssueModelRequest", + "ExportIssueModelResponse", "GetAnalysisRequest", "GetConversationRequest", + "GetEncryptionSpecRequest", "GetIssueModelRequest", "GetIssueRequest", "GetPhraseMatcherRequest", "GetSettingsRequest", "GetViewRequest", + "ImportIssueModelMetadata", + "ImportIssueModelRequest", + "ImportIssueModelResponse", "IngestConversationsMetadata", "IngestConversationsRequest", "IngestConversationsResponse", + "InitializeEncryptionSpecMetadata", + "InitializeEncryptionSpecRequest", + "InitializeEncryptionSpecResponse", "ListAnalysesRequest", "ListAnalysesResponse", "ListConversationsRequest", @@ -201,11 +223,13 @@ "Conversation", "ConversationDataSource", "ConversationLevelSentiment", + "ConversationLevelSilence", "ConversationParticipant", "ConversationSummarizationSuggestionData", "DialogflowIntent", "DialogflowInteractionData", "DialogflowSource", + "EncryptionSpec", "Entity", "EntityMentionData", "ExactMatchConfig", diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py index 60816f4c061f..1229fb10453f 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py @@ -71,6 +71,12 @@ 
"UndeployIssueModelRequest", "UndeployIssueModelResponse", "UndeployIssueModelMetadata", + "ExportIssueModelRequest", + "ExportIssueModelResponse", + "ExportIssueModelMetadata", + "ImportIssueModelRequest", + "ImportIssueModelResponse", + "ImportIssueModelMetadata", "GetIssueRequest", "ListIssuesRequest", "ListIssuesResponse", @@ -86,6 +92,10 @@ "UpdatePhraseMatcherRequest", "GetSettingsRequest", "UpdateSettingsRequest", + "GetEncryptionSpecRequest", + "InitializeEncryptionSpecRequest", + "InitializeEncryptionSpecResponse", + "InitializeEncryptionSpecMetadata", "CreateViewRequest", "GetViewRequest", "ListViewsRequest", @@ -394,7 +404,7 @@ class UploadConversationRequest(proto.Message): class UploadConversationMetadata(proto.Message): - r"""The metadata for an UploadConversation operation. + r"""The metadata for an ``UploadConversation`` operation. Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -449,9 +459,9 @@ class ListConversationsRequest(proto.Message): page_size (int): The maximum number of conversations to return in the response. A valid page size ranges from 0 - to 1,000 inclusive. If the page size is zero or - unspecified, a default page size of 100 will be - chosen. Note that a call might return fewer + to 100,000 inclusive. If the page size is zero + or unspecified, a default page size of 100 will + be chosen. Note that a call might return fewer results than the requested page size. page_token (str): The value returned by the last @@ -462,6 +472,23 @@ class ListConversationsRequest(proto.Message): A filter to reduce results to a specific subset. Useful for querying conversations with specific properties. + order_by (str): + Optional. The attribute by which to order conversations in + the response. If empty, conversations will be ordered by + descending creation time. 
Supported values are one of the + following: + + - create_time + - customer_satisfaction_rating + - duration + - latest_analysis + - start_time + - turn_count + + The default sort order is ascending. To specify order, + append ``asc`` or ``desc`` (``create_time desc``). For more + details, see `Google AIPs + Ordering `__. view (google.cloud.contact_center_insights_v1.types.ConversationView): The level of details of the conversation. Default is ``BASIC``. @@ -483,6 +510,10 @@ class ListConversationsRequest(proto.Message): proto.STRING, number=4, ) + order_by: str = proto.Field( + proto.STRING, + number=7, + ) view: "ConversationView" = proto.Field( proto.ENUM, number=5, @@ -549,7 +580,20 @@ class UpdateConversationRequest(proto.Message): Required. The new values for the conversation. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. + The list of fields to be updated. All possible fields can be + updated by passing ``*``, or a subset of the following + updateable fields can be provided: + + - ``agent_id`` + - ``language_code`` + - ``labels`` + - ``metadata`` + - ``quality_metadata`` + - ``call_metadata`` + - ``start_time`` + - ``expire_time`` or ``ttl`` + - ``data_source.gcs_source.audio_uri`` or + ``data_source.dialogflow_source.audio_uri`` """ conversation: resources.Conversation = proto.Field( @@ -619,11 +663,22 @@ class IngestConversationsRequest(proto.Message): Optional. Default Speech-to-Text configuration. Optional, will default to the config specified in Settings. + sample_size (int): + Optional. If set, this fields indicates the + number of objects to ingest from the Cloud + Storage bucket. If empty, the entire bucket will + be ingested. Unless they are first deleted, + conversations produced through sampling won't be + ingested by subsequent ingest requests. + + This field is a member of `oneof`_ ``_sample_size``. """ class GcsSource(proto.Message): r"""Configuration for Cloud Storage bucket sources. + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: bucket_uri (str): Required. The Cloud Storage bucket containing @@ -631,6 +686,22 @@ class GcsSource(proto.Message): bucket_object_type (google.cloud.contact_center_insights_v1.types.IngestConversationsRequest.GcsSource.BucketObjectType): Optional. Specifies the type of the objects in ``bucket_uri``. + metadata_bucket_uri (str): + Optional. The Cloud Storage path to the conversation + metadata. Note that: [1] Metadata files are expected to be + in JSON format. [2] Metadata and source files (transcripts + or audio) must be in separate buckets. [3] A source file and + its corresponding metadata file must share the same name to + be properly ingested, E.g. + ``gs://bucket/audio/conversation1.mp3`` and + ``gs://bucket/metadata/conversation1.json``. + + This field is a member of `oneof`_ ``_metadata_bucket_uri``. + custom_metadata_keys (MutableSequence[str]): + Optional. Custom keys to extract as conversation labels from + metadata files in ``metadata_bucket_uri``. Keys not included + in this field will be ignored. Note that there is a limit of + 20 labels per conversation. """ class BucketObjectType(proto.Enum): @@ -660,6 +731,15 @@ class BucketObjectType(proto.Enum): enum="IngestConversationsRequest.GcsSource.BucketObjectType", ) ) + metadata_bucket_uri: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + custom_metadata_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=12, + ) class TranscriptObjectConfig(proto.Message): r"""Configuration for processing transcript objects. @@ -681,8 +761,10 @@ class ConversationConfig(proto.Message): Attributes: agent_id (str): - An opaque, user-specified string representing - the human agent who handled the conversations. + Optional. An opaque, user-specified string representing a + human agent who handled all conversations in the import. 
+ Note that this will be overridden if per-conversation + metadata is provided through the ``metadata_bucket_uri``. agent_channel (int): Optional. Indicates which of the channels, 1 or 2, contains the agent. Note that this must be @@ -739,6 +821,11 @@ class ConversationConfig(proto.Message): number=6, message=resources.SpeechConfig, ) + sample_size: int = proto.Field( + proto.INT32, + number=7, + optional=True, + ) class IngestConversationsMetadata(proto.Message): @@ -1559,6 +1646,163 @@ class UndeployIssueModelMetadata(proto.Message): ) +class ExportIssueModelRequest(proto.Message): + r"""Request to export an issue model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_destination (google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest.GcsDestination): + Google Cloud Storage URI to export the issue + model to. + + This field is a member of `oneof`_ ``Destination``. + name (str): + Required. The issue model to export. + """ + + class GcsDestination(proto.Message): + r"""Google Cloud Storage Object URI to save the issue model to. + + Attributes: + object_uri (str): + Required. Format: ``gs:///`` + """ + + object_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + gcs_destination: GcsDestination = proto.Field( + proto.MESSAGE, + number=2, + oneof="Destination", + message=GcsDestination, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ExportIssueModelResponse(proto.Message): + r"""Response from export issue model""" + + +class ExportIssueModelMetadata(proto.Message): + r"""Metadata used for export issue model. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation was created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation finished running. + request (google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest): + The original export request. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + request: "ExportIssueModelRequest" = proto.Field( + proto.MESSAGE, + number=3, + message="ExportIssueModelRequest", + ) + + +class ImportIssueModelRequest(proto.Message): + r"""Request to import an issue model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_source (google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest.GcsSource): + Google Cloud Storage source message. + + This field is a member of `oneof`_ ``Source``. + parent (str): + Required. The parent resource of the issue + model. + create_new_model (bool): + Optional. If set to true, will create an + issue model from the imported file with randomly + generated IDs for the issue model and + corresponding issues. Otherwise, replaces an + existing model with the same ID as the file. + """ + + class GcsSource(proto.Message): + r"""Google Cloud Storage Object URI to get the issue model file + from. + + Attributes: + object_uri (str): + Required. Format: ``gs:///`` + """ + + object_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + gcs_source: GcsSource = proto.Field( + proto.MESSAGE, + number=2, + oneof="Source", + message=GcsSource, + ) + parent: str = proto.Field( + proto.STRING, + number=1, + ) + create_new_model: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ImportIssueModelResponse(proto.Message): + r"""Response from import issue model""" + + +class ImportIssueModelMetadata(proto.Message): + r"""Metadata used for import issue model. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation was created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation finished running. 
+ request (google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest): + The original import request. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + request: "ImportIssueModelRequest" = proto.Field( + proto.MESSAGE, + number=3, + message="ImportIssueModelRequest", + ) + + class GetIssueRequest(proto.Message): r"""The request to get an issue. @@ -1855,6 +2099,90 @@ class UpdateSettingsRequest(proto.Message): ) +class GetEncryptionSpecRequest(proto.Message): + r"""The request to get location-level encryption specification. + + Attributes: + name (str): + Required. The name of the encryption spec + resource to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class InitializeEncryptionSpecRequest(proto.Message): + r"""The request to initialize a location-level encryption + specification. + + Attributes: + encryption_spec (google.cloud.contact_center_insights_v1.types.EncryptionSpec): + Required. The encryption spec used for CMEK encryption. It + is required that the kms key is in the same region as the + endpoint. The same key will be used for all provisioned + resources, if encryption is available. If the kms_key_name + is left empty, no encryption will be enforced. + """ + + encryption_spec: resources.EncryptionSpec = proto.Field( + proto.MESSAGE, + number=1, + message=resources.EncryptionSpec, + ) + + +class InitializeEncryptionSpecResponse(proto.Message): + r"""The response to initialize a location-level encryption + specification. + + """ + + +class InitializeEncryptionSpecMetadata(proto.Message): + r"""Metadata for initializing a location-level encryption + specification. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. 
+ end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + request (google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest): + Output only. The original request for + initialization. + partial_errors (MutableSequence[google.rpc.status_pb2.Status]): + Partial errors during initialising operation + that might cause the operation output to be + incomplete. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + request: "InitializeEncryptionSpecRequest" = proto.Field( + proto.MESSAGE, + number=3, + message="InitializeEncryptionSpecRequest", + ) + partial_errors: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + class CreateViewRequest(proto.Message): r"""The request to create a view. 
diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py index 0302667ebc6e..83de2aec7b7e 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py @@ -32,6 +32,7 @@ "AnalysisResult", "IssueModelResult", "ConversationLevelSentiment", + "ConversationLevelSilence", "IssueAssignment", "CallAnnotation", "AnnotationBoundary", @@ -55,6 +56,7 @@ "PhraseMatchRuleConfig", "ExactMatchConfig", "Settings", + "EncryptionSpec", "RedactionConfig", "SpeechConfig", "RuntimeAnnotation", @@ -129,6 +131,11 @@ class Conversation(proto.Message): quality_metadata (google.cloud.contact_center_insights_v1.types.Conversation.QualityMetadata): Conversation metadata related to quality management. + metadata_json (str): + Input only. JSON Metadata encoded as a + string. This field is primarily used by Insights + integrations with various telphony systems and + must be in one of Insights' supported formats. transcript (google.cloud.contact_center_insights_v1.types.Conversation.Transcript): Output only. The conversation transcript. medium (google.cloud.contact_center_insights_v1.types.Conversation.Medium): @@ -482,6 +489,10 @@ class DialogflowSegmentMetadata(proto.Message): number=24, message=QualityMetadata, ) + metadata_json: str = proto.Field( + proto.STRING, + number=25, + ) transcript: Transcript = proto.Field( proto.MESSAGE, number=8, @@ -691,6 +702,9 @@ class CallAnalysisMetadata(proto.Message): sentiments (MutableSequence[google.cloud.contact_center_insights_v1.types.ConversationLevelSentiment]): Overall conversation-level sentiment for each channel of the call. 
+ silence (google.cloud.contact_center_insights_v1.types.ConversationLevelSilence): + Overall conversation-level silence during the + call. intents (MutableMapping[str, google.cloud.contact_center_insights_v1.types.Intent]): All the matched intents in the call. phrase_matchers (MutableMapping[str, google.cloud.contact_center_insights_v1.types.PhraseMatchData]): @@ -716,6 +730,11 @@ class CallAnalysisMetadata(proto.Message): number=4, message="ConversationLevelSentiment", ) + silence: "ConversationLevelSilence" = proto.Field( + proto.MESSAGE, + number=11, + message="ConversationLevelSilence", + ) intents: MutableMapping[str, "Intent"] = proto.MapField( proto.STRING, proto.MESSAGE, @@ -791,6 +810,28 @@ class ConversationLevelSentiment(proto.Message): ) +class ConversationLevelSilence(proto.Message): + r"""Conversation-level silence data. + + Attributes: + silence_duration (google.protobuf.duration_pb2.Duration): + Amount of time calculated to be in silence. + silence_percentage (float): + Percentage of the total conversation spent in + silence. + """ + + silence_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + silence_percentage: float = proto.Field( + proto.FLOAT, + number=2, + ) + + class IssueAssignment(proto.Message): r"""Information about the issue. @@ -1456,6 +1497,8 @@ class Issue(proto.Message): Output only. Resource names of the sample representative utterances that match to this issue. + display_description (str): + Representative description of the issue. """ name: str = proto.Field( @@ -1480,6 +1523,10 @@ class Issue(proto.Message): proto.STRING, number=6, ) + display_description: str = proto.Field( + proto.STRING, + number=14, + ) class IssueModelLabelStats(proto.Message): @@ -1764,7 +1811,11 @@ class ExactMatchConfig(proto.Message): class Settings(proto.Message): - r"""The settings resource. + r"""The CCAI Insights project wide settings. 
Use these settings to + configure the behavior of Insights. View these settings with + ```getsettings`` `__ + and change the settings with + ```updateSettings`` `__. Attributes: name (str): @@ -1807,21 +1858,30 @@ class Settings(proto.Message): created. - "export-insights-data": Notify each time an export is complete. + - "ingest-conversations": Notify each time an + IngestConversations LRO is complete. - "update-conversation": Notify each time a conversation is updated via UpdateConversation. + - "upload-conversation": Notify when an UploadConversation + LRO is complete. Values are Pub/Sub topics. The format of each Pub/Sub topic is: projects/{project}/topics/{topic} analysis_config (google.cloud.contact_center_insights_v1.types.Settings.AnalysisConfig): Default analysis settings. redaction_config (google.cloud.contact_center_insights_v1.types.RedactionConfig): - Default DLP redaction resources to be applied - while ingesting conversations. + Default DLP redaction resources to be applied while + ingesting conversations. This applies to conversations + ingested from the ``UploadConversation`` and + ``IngestConversations`` endpoints, including conversations + coming from CCAI Platform. speech_config (google.cloud.contact_center_insights_v1.types.SpeechConfig): - Optional. Default Speech-to-Text resources to - be used while ingesting audio files. Optional, - CCAI Insights will create a default if not - provided. + Optional. Default Speech-to-Text resources to use while + ingesting audio files. Optional, CCAI Insights will create a + default if not provided. This applies to conversations + ingested from the ``UploadConversation`` and + ``IngestConversations`` endpoints, including conversations + coming from CCAI Platform. """ class AnalysisConfig(proto.Message): @@ -1900,9 +1960,44 @@ class AnalysisConfig(proto.Message): ) +class EncryptionSpec(proto.Message): + r"""A customer-managed encryption key specification that can be + applied to all created resources (e.g. 
Conversation). + + Attributes: + name (str): + Immutable. The resource name of the + encryption key specification resource. Format: + + projects/{project}/locations/{location}/encryptionSpec + kms_key (str): + Required. The name of customer-managed encryption key that + is used to secure a resource and its sub-resources. If + empty, the resource is secured by the default Google + encryption key. Only the key in the same location as this + resource is allowed to be used for encryption. Format: + ``projects/{project}/locations/{location}/keyRings/{keyRing}/cryptoKeys/{key}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + kms_key: str = proto.Field( + proto.STRING, + number=2, + ) + + class RedactionConfig(proto.Message): - r"""DLP resources used for redaction while ingesting - conversations. + r"""DLP resources used for redaction while ingesting conversations. DLP + settings are applied to conversations ingested from the + ``UploadConversation`` and ``IngestConversations`` endpoints, + including conversation coming from CCAI Platform. They are not + applied to conversations ingested from the ``CreateConversation`` + endpoint or the Dialogflow / Agent Assist runtime integrations. When + using Dialogflow / Agent Assist runtime integrations, redaction + should be performed in Dialogflow / Agent Assist. Attributes: deidentify_template (str): @@ -1926,7 +2021,11 @@ class RedactionConfig(proto.Message): class SpeechConfig(proto.Message): - r"""Speech-to-Text configuration. + r"""Speech-to-Text configuration. Speech-to-Text settings are applied to + conversations ingested from the ``UploadConversation`` and + ``IngestConversations`` endpoints, including conversation coming + from CCAI Platform. They are not applied to conversations ingested + from the ``CreateConversation`` endpoint. 
Attributes: speech_recognizer (str): @@ -1991,8 +2090,56 @@ class RuntimeAnnotation(proto.Message): answer_feedback (google.cloud.contact_center_insights_v1.types.AnswerFeedback): The feedback that the customer has about the answer in ``data``. + user_input (google.cloud.contact_center_insights_v1.types.RuntimeAnnotation.UserInput): + Explicit input used for generating the answer """ + class UserInput(proto.Message): + r"""Explicit input used for generating the answer + + Attributes: + query (str): + Query text. Article Search uses this to store + the input query used to generate the search + results. + generator_name (str): + The resource name of associated generator. Format: + ``projects//locations//generators/`` + query_source (google.cloud.contact_center_insights_v1.types.RuntimeAnnotation.UserInput.QuerySource): + Query source for the answer. + """ + + class QuerySource(proto.Enum): + r"""The source of the query. + + Values: + QUERY_SOURCE_UNSPECIFIED (0): + Unknown query source. + AGENT_QUERY (1): + The query is from agents. + SUGGESTED_QUERY (2): + The query is a query from previous + suggestions, e.g. from a preceding + SuggestKnowledgeAssist response. + """ + QUERY_SOURCE_UNSPECIFIED = 0 + AGENT_QUERY = 1 + SUGGESTED_QUERY = 2 + + query: str = proto.Field( + proto.STRING, + number=1, + ) + generator_name: str = proto.Field( + proto.STRING, + number=2, + ) + query_source: "RuntimeAnnotation.UserInput.QuerySource" = proto.Field( + proto.ENUM, + number=3, + enum="RuntimeAnnotation.UserInput.QuerySource", + ) + article_suggestion: "ArticleSuggestionData" = proto.Field( proto.MESSAGE, number=6, @@ -2055,6 +2202,11 @@ class RuntimeAnnotation(proto.Message): number=5, message="AnswerFeedback", ) + user_input: UserInput = proto.Field( + proto.MESSAGE, + number=16, + message=UserInput, + ) class AnswerFeedback(proto.Message): @@ -2566,9 +2718,12 @@ class SummarizationModel(proto.Enum): Unspecified summarization model. BASELINE_MODEL (1): The CCAI baseline model. 
+ BASELINE_MODEL_V2_0 (2): + The CCAI baseline model, V2.0. """ SUMMARIZATION_MODEL_UNSPECIFIED = 0 BASELINE_MODEL = 1 + BASELINE_MODEL_V2_0 = 2 conversation_profile: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py new file mode 100644 index 000000000000..ec4c508ac2b3 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportIssueModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +async def sample_export_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + gcs_destination = contact_center_insights_v1.GcsDestination() + gcs_destination.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ExportIssueModelRequest( + gcs_destination=gcs_destination, + name="name_value", + ) + + # Make the request + operation = client.export_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_async] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py new file mode 100644 index 000000000000..b9fa9152f794 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportIssueModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +def sample_export_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + gcs_destination = contact_center_insights_v1.GcsDestination() + gcs_destination.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ExportIssueModelRequest( + gcs_destination=gcs_destination, + name="name_value", + ) + + # Make the request + operation = client.export_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_sync] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py new file mode 100644 index 000000000000..9217d1773b5f --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEncryptionSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +async def sample_get_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + request = contact_center_insights_v1.GetEncryptionSpecRequest( + name="name_value", + ) + + # Make the request + response = await client.get_encryption_spec(request=request) + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_async] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py new file mode 100644 index 000000000000..90f3743e6570 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetEncryptionSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +def sample_get_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + request = contact_center_insights_v1.GetEncryptionSpecRequest( + name="name_value", + ) + + # Make the request + response = client.get_encryption_spec(request=request) + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_sync] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py new file mode 100644 index 000000000000..e90dd84dc650 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportIssueModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +async def sample_import_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + gcs_source = contact_center_insights_v1.GcsSource() + gcs_source.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ImportIssueModelRequest( + gcs_source=gcs_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_async] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py new file mode 100644 index 000000000000..74ee99de39bc --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportIssueModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +def sample_import_issue_model(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + gcs_source = contact_center_insights_v1.GcsSource() + gcs_source.object_uri = "object_uri_value" + + request = contact_center_insights_v1.ImportIssueModelRequest( + gcs_source=gcs_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_issue_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_sync] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py new file mode 100644 index 000000000000..bc5767e0a1c1 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InitializeEncryptionSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +async def sample_initialize_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsAsyncClient() + + # Initialize request argument(s) + encryption_spec = contact_center_insights_v1.EncryptionSpec() + encryption_spec.kms_key = "kms_key_value" + + request = contact_center_insights_v1.InitializeEncryptionSpecRequest( + encryption_spec=encryption_spec, + ) + + # Make the request + operation = client.initialize_encryption_spec(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_async] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py new file mode 100644 index 000000000000..26a058ab9ae1 --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InitializeEncryptionSpec +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contact-center-insights + + +# [START contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contact_center_insights_v1 + + +def sample_initialize_encryption_spec(): + # Create a client + client = contact_center_insights_v1.ContactCenterInsightsClient() + + # Initialize request argument(s) + encryption_spec = contact_center_insights_v1.EncryptionSpec() + encryption_spec.kms_key = "kms_key_value" + + request = contact_center_insights_v1.InitializeEncryptionSpecRequest( + encryption_spec=encryption_spec, + ) + + # Make the request + operation = client.initialize_encryption_spec(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_sync] diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json index 8a7deccb9d04..17cdec51febb 100644 --- a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-contact-center-insights", - "version": "1.17.5" + "version": "1.18.0" }, "snippets": [ { @@ -2798,19 +2798,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_analysis", + "fullName": 
"google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.export_issue_model", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetAnalysis", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.ExportIssueModel", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetAnalysis" + "shortName": "ExportIssueModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetAnalysisRequest" + "type": "google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest" }, { "name": "name", @@ -2829,22 +2829,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Analysis", - "shortName": "get_analysis" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_issue_model" }, - "description": "Sample for GetAnalysis", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_async.py", + "description": "Sample for ExportIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetAnalysis_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_async", "segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -2854,22 +2854,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": 
"contactcenterinsights_v1_generated_contact_center_insights_get_analysis_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_async.py" }, { "canonical": true, @@ -2878,19 +2878,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_analysis", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.export_issue_model", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetAnalysis", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.ExportIssueModel", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetAnalysis" + "shortName": "ExportIssueModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetAnalysisRequest" + "type": "google.cloud.contact_center_insights_v1.types.ExportIssueModelRequest" }, { "name": "name", @@ -2909,22 +2909,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Analysis", - "shortName": "get_analysis" + "resultType": "google.api_core.operation.Operation", + "shortName": "export_issue_model" }, - "description": "Sample for GetAnalysis", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_sync.py", + "description": "Sample for ExportIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetAnalysis_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_ExportIssueModel_sync", "segments": [ { - "end": 51, + "end": 59, 
"start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -2934,22 +2934,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_export_issue_model_sync.py" }, { "canonical": true, @@ -2959,19 +2959,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_conversation", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_analysis", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetConversation", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetAnalysis", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetConversation" + "shortName": "GetAnalysis" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetConversationRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetAnalysisRequest" }, { "name": "name", @@ -2990,14 +2990,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Conversation", - "shortName": "get_conversation" + "resultType": "google.cloud.contact_center_insights_v1.types.Analysis", + "shortName": "get_analysis" }, - "description": "Sample for GetConversation", - "file": 
"contactcenterinsights_v1_generated_contact_center_insights_get_conversation_async.py", + "description": "Sample for GetAnalysis", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetConversation_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetAnalysis_async", "segments": [ { "end": 51, @@ -3030,7 +3030,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_async.py" }, { "canonical": true, @@ -3039,19 +3039,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_conversation", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_analysis", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetConversation", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetAnalysis", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetConversation" + "shortName": "GetAnalysis" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetConversationRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetAnalysisRequest" }, { "name": "name", @@ -3070,14 +3070,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Conversation", - "shortName": "get_conversation" + "resultType": "google.cloud.contact_center_insights_v1.types.Analysis", + "shortName": "get_analysis" 
}, - "description": "Sample for GetConversation", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_sync.py", + "description": "Sample for GetAnalysis", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetConversation_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetAnalysis_sync", "segments": [ { "end": 51, @@ -3110,7 +3110,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_analysis_sync.py" }, { "canonical": true, @@ -3120,19 +3120,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_issue_model", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_conversation", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssueModel", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetConversation", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetIssueModel" + "shortName": "GetConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetIssueModelRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetConversationRequest" }, { "name": "name", @@ -3151,14 +3151,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.IssueModel", - "shortName": "get_issue_model" + "resultType": 
"google.cloud.contact_center_insights_v1.types.Conversation", + "shortName": "get_conversation" }, - "description": "Sample for GetIssueModel", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_async.py", + "description": "Sample for GetConversation", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssueModel_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetConversation_async", "segments": [ { "end": 51, @@ -3191,7 +3191,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_async.py" }, { "canonical": true, @@ -3200,19 +3200,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_issue_model", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_conversation", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssueModel", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetConversation", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetIssueModel" + "shortName": "GetConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetIssueModelRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetConversationRequest" }, { "name": "name", @@ -3231,14 +3231,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.contact_center_insights_v1.types.IssueModel", - "shortName": "get_issue_model" + "resultType": "google.cloud.contact_center_insights_v1.types.Conversation", + "shortName": "get_conversation" }, - "description": "Sample for GetIssueModel", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_sync.py", + "description": "Sample for GetConversation", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssueModel_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetConversation_sync", "segments": [ { "end": 51, @@ -3271,7 +3271,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_conversation_sync.py" }, { "canonical": true, @@ -3281,19 +3281,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_issue", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_encryption_spec", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssue", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetEncryptionSpec", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetIssue" + "shortName": "GetEncryptionSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetIssueRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetEncryptionSpecRequest" }, { "name": 
"name", @@ -3312,14 +3312,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Issue", - "shortName": "get_issue" + "resultType": "google.cloud.contact_center_insights_v1.types.EncryptionSpec", + "shortName": "get_encryption_spec" }, - "description": "Sample for GetIssue", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_async.py", + "description": "Sample for GetEncryptionSpec", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssue_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_async", "segments": [ { "end": 51, @@ -3352,7 +3352,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_async.py" }, { "canonical": true, @@ -3361,19 +3361,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_issue", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_encryption_spec", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssue", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetEncryptionSpec", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetIssue" + "shortName": "GetEncryptionSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetIssueRequest" + "type": 
"google.cloud.contact_center_insights_v1.types.GetEncryptionSpecRequest" }, { "name": "name", @@ -3392,14 +3392,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Issue", - "shortName": "get_issue" + "resultType": "google.cloud.contact_center_insights_v1.types.EncryptionSpec", + "shortName": "get_encryption_spec" }, - "description": "Sample for GetIssue", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_sync.py", + "description": "Sample for GetEncryptionSpec", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssue_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetEncryptionSpec_sync", "segments": [ { "end": 51, @@ -3432,7 +3432,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_encryption_spec_sync.py" }, { "canonical": true, @@ -3442,19 +3442,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_phrase_matcher", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_issue_model", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetPhraseMatcher", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssueModel", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetPhraseMatcher" + "shortName": "GetIssueModel" }, "parameters": [ { "name": "request", - 
"type": "google.cloud.contact_center_insights_v1.types.GetPhraseMatcherRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetIssueModelRequest" }, { "name": "name", @@ -3473,14 +3473,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.PhraseMatcher", - "shortName": "get_phrase_matcher" + "resultType": "google.cloud.contact_center_insights_v1.types.IssueModel", + "shortName": "get_issue_model" }, - "description": "Sample for GetPhraseMatcher", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_async.py", + "description": "Sample for GetIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetPhraseMatcher_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssueModel_async", "segments": [ { "end": 51, @@ -3513,7 +3513,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_async.py" }, { "canonical": true, @@ -3522,19 +3522,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_phrase_matcher", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_issue_model", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetPhraseMatcher", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssueModel", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - 
"shortName": "GetPhraseMatcher" + "shortName": "GetIssueModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetPhraseMatcherRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetIssueModelRequest" }, { "name": "name", @@ -3553,14 +3553,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.PhraseMatcher", - "shortName": "get_phrase_matcher" + "resultType": "google.cloud.contact_center_insights_v1.types.IssueModel", + "shortName": "get_issue_model" }, - "description": "Sample for GetPhraseMatcher", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_sync.py", + "description": "Sample for GetIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetPhraseMatcher_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssueModel_sync", "segments": [ { "end": 51, @@ -3593,7 +3593,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_model_sync.py" }, { "canonical": true, @@ -3603,19 +3603,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_settings", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_issue", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetSettings", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssue", "service": { "fullName": 
"google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetSettings" + "shortName": "GetIssue" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetSettingsRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetIssueRequest" }, { "name": "name", @@ -3634,14 +3634,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Settings", - "shortName": "get_settings" + "resultType": "google.cloud.contact_center_insights_v1.types.Issue", + "shortName": "get_issue" }, - "description": "Sample for GetSettings", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_async.py", + "description": "Sample for GetIssue", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetSettings_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssue_async", "segments": [ { "end": 51, @@ -3674,7 +3674,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_async.py" }, { "canonical": true, @@ -3683,19 +3683,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_settings", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_issue", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetSettings", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetIssue", "service": { "fullName": 
"google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetSettings" + "shortName": "GetIssue" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetSettingsRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetIssueRequest" }, { "name": "name", @@ -3714,14 +3714,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.Settings", - "shortName": "get_settings" + "resultType": "google.cloud.contact_center_insights_v1.types.Issue", + "shortName": "get_issue" }, - "description": "Sample for GetSettings", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_sync.py", + "description": "Sample for GetIssue", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetSettings_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetIssue_sync", "segments": [ { "end": 51, @@ -3754,7 +3754,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_issue_sync.py" }, { "canonical": true, @@ -3764,19 +3764,19 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", "shortName": "ContactCenterInsightsAsyncClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_view", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_phrase_matcher", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetView", + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetPhraseMatcher", "service": { 
"fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetView" + "shortName": "GetPhraseMatcher" }, "parameters": [ { "name": "request", - "type": "google.cloud.contact_center_insights_v1.types.GetViewRequest" + "type": "google.cloud.contact_center_insights_v1.types.GetPhraseMatcherRequest" }, { "name": "name", @@ -3795,14 +3795,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.View", - "shortName": "get_view" + "resultType": "google.cloud.contact_center_insights_v1.types.PhraseMatcher", + "shortName": "get_phrase_matcher" }, - "description": "Sample for GetView", - "file": "contactcenterinsights_v1_generated_contact_center_insights_get_view_async.py", + "description": "Sample for GetPhraseMatcher", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetView_async", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetPhraseMatcher_async", "segments": [ { "end": 51, @@ -3835,7 +3835,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_view_async.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_async.py" }, { "canonical": true, @@ -3844,14 +3844,256 @@ "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", "shortName": "ContactCenterInsightsClient" }, - "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_view", + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_phrase_matcher", "method": { - "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetView", + "fullName": 
"google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetPhraseMatcher", "service": { "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", "shortName": "ContactCenterInsights" }, - "shortName": "GetView" + "shortName": "GetPhraseMatcher" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.GetPhraseMatcherRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contact_center_insights_v1.types.PhraseMatcher", + "shortName": "get_phrase_matcher" + }, + "description": "Sample for GetPhraseMatcher", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetPhraseMatcher_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_phrase_matcher_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", + "shortName": "ContactCenterInsightsAsyncClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_settings", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetSettings", + 
"service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "GetSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contact_center_insights_v1.types.Settings", + "shortName": "get_settings" + }, + "description": "Sample for GetSettings", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetSettings_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", + "shortName": "ContactCenterInsightsClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_settings", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetSettings", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "GetSettings" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contact_center_insights_v1.types.Settings", + "shortName": "get_settings" + }, + "description": "Sample for GetSettings", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetSettings_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_settings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", + "shortName": "ContactCenterInsightsAsyncClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.get_view", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetView", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "GetView" }, "parameters": [ { @@ -3859,7 +4101,168 @@ "type": "google.cloud.contact_center_insights_v1.types.GetViewRequest" }, { - "name": "name", + "name": "name", + "type": 
"str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contact_center_insights_v1.types.View", + "shortName": "get_view" + }, + "description": "Sample for GetView", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetView_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_view_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", + "shortName": "ContactCenterInsightsClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.get_view", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.GetView", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "GetView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.GetViewRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.contact_center_insights_v1.types.View", + "shortName": "get_view" + }, + "description": "Sample for GetView", + "file": "contactcenterinsights_v1_generated_contact_center_insights_get_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetView_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_get_view_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", + "shortName": "ContactCenterInsightsAsyncClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.import_issue_model", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.ImportIssueModel", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "ImportIssueModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest" + }, + { + "name": "parent", "type": "str" }, { @@ -3875,22 +4278,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.contact_center_insights_v1.types.View", - "shortName": "get_view" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_issue_model" }, - "description": "Sample for GetView", - "file": 
"contactcenterinsights_v1_generated_contact_center_insights_get_view_sync.py", + "description": "Sample for ImportIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_GetView_sync", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_async", "segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -3900,22 +4303,102 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "contactcenterinsights_v1_generated_contact_center_insights_get_view_sync.py" + "title": "contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", + "shortName": "ContactCenterInsightsClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.import_issue_model", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.ImportIssueModel", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "ImportIssueModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.ImportIssueModelRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_issue_model" + }, + "description": "Sample for ImportIssueModel", + "file": "contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_ImportIssueModel_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_import_issue_model_sync.py" }, { "canonical": true, @@ -4078,6 +4561,167 @@ ], "title": "contactcenterinsights_v1_generated_contact_center_insights_ingest_conversations_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient", + "shortName": "ContactCenterInsightsAsyncClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsAsyncClient.initialize_encryption_spec", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.InitializeEncryptionSpec", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "InitializeEncryptionSpec" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest" + }, + { + "name": "encryption_spec", + "type": "google.cloud.contact_center_insights_v1.types.EncryptionSpec" + }, + { 
+ "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "initialize_encryption_spec" + }, + "description": "Sample for InitializeEncryptionSpec", + "file": "contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient", + "shortName": "ContactCenterInsightsClient" + }, + "fullName": "google.cloud.contact_center_insights_v1.ContactCenterInsightsClient.initialize_encryption_spec", + "method": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights.InitializeEncryptionSpec", + "service": { + "fullName": "google.cloud.contactcenterinsights.v1.ContactCenterInsights", + "shortName": "ContactCenterInsights" + }, + "shortName": "InitializeEncryptionSpec" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contact_center_insights_v1.types.InitializeEncryptionSpecRequest" + }, + { + "name": "encryption_spec", + "type": "google.cloud.contact_center_insights_v1.types.EncryptionSpec" + }, + { 
+ "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "initialize_encryption_spec" + }, + "description": "Sample for InitializeEncryptionSpec", + "file": "contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contactcenterinsights_v1_generated_ContactCenterInsights_InitializeEncryptionSpec_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contactcenterinsights_v1_generated_contact_center_insights_initialize_encryption_spec_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py b/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py index ca15d27e1fd8..10d05e634fa6 100644 --- a/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py +++ b/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py @@ -56,16 +56,20 @@ class contact_center_insightsCallTransformer(cst.CSTTransformer): 'delete_view': ('name', ), 'deploy_issue_model': ('name', ), 'export_insights_data': ('parent', 'big_query_destination', 'filter', 'kms_key', 'write_disposition', ), + 'export_issue_model': ('name', 'gcs_destination', ), 'get_analysis': ('name', ), 'get_conversation': ('name', 
'view', ), + 'get_encryption_spec': ('name', ), 'get_issue': ('name', ), 'get_issue_model': ('name', ), 'get_phrase_matcher': ('name', ), 'get_settings': ('name', ), 'get_view': ('name', ), - 'ingest_conversations': ('parent', 'gcs_source', 'transcript_object_config', 'conversation_config', 'redaction_config', 'speech_config', ), + 'import_issue_model': ('parent', 'gcs_source', 'create_new_model', ), + 'ingest_conversations': ('parent', 'gcs_source', 'transcript_object_config', 'conversation_config', 'redaction_config', 'speech_config', 'sample_size', ), + 'initialize_encryption_spec': ('encryption_spec', ), 'list_analyses': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'view', ), + 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), 'list_issue_models': ('parent', ), 'list_issues': ('parent', ), 'list_phrase_matchers': ('parent', 'page_size', 'page_token', 'filter', ), diff --git a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py index ab8a670da609..034282057f4a 100644 --- a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py @@ -1230,6 +1230,7 @@ def test_create_conversation(request_type, transport: str = "grpc"): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -1247,6 +1248,7 @@ def test_create_conversation(request_type, transport: str = "grpc"): assert response.name == 
"name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -1363,6 +1365,7 @@ async def test_create_conversation_empty_call_async(): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -1397,22 +1400,23 @@ async def test_create_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_conversation - ] = mock_object + ] = mock_rpc request = {} await client.create_conversation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1439,6 +1443,7 @@ async def test_create_conversation_async( name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -1457,6 +1462,7 @@ async def test_create_conversation_async( assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -1770,8 +1776,9 @@ def test_upload_conversation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upload_conversation(request) @@ -1827,26 +1834,28 @@ async def test_upload_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.upload_conversation - ] = mock_object + ] = mock_rpc request = {} await client.upload_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upload_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1979,6 +1988,7 @@ def test_update_conversation(request_type, transport: str = "grpc"): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -1996,6 +2006,7 @@ def test_update_conversation(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -2106,6 +2117,7 @@ 
async def test_update_conversation_empty_call_async(): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2140,22 +2152,23 @@ async def test_update_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_conversation - ] = mock_object + ] = mock_rpc request = {} await client.update_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2182,6 +2195,7 @@ async def test_update_conversation_async( name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2200,6 +2214,7 @@ async def test_update_conversation_async( assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -2407,6 +2422,7 @@ def test_get_conversation(request_type, transport: str = "grpc"): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", 
medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2424,6 +2440,7 @@ def test_get_conversation(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -2530,6 +2547,7 @@ async def test_get_conversation_empty_call_async(): name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2564,22 +2582,23 @@ async def test_get_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_conversation - ] = mock_object + ] = mock_rpc request = {} await client.get_conversation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2604,6 +2623,7 @@ async def test_get_conversation_async( name="name_value", language_code="language_code_value", agent_id="agent_id_value", + metadata_json="metadata_json_value", medium=resources.Conversation.Medium.PHONE_CALL, turn_count=1105, obfuscated_user_id="obfuscated_user_id_value", @@ -2622,6 +2642,7 @@ async def test_get_conversation_async( assert response.name == "name_value" assert response.language_code == "language_code_value" assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" assert response.medium == resources.Conversation.Medium.PHONE_CALL assert response.turn_count == 1105 assert response.obfuscated_user_id == "obfuscated_user_id_value" @@ -2849,6 +2870,7 @@ def test_list_conversations_non_empty_request_with_auto_populated_field(): parent="parent_value", page_token="page_token_value", filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2865,6 +2887,7 @@ def test_list_conversations_non_empty_request_with_auto_populated_field(): parent="parent_value", page_token="page_token_value", filter="filter_value", + order_by="order_by_value", ) @@ -2955,22 +2978,23 @@ async def test_list_conversations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_conversations - ] = mock_object + ] = mock_rpc request = {} await client.list_conversations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3537,22 +3561,23 @@ async def test_delete_conversation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_conversation - ] = mock_object + ] = mock_rpc request = {} await client.delete_conversation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3847,8 +3872,9 @@ def test_create_analysis_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_analysis(request) @@ -3902,26 +3928,28 @@ async def test_create_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_analysis - ] = mock_object + ] = mock_rpc request = {} await client.create_analysis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4278,22 +4306,23 @@ async def test_get_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_analysis - ] = mock_object + ] = mock_rpc request = {} await client.get_analysis(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4643,22 +4672,23 @@ async def test_list_analyses_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_analyses - ] = mock_object + ] = mock_rpc request = {} await client.list_analyses(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_analyses(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5195,22 +5225,23 @@ async def test_delete_analysis_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_analysis - ] = mock_object + ] = mock_rpc request = {} await client.delete_analysis(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5508,8 +5539,9 @@ def test_bulk_analyze_conversations_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.bulk_analyze_conversations(request) @@ -5565,26 +5597,28 @@ async def test_bulk_analyze_conversations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.bulk_analyze_conversations - ] = mock_object + ] = mock_rpc request = {} await client.bulk_analyze_conversations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.bulk_analyze_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5918,8 +5952,9 @@ def test_bulk_delete_conversations_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.bulk_delete_conversations(request) @@ -5975,26 +6010,28 @@ async def test_bulk_delete_conversations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.bulk_delete_conversations - ] = mock_object + ] = mock_rpc request = {} await client.bulk_delete_conversations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.bulk_delete_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6315,8 +6352,9 @@ def test_ingest_conversations_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.ingest_conversations(request) @@ -6372,26 +6410,28 @@ async def test_ingest_conversations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.ingest_conversations - ] = mock_object + ] = mock_rpc request = {} await client.ingest_conversations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.ingest_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6706,8 +6746,9 @@ def test_export_insights_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_insights_data(request) @@ -6763,26 +6804,28 @@ async def test_export_insights_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_insights_data - ] = mock_object + ] = mock_rpc request = {} await client.export_insights_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_insights_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7093,8 +7136,9 @@ def test_create_issue_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_issue_model(request) @@ -7150,26 +7194,28 @@ async def test_create_issue_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_issue_model - ] = mock_object + ] = mock_rpc request = {} await client.create_issue_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7559,22 +7605,23 @@ async def test_update_issue_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_issue_model - ] = mock_object + ] = mock_rpc request = {} await client.update_issue_model(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7969,22 +8016,23 @@ async def test_get_issue_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_issue_model - ] = mock_object + ] = mock_rpc request = {} await client.get_issue_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8349,22 +8397,23 @@ async def test_list_issue_models_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_issue_models - ] = mock_object + ] = mock_rpc request = {} await client.list_issue_models(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_issue_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8675,8 +8724,9 @@ def test_delete_issue_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_issue_model(request) @@ -8732,26 +8782,28 @@ async def test_delete_issue_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_issue_model - ] = mock_object + ] = mock_rpc request = {} await client.delete_issue_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9062,8 +9114,9 @@ def test_deploy_issue_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_issue_model(request) @@ -9119,26 +9172,28 @@ async def test_deploy_issue_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_issue_model - ] = mock_object + ] = mock_rpc request = {} await client.deploy_issue_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9449,8 +9504,9 @@ def test_undeploy_issue_model_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.undeploy_issue_model(request) @@ -9506,26 +9562,28 @@ async def test_undeploy_issue_model_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.undeploy_issue_model - ] = mock_object + ] = mock_rpc request = {} await client.undeploy_issue_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.undeploy_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9721,11 +9779,11 @@ async def test_undeploy_issue_model_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetIssueRequest, + contact_center_insights.ExportIssueModelRequest, dict, ], ) -def test_get_issue(request_type, transport: str = "grpc"): +def test_export_issue_model(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9736,29 +9794,24 @@ def test_get_issue(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Issue( - name="name_value", - display_name="display_name_value", - sample_utterances=["sample_utterances_value"], - ) - response = client.get_issue(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.ExportIssueModelRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Issue) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.sample_utterances == ["sample_utterances_value"] + assert isinstance(response, future.Future) -def test_get_issue_empty_call(): +def test_export_issue_model_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -9767,17 +9820,19 @@ def test_get_issue_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_issue() + client.export_issue_model() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetIssueRequest() + assert args[0] == contact_center_insights.ExportIssueModelRequest() -def test_get_issue_non_empty_request_with_auto_populated_field(): +def test_export_issue_model_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -9788,24 +9843,26 @@ def test_get_issue_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.GetIssueRequest( + request = contact_center_insights.ExportIssueModelRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_issue(request=request) + client.export_issue_model(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetIssueRequest( + assert args[0] == contact_center_insights.ExportIssueModelRequest( name="name_value", ) -def test_get_issue_use_cached_wrapped_rpc(): +def test_export_issue_model_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9819,21 +9876,30 @@ def test_get_issue_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_issue in client._transport._wrapped_methods + assert ( + client._transport.export_issue_model in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_issue] = mock_rpc + client._transport._wrapped_methods[ + client._transport.export_issue_model + ] = mock_rpc request = {} - client.get_issue(request) + client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_issue(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9841,7 +9907,7 @@ def test_get_issue_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_issue_empty_call_async(): +async def test_export_issue_model_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -9850,23 +9916,23 @@ async def test_get_issue_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Issue( - name="name_value", - display_name="display_name_value", - sample_utterances=["sample_utterances_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_issue() + response = await client.export_issue_model() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetIssueRequest() + assert args[0] == contact_center_insights.ExportIssueModelRequest() @pytest.mark.asyncio -async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_export_issue_model_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -9881,33 +9947,39 @@ async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # Ensure method has 
been cached assert ( - client._client._transport.get_issue + client._client._transport.export_issue_model in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_issue - ] = mock_object + client._client._transport.export_issue_model + ] = mock_rpc request = {} - await client.get_issue(request) + await client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_issue(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.export_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_issue_async( +async def test_export_issue_model_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.GetIssueRequest, + request_type=contact_center_insights.ExportIssueModelRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9919,50 +9991,47 @@ async def test_get_issue_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Issue( - name="name_value", - display_name="display_name_value", - sample_utterances=["sample_utterances_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_issue(request) + response = await client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.ExportIssueModelRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Issue) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.sample_utterances == ["sample_utterances_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_issue_async_from_dict(): - await test_get_issue_async(request_type=dict) +async def test_export_issue_model_async_from_dict(): + await test_export_issue_model_async(request_type=dict) -def test_get_issue_field_headers(): +def test_export_issue_model_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.ExportIssueModelRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_issue), "__call__") as call: - call.return_value = resources.Issue() - client.get_issue(request) + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9978,21 +10047,25 @@ def test_get_issue_field_headers(): @pytest.mark.asyncio -async def test_get_issue_field_headers_async(): +async def test_export_issue_model_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.ExportIssueModelRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) - await client.get_issue(request) + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10007,18 +10080,20 @@ async def test_get_issue_field_headers_async(): ) in kw["metadata"] -def test_get_issue_flattened(): +def test_export_issue_model_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Issue() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_issue( + client.export_issue_model( name="name_value", ) @@ -10031,7 +10106,7 @@ def test_get_issue_flattened(): assert arg == mock_val -def test_get_issue_flattened_error(): +def test_export_issue_model_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10039,27 +10114,31 @@ def test_get_issue_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_issue( - contact_center_insights.GetIssueRequest(), + client.export_issue_model( + contact_center_insights.ExportIssueModelRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_issue_flattened_async(): +async def test_export_issue_model_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_issue), "__call__") as call: + with mock.patch.object( + type(client.transport.export_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.Issue() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_issue( + response = await client.export_issue_model( name="name_value", ) @@ -10073,7 +10152,7 @@ async def test_get_issue_flattened_async(): @pytest.mark.asyncio -async def test_get_issue_flattened_error_async(): +async def test_export_issue_model_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10081,8 +10160,8 @@ async def test_get_issue_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_issue( - contact_center_insights.GetIssueRequest(), + await client.export_issue_model( + contact_center_insights.ExportIssueModelRequest(), name="name_value", ) @@ -10090,11 +10169,11 @@ async def test_get_issue_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListIssuesRequest, + contact_center_insights.ImportIssueModelRequest, dict, ], ) -def test_list_issues(request_type, transport: str = "grpc"): +def test_import_issue_model(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10105,22 +10184,24 @@ def test_list_issues(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListIssuesResponse() - response = client.list_issues(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.ImportIssueModelRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.ListIssuesResponse) + assert isinstance(response, future.Future) -def test_list_issues_empty_call(): +def test_import_issue_model_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -10129,17 +10210,19 @@ def test_list_issues_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_issues() + client.import_issue_model() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListIssuesRequest() + assert args[0] == contact_center_insights.ImportIssueModelRequest() -def test_list_issues_non_empty_request_with_auto_populated_field(): +def test_import_issue_model_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -10150,24 +10233,26 @@ def test_list_issues_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.ListIssuesRequest( + request = contact_center_insights.ImportIssueModelRequest( parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_issues(request=request) + client.import_issue_model(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListIssuesRequest( + assert args[0] == contact_center_insights.ImportIssueModelRequest( parent="parent_value", ) -def test_list_issues_use_cached_wrapped_rpc(): +def test_import_issue_model_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10181,21 +10266,30 @@ def test_list_issues_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_issues in client._transport._wrapped_methods + assert ( + client._transport.import_issue_model in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_issues] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_issue_model + ] = mock_rpc request = {} - client.list_issues(request) + client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_issues(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10203,7 +10297,7 @@ def test_list_issues_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_issues_empty_call_async(): +async def test_import_issue_model_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -10212,19 +10306,21 @@ async def test_list_issues_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListIssuesResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_issues() + response = await client.import_issue_model() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListIssuesRequest() + assert args[0] == contact_center_insights.ImportIssueModelRequest() @pytest.mark.asyncio -async def test_list_issues_async_use_cached_wrapped_rpc( +async def test_import_issue_model_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10241,33 +10337,39 @@ async def test_list_issues_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_issues + client._client._transport.import_issue_model in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = 
mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_issues - ] = mock_object + client._client._transport.import_issue_model + ] = mock_rpc request = {} - await client.list_issues(request) + await client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_issues(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.import_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_issues_async( +async def test_import_issue_model_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.ListIssuesRequest, + request_type=contact_center_insights.ImportIssueModelRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10279,43 +10381,47 @@ async def test_list_issues_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListIssuesResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_issues(request) + response = await client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.ImportIssueModelRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.ListIssuesResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_issues_async_from_dict(): - await test_list_issues_async(request_type=dict) +async def test_import_issue_model_async_from_dict(): + await test_import_issue_model_async(request_type=dict) -def test_list_issues_field_headers(): +def test_import_issue_model_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.ImportIssueModelRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: - call.return_value = contact_center_insights.ListIssuesResponse() - client.list_issues(request) + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10331,23 +10437,25 @@ def test_list_issues_field_headers(): @pytest.mark.asyncio -async def test_list_issues_field_headers_async(): +async def test_import_issue_model_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.ImportIssueModelRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListIssuesResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_issues(request) + await client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10362,18 +10470,20 @@ async def test_list_issues_field_headers_async(): ) in kw["metadata"] -def test_list_issues_flattened(): +def test_import_issue_model_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListIssuesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_issues( + client.import_issue_model( parent="parent_value", ) @@ -10386,7 +10496,7 @@ def test_list_issues_flattened(): assert arg == mock_val -def test_list_issues_flattened_error(): +def test_import_issue_model_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10394,29 +10504,31 @@ def test_list_issues_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_issues( - contact_center_insights.ListIssuesRequest(), + client.import_issue_model( + contact_center_insights.ImportIssueModelRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_issues_flattened_async(): +async def test_import_issue_model_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + with mock.patch.object( + type(client.transport.import_issue_model), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListIssuesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListIssuesResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_issues( + response = await client.import_issue_model( parent="parent_value", ) @@ -10430,7 +10542,7 @@ async def test_list_issues_flattened_async(): @pytest.mark.asyncio -async def test_list_issues_flattened_error_async(): +async def test_import_issue_model_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10438,8 +10550,8 @@ async def test_list_issues_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_issues( - contact_center_insights.ListIssuesRequest(), + await client.import_issue_model( + contact_center_insights.ImportIssueModelRequest(), parent="parent_value", ) @@ -10447,11 +10559,11 @@ async def test_list_issues_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdateIssueRequest, + contact_center_insights.GetIssueRequest, dict, ], ) -def test_update_issue(request_type, transport: str = "grpc"): +def test_get_issue(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10462,19 +10574,20 @@ def test_update_issue(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Issue( name="name_value", display_name="display_name_value", sample_utterances=["sample_utterances_value"], + display_description="display_description_value", ) - response = client.update_issue(request) + response = client.get_issue(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -10482,9 +10595,10 @@ def test_update_issue(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" -def test_update_issue_empty_call(): +def test_get_issue_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -10493,17 +10607,17 @@ def test_update_issue_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_issue() + client.get_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateIssueRequest() + assert args[0] == contact_center_insights.GetIssueRequest() -def test_update_issue_non_empty_request_with_auto_populated_field(): +def test_get_issue_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = ContactCenterInsightsClient( @@ -10514,20 +10628,24 @@ def test_update_issue_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_issue(request=request) + client.get_issue(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateIssueRequest() + assert args[0] == contact_center_insights.GetIssueRequest( + name="name_value", + ) -def test_update_issue_use_cached_wrapped_rpc(): +def test_get_issue_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10541,21 +10659,21 @@ def test_update_issue_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_issue in client._transport._wrapped_methods + assert client._transport.get_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_issue] = mock_rpc + client._transport._wrapped_methods[client._transport.get_issue] = mock_rpc request = {} - client.update_issue(request) + client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_issue(request) + client.get_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10563,7 +10681,7 @@ def test_update_issue_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_issue_empty_call_async(): +async def test_get_issue_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -10572,25 +10690,24 @@ async def test_update_issue_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.Issue( name="name_value", display_name="display_name_value", sample_utterances=["sample_utterances_value"], + display_description="display_description_value", ) ) - response = await client.update_issue() + response = await client.get_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateIssueRequest() + assert args[0] == contact_center_insights.GetIssueRequest() @pytest.mark.asyncio -async def test_update_issue_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_issue_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10605,33 +10722,34 @@ async def test_update_issue_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_issue + client._client._transport.get_issue in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_issue - ] = mock_object + client._client._transport.get_issue + ] = mock_rpc request = {} - await client.update_issue(request) + await client.get_issue(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_issue(request) + await client.get_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_issue_async( +async def test_get_issue_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.UpdateIssueRequest, + request_type=contact_center_insights.GetIssueRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10643,21 +10761,22 @@ async def test_update_issue_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.Issue( name="name_value", display_name="display_name_value", sample_utterances=["sample_utterances_value"], + display_description="display_description_value", ) ) - response = await client.update_issue(request) + response = await client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -10665,28 +10784,29 @@ async def test_update_issue_async( assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" @pytest.mark.asyncio -async def test_update_issue_async_from_dict(): - await test_update_issue_async(request_type=dict) +async def test_get_issue_async_from_dict(): + await test_get_issue_async(request_type=dict) -def test_update_issue_field_headers(): +def test_get_issue_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest() - request.issue.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: call.return_value = resources.Issue() - client.update_issue(request) + client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10697,26 +10817,26 @@ def test_update_issue_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "issue.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_issue_field_headers_async(): +async def test_get_issue_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.GetIssueRequest() - request.issue.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) - await client.update_issue(request) + await client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10727,39 +10847,35 @@ async def test_update_issue_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "issue.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_issue_flattened(): +def test_get_issue_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Issue() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_issue( - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_issue( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].issue - mock_val = resources.Issue(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_issue_flattened_error(): +def test_get_issue_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10767,46 +10883,41 @@ def test_update_issue_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_issue( - contact_center_insights.UpdateIssueRequest(), - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_issue( + contact_center_insights.GetIssueRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_issue_flattened_async(): +async def test_get_issue_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + with mock.patch.object(type(client.transport.get_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Issue() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_issue( - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.get_issue( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].issue - mock_val = resources.Issue(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_issue_flattened_error_async(): +async def test_get_issue_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10814,21 +10925,20 @@ async def test_update_issue_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_issue( - contact_center_insights.UpdateIssueRequest(), - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.get_issue( + contact_center_insights.GetIssueRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteIssueRequest, + contact_center_insights.ListIssuesRequest, dict, ], ) -def test_delete_issue(request_type, transport: str = "grpc"): +def test_list_issues(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10839,22 +10949,22 @@ def test_delete_issue(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_issue(request) + call.return_value = contact_center_insights.ListIssuesResponse() + response = client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.ListIssuesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, contact_center_insights.ListIssuesResponse) -def test_delete_issue_empty_call(): +def test_list_issues_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -10863,17 +10973,17 @@ def test_delete_issue_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_issue() + client.list_issues() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteIssueRequest() + assert args[0] == contact_center_insights.ListIssuesRequest() -def test_delete_issue_non_empty_request_with_auto_populated_field(): +def test_list_issues_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = ContactCenterInsightsClient( @@ -10884,24 +10994,24 @@ def test_delete_issue_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.DeleteIssueRequest( - name="name_value", + request = contact_center_insights.ListIssuesRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_issue(request=request) + client.list_issues(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteIssueRequest( - name="name_value", + assert args[0] == contact_center_insights.ListIssuesRequest( + parent="parent_value", ) -def test_delete_issue_use_cached_wrapped_rpc(): +def test_list_issues_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10915,21 +11025,21 @@ def test_delete_issue_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_issue in client._transport._wrapped_methods + assert client._transport.list_issues in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_issue] = mock_rpc + client._transport._wrapped_methods[client._transport.list_issues] = mock_rpc request = {} - client.delete_issue(request) + client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_issue(request) + client.list_issues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10937,7 +11047,7 @@ def test_delete_issue_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_issue_empty_call_async(): +async def test_list_issues_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -10946,17 +11056,19 @@ async def test_delete_issue_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_issue() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListIssuesResponse() + ) + response = await client.list_issues() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteIssueRequest() + assert args[0] == contact_center_insights.ListIssuesRequest() @pytest.mark.asyncio -async def test_delete_issue_async_use_cached_wrapped_rpc( +async def test_list_issues_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10973,33 +11085,34 @@ async def test_delete_issue_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_issue + client._client._transport.list_issues in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_issue - ] = mock_object + client._client._transport.list_issues + ] = mock_rpc request = {} - await client.delete_issue(request) + await client.list_issues(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_issue(request) + await client.list_issues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_issue_async( +async def test_list_issues_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.DeleteIssueRequest, + request_type=contact_center_insights.ListIssuesRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11011,41 +11124,43 @@ async def test_delete_issue_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_issue(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListIssuesResponse() + ) + response = await client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.ListIssuesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, contact_center_insights.ListIssuesResponse) @pytest.mark.asyncio -async def test_delete_issue_async_from_dict(): - await test_delete_issue_async(request_type=dict) +async def test_list_issues_async_from_dict(): + await test_list_issues_async(request_type=dict) -def test_delete_issue_field_headers(): +def test_list_issues_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.ListIssuesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: - call.return_value = None - client.delete_issue(request) + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + call.return_value = contact_center_insights.ListIssuesResponse() + client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11056,26 +11171,28 @@ def test_delete_issue_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_issue_field_headers_async(): +async def test_list_issues_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.ListIssuesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_issue(request) + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListIssuesResponse() + ) + await client.list_issues(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11086,35 +11203,35 @@ async def test_delete_issue_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_issue_flattened(): +def test_list_issues_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = contact_center_insights.ListIssuesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_issue( - name="name_value", + client.list_issues( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_delete_issue_flattened_error(): +def test_list_issues_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11122,41 +11239,43 @@ def test_delete_issue_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_issue( - contact_center_insights.DeleteIssueRequest(), - name="name_value", + client.list_issues( + contact_center_insights.ListIssuesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_delete_issue_flattened_async(): +async def test_list_issues_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + with mock.patch.object(type(client.transport.list_issues), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = contact_center_insights.ListIssuesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListIssuesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_issue( - name="name_value", + response = await client.list_issues( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_delete_issue_flattened_error_async(): +async def test_list_issues_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11164,20 +11283,20 @@ async def test_delete_issue_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_issue( - contact_center_insights.DeleteIssueRequest(), - name="name_value", + await client.list_issues( + contact_center_insights.ListIssuesRequest(), + parent="parent_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CalculateIssueModelStatsRequest, + contact_center_insights.UpdateIssueRequest, dict, ], ) -def test_calculate_issue_model_stats(request_type, transport: str = "grpc"): +def test_update_issue(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11188,26 +11307,31 @@ def test_calculate_issue_model_stats(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() - response = client.calculate_issue_model_stats(request) + call.return_value = resources.Issue( + name="name_value", + display_name="display_name_value", + sample_utterances=["sample_utterances_value"], + display_description="display_description_value", + ) + response = client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.UpdateIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance( - response, contact_center_insights.CalculateIssueModelStatsResponse - ) + assert isinstance(response, resources.Issue) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" -def test_calculate_issue_model_stats_empty_call(): +def test_update_issue_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -11216,19 +11340,17 @@ def test_calculate_issue_model_stats_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.calculate_issue_model_stats() + client.update_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest() + assert args[0] == contact_center_insights.UpdateIssueRequest() -def test_calculate_issue_model_stats_non_empty_request_with_auto_populated_field(): +def test_update_issue_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -11239,26 +11361,20 @@ def test_calculate_issue_model_stats_non_empty_request_with_auto_populated_field # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.CalculateIssueModelStatsRequest( - issue_model="issue_model_value", - ) + request = contact_center_insights.UpdateIssueRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.calculate_issue_model_stats(request=request) + client.update_issue(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest( - issue_model="issue_model_value", - ) + assert args[0] == contact_center_insights.UpdateIssueRequest() -def test_calculate_issue_model_stats_use_cached_wrapped_rpc(): +def test_update_issue_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11272,26 +11388,21 @@ def test_calculate_issue_model_stats_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.calculate_issue_model_stats - in client._transport._wrapped_methods - ) + assert client._transport.update_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.calculate_issue_model_stats - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_issue] = mock_rpc request = {} - client.calculate_issue_model_stats(request) + client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.calculate_issue_model_stats(request) + client.update_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11299,7 +11410,7 @@ def test_calculate_issue_model_stats_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_calculate_issue_model_stats_empty_call_async(): +async def test_update_issue_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -11308,21 +11419,24 @@ async def test_calculate_issue_model_stats_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateIssueModelStatsResponse() + resources.Issue( + name="name_value", + display_name="display_name_value", + sample_utterances=["sample_utterances_value"], + display_description="display_description_value", + ) ) - response = await client.calculate_issue_model_stats() + response = await client.update_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest() + assert args[0] == contact_center_insights.UpdateIssueRequest() @pytest.mark.asyncio -async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( +async def test_update_issue_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11339,33 +11453,34 @@ async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.calculate_issue_model_stats + client._client._transport.update_issue in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.calculate_issue_model_stats - ] = mock_object + client._client._transport.update_issue + ] = mock_rpc request = {} - await 
client.calculate_issue_model_stats(request) + await client.update_issue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.calculate_issue_model_stats(request) + await client.update_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_calculate_issue_model_stats_async( +async def test_update_issue_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.CalculateIssueModelStatsRequest, + request_type=contact_center_insights.UpdateIssueRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11377,49 +11492,52 @@ async def test_calculate_issue_model_stats_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateIssueModelStatsResponse() + resources.Issue( + name="name_value", + display_name="display_name_value", + sample_utterances=["sample_utterances_value"], + display_description="display_description_value", + ) ) - response = await client.calculate_issue_model_stats(request) + response = await client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.UpdateIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance( - response, contact_center_insights.CalculateIssueModelStatsResponse - ) + assert isinstance(response, resources.Issue) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" @pytest.mark.asyncio -async def test_calculate_issue_model_stats_async_from_dict(): - await test_calculate_issue_model_stats_async(request_type=dict) +async def test_update_issue_async_from_dict(): + await test_update_issue_async(request_type=dict) -def test_calculate_issue_model_stats_field_headers(): +def test_update_issue_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.UpdateIssueRequest() - request.issue_model = "issue_model_value" + request.issue.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: - call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() - client.calculate_issue_model_stats(request) + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + call.return_value = resources.Issue() + client.update_issue(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -11430,30 +11548,26 @@ def test_calculate_issue_model_stats_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "issue_model=issue_model_value", + "issue.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_calculate_issue_model_stats_field_headers_async(): +async def test_update_issue_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.UpdateIssueRequest() - request.issue_model = "issue_model_value" + request.issue.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateIssueModelStatsResponse() - ) - await client.calculate_issue_model_stats(request) + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) + await client.update_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11464,37 +11578,39 @@ async def test_calculate_issue_model_stats_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "issue_model=issue_model_value", + "issue.name=name_value", ) in kw["metadata"] -def test_calculate_issue_model_stats_flattened(): +def test_update_issue_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + call.return_value = resources.Issue() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.calculate_issue_model_stats( - issue_model="issue_model_value", + client.update_issue( + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].issue_model - mock_val = "issue_model_value" + arg = args[0].issue + mock_val = resources.Issue(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_calculate_issue_model_stats_flattened_error(): +def test_update_issue_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11502,45 +11618,46 @@ def test_calculate_issue_model_stats_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.calculate_issue_model_stats( - contact_center_insights.CalculateIssueModelStatsRequest(), - issue_model="issue_model_value", + client.update_issue( + contact_center_insights.UpdateIssueRequest(), + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_calculate_issue_model_stats_flattened_async(): +async def test_update_issue_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.calculate_issue_model_stats), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + call.return_value = resources.Issue() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateIssueModelStatsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.calculate_issue_model_stats( - issue_model="issue_model_value", + response = await client.update_issue( + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].issue_model - mock_val = "issue_model_value" + arg = args[0].issue + mock_val = resources.Issue(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_calculate_issue_model_stats_flattened_error_async(): +async def test_update_issue_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11548,20 +11665,21 @@ async def test_calculate_issue_model_stats_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.calculate_issue_model_stats( - contact_center_insights.CalculateIssueModelStatsRequest(), - issue_model="issue_model_value", + await client.update_issue( + contact_center_insights.UpdateIssueRequest(), + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreatePhraseMatcherRequest, + contact_center_insights.DeleteIssueRequest, dict, ], ) -def test_create_phrase_matcher(request_type, transport: str = "grpc"): +def test_delete_issue(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11572,39 +11690,22 @@ def test_create_phrase_matcher(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) - response = client.create_phrase_matcher(request) + call.return_value = None + response = client.delete_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.DeleteIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert response is None -def test_create_phrase_matcher_empty_call(): +def test_delete_issue_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -11613,19 +11714,17 @@ def test_create_phrase_matcher_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_phrase_matcher() + client.delete_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreatePhraseMatcherRequest() + assert args[0] == contact_center_insights.DeleteIssueRequest() -def test_create_phrase_matcher_non_empty_request_with_auto_populated_field(): +def test_delete_issue_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -11636,26 +11735,24 @@ def test_create_phrase_matcher_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.CreatePhraseMatcherRequest( - parent="parent_value", + request = contact_center_insights.DeleteIssueRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_phrase_matcher(request=request) + client.delete_issue(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreatePhraseMatcherRequest( - parent="parent_value", + assert args[0] == contact_center_insights.DeleteIssueRequest( + name="name_value", ) -def test_create_phrase_matcher_use_cached_wrapped_rpc(): +def test_delete_issue_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11669,26 +11766,21 @@ def test_create_phrase_matcher_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_phrase_matcher - in client._transport._wrapped_methods - ) + assert client._transport.delete_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_phrase_matcher - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_issue] = mock_rpc request = {} - client.create_phrase_matcher(request) + client.delete_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_phrase_matcher(request) + client.delete_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11696,7 +11788,7 @@ def test_create_phrase_matcher_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_phrase_matcher_empty_call_async(): +async def test_delete_issue_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -11705,29 +11797,17 @@ async def test_create_phrase_matcher_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) - ) - response = await client.create_phrase_matcher() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_issue() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreatePhraseMatcherRequest() + assert args[0] == contact_center_insights.DeleteIssueRequest() @pytest.mark.asyncio -async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( +async def test_delete_issue_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11744,33 +11824,34 @@ async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_phrase_matcher + client._client._transport.delete_issue in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_phrase_matcher - ] = mock_object + client._client._transport.delete_issue + ] = mock_rpc request = {} - 
await client.create_phrase_matcher(request) + await client.delete_issue(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_phrase_matcher(request) + await client.delete_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_phrase_matcher_async( +async def test_delete_issue_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.CreatePhraseMatcherRequest, + request_type=contact_center_insights.DeleteIssueRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11782,62 +11863,41 @@ async def test_create_phrase_matcher_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) - ) - response = await client.create_phrase_matcher(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_issue(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.DeleteIssueRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert response is None @pytest.mark.asyncio -async def test_create_phrase_matcher_async_from_dict(): - await test_create_phrase_matcher_async(request_type=dict) +async def test_delete_issue_async_from_dict(): + await test_delete_issue_async(request_type=dict) -def test_create_phrase_matcher_field_headers(): +def test_delete_issue_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.DeleteIssueRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: - call.return_value = resources.PhraseMatcher() - client.create_phrase_matcher(request) + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + call.return_value = None + client.delete_issue(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -11848,30 +11908,26 @@ def test_create_phrase_matcher_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_phrase_matcher_field_headers_async(): +async def test_delete_issue_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.DeleteIssueRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() - ) - await client.create_phrase_matcher(request) + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_issue(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11882,41 +11938,35 @@ async def test_create_phrase_matcher_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_phrase_matcher_flattened(): +def test_delete_issue_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_phrase_matcher( - parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), + client.delete_issue( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].phrase_matcher - mock_val = resources.PhraseMatcher(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_phrase_matcher_flattened_error(): +def test_delete_issue_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11924,50 +11974,41 @@ def test_create_phrase_matcher_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_phrase_matcher( - contact_center_insights.CreatePhraseMatcherRequest(), - parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), + client.delete_issue( + contact_center_insights.DeleteIssueRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_phrase_matcher_flattened_async(): +async def test_delete_issue_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_phrase_matcher), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_issue), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_phrase_matcher( - parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), + response = await client.delete_issue( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].phrase_matcher - mock_val = resources.PhraseMatcher(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_phrase_matcher_flattened_error_async(): +async def test_delete_issue_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11975,21 +12016,20 @@ async def test_create_phrase_matcher_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_phrase_matcher( - contact_center_insights.CreatePhraseMatcherRequest(), - parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), + await client.delete_issue( + contact_center_insights.DeleteIssueRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetPhraseMatcherRequest, + contact_center_insights.CalculateIssueModelStatsRequest, dict, ], ) -def test_get_phrase_matcher(request_type, transport: str = "grpc"): +def test_calculate_issue_model_stats(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12001,38 +12041,25 @@ def test_get_phrase_matcher(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) - response = client.get_phrase_matcher(request) + call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + response = client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance( + response, contact_center_insights.CalculateIssueModelStatsResponse + ) -def test_get_phrase_matcher_empty_call(): +def test_calculate_issue_model_stats_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -12042,18 +12069,18 @@ def test_get_phrase_matcher_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_phrase_matcher() + client.calculate_issue_model_stats() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetPhraseMatcherRequest() + assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest() -def test_get_phrase_matcher_non_empty_request_with_auto_populated_field(): +def test_calculate_issue_model_stats_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = ContactCenterInsightsClient( @@ -12064,26 +12091,26 @@ def test_get_phrase_matcher_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.GetPhraseMatcherRequest( - name="name_value", + request = contact_center_insights.CalculateIssueModelStatsRequest( + issue_model="issue_model_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_phrase_matcher(request=request) + client.calculate_issue_model_stats(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetPhraseMatcherRequest( - name="name_value", + assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest( + issue_model="issue_model_value", ) -def test_get_phrase_matcher_use_cached_wrapped_rpc(): +def test_calculate_issue_model_stats_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12098,7 +12125,8 @@ def test_get_phrase_matcher_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_phrase_matcher in client._transport._wrapped_methods + client._transport.calculate_issue_model_stats + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -12107,15 +12135,15 @@ def test_get_phrase_matcher_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.get_phrase_matcher + client._transport.calculate_issue_model_stats ] = mock_rpc request = {} - client.get_phrase_matcher(request) + client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_phrase_matcher(request) + client.calculate_issue_model_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12123,7 +12151,7 @@ def test_get_phrase_matcher_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_phrase_matcher_empty_call_async(): +async def test_calculate_issue_model_stats_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -12133,28 +12161,20 @@ async def test_get_phrase_matcher_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) + contact_center_insights.CalculateIssueModelStatsResponse() ) - response = await client.get_phrase_matcher() + response = await client.calculate_issue_model_stats() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetPhraseMatcherRequest() + assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest() @pytest.mark.asyncio -async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( +async def test_calculate_issue_model_stats_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12171,33 +12191,34 @@ async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_phrase_matcher + client._client._transport.calculate_issue_model_stats in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_phrase_matcher - ] = mock_object + client._client._transport.calculate_issue_model_stats + ] = mock_rpc request = {} - await client.get_phrase_matcher(request) + await client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_phrase_matcher(request) + await client.calculate_issue_model_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_phrase_matcher_async( +async def test_calculate_issue_model_stats_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.GetPhraseMatcherRequest, + request_type=contact_center_insights.CalculateIssueModelStatsRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12210,61 +12231,48 @@ async def test_get_phrase_matcher_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) + contact_center_insights.CalculateIssueModelStatsResponse() ) - response = await client.get_phrase_matcher(request) + response = await client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance( + response, contact_center_insights.CalculateIssueModelStatsResponse + ) @pytest.mark.asyncio -async def test_get_phrase_matcher_async_from_dict(): - await test_get_phrase_matcher_async(request_type=dict) +async def test_calculate_issue_model_stats_async_from_dict(): + await test_calculate_issue_model_stats_async(request_type=dict) -def test_get_phrase_matcher_field_headers(): +def test_calculate_issue_model_stats_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() - request.name = "name_value" + request.issue_model = "issue_model_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: - call.return_value = resources.PhraseMatcher() - client.get_phrase_matcher(request) + call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -12275,30 +12283,30 @@ def test_get_phrase_matcher_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "issue_model=issue_model_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_phrase_matcher_field_headers_async(): +async def test_calculate_issue_model_stats_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() - request.name = "name_value" + request.issue_model = "issue_model_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() + contact_center_insights.CalculateIssueModelStatsResponse() ) - await client.get_phrase_matcher(request) + await client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12309,37 +12317,37 @@ async def test_get_phrase_matcher_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "issue_model=issue_model_value", ) in kw["metadata"] -def test_get_phrase_matcher_flattened(): +def test_calculate_issue_model_stats_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_phrase_matcher( - name="name_value", + client.calculate_issue_model_stats( + issue_model="issue_model_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].issue_model + mock_val = "issue_model_value" assert arg == mock_val -def test_get_phrase_matcher_flattened_error(): +def test_calculate_issue_model_stats_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12347,45 +12355,45 @@ def test_get_phrase_matcher_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_phrase_matcher( - contact_center_insights.GetPhraseMatcherRequest(), - name="name_value", + client.calculate_issue_model_stats( + contact_center_insights.CalculateIssueModelStatsRequest(), + issue_model="issue_model_value", ) @pytest.mark.asyncio -async def test_get_phrase_matcher_flattened_async(): +async def test_calculate_issue_model_stats_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_phrase_matcher), "__call__" + type(client.transport.calculate_issue_model_stats), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = contact_center_insights.CalculateIssueModelStatsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() + contact_center_insights.CalculateIssueModelStatsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_phrase_matcher( - name="name_value", + response = await client.calculate_issue_model_stats( + issue_model="issue_model_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].issue_model + mock_val = "issue_model_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_phrase_matcher_flattened_error_async(): +async def test_calculate_issue_model_stats_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12393,20 +12401,20 @@ async def test_get_phrase_matcher_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_phrase_matcher( - contact_center_insights.GetPhraseMatcherRequest(), - name="name_value", + await client.calculate_issue_model_stats( + contact_center_insights.CalculateIssueModelStatsRequest(), + issue_model="issue_model_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListPhraseMatchersRequest, + contact_center_insights.CreatePhraseMatcherRequest, dict, ], ) -def test_list_phrase_matchers(request_type, transport: str = "grpc"): +def test_create_phrase_matcher(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12418,26 +12426,38 @@ def test_list_phrase_matchers(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListPhraseMatchersResponse( - next_page_token="next_page_token_value", + call.return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) - response = client.list_phrase_matchers(request) + response = client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPhraseMatchersPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_list_phrase_matchers_empty_call(): +def test_create_phrase_matcher_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -12447,18 +12467,18 @@ def test_list_phrase_matchers_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_phrase_matchers() + client.create_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListPhraseMatchersRequest() + assert args[0] == contact_center_insights.CreatePhraseMatcherRequest() -def test_list_phrase_matchers_non_empty_request_with_auto_populated_field(): +def test_create_phrase_matcher_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = ContactCenterInsightsClient( @@ -12469,30 +12489,26 @@ def test_list_phrase_matchers_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.ListPhraseMatchersRequest( + request = contact_center_insights.CreatePhraseMatcherRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_phrase_matchers(request=request) + client.create_phrase_matcher(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListPhraseMatchersRequest( + assert args[0] == contact_center_insights.CreatePhraseMatcherRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", ) -def test_list_phrase_matchers_use_cached_wrapped_rpc(): +def test_create_phrase_matcher_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12507,7 +12523,8 @@ def test_list_phrase_matchers_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_phrase_matchers in client._transport._wrapped_methods + client._transport.create_phrase_matcher + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -12516,15 +12533,15 @@ def test_list_phrase_matchers_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) 
expect a string. ) client._transport._wrapped_methods[ - client._transport.list_phrase_matchers + client._transport.create_phrase_matcher ] = mock_rpc request = {} - client.list_phrase_matchers(request) + client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_phrase_matchers(request) + client.create_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12532,7 +12549,7 @@ def test_list_phrase_matchers_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_phrase_matchers_empty_call_async(): +async def test_create_phrase_matcher_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -12542,22 +12559,28 @@ async def test_list_phrase_matchers_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListPhraseMatchersResponse( - next_page_token="next_page_token_value", + resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) ) - response = await client.list_phrase_matchers() + response = await client.create_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListPhraseMatchersRequest() + assert args[0] == contact_center_insights.CreatePhraseMatcherRequest() @pytest.mark.asyncio -async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( +async def test_create_phrase_matcher_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12574,33 +12597,34 @@ async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_phrase_matchers + client._client._transport.create_phrase_matcher in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_phrase_matchers - ] = mock_object + client._client._transport.create_phrase_matcher + ] = mock_rpc request = {} - await client.list_phrase_matchers(request) + await client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_phrase_matchers(request) + await client.create_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_phrase_matchers_async( +async def test_create_phrase_matcher_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.ListPhraseMatchersRequest, + request_type=contact_center_insights.CreatePhraseMatcherRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12613,49 +12637,61 @@ async def test_list_phrase_matchers_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListPhraseMatchersResponse( - next_page_token="next_page_token_value", + resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) ) - response = await client.list_phrase_matchers(request) + response = await client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPhraseMatchersAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT @pytest.mark.asyncio -async def test_list_phrase_matchers_async_from_dict(): - await test_list_phrase_matchers_async(request_type=dict) +async def test_create_phrase_matcher_async_from_dict(): + await test_create_phrase_matcher_async(request_type=dict) -def test_list_phrase_matchers_field_headers(): +def test_create_phrase_matcher_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: - call.return_value = contact_center_insights.ListPhraseMatchersResponse() - client.list_phrase_matchers(request) + call.return_value = resources.PhraseMatcher() + client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -12671,25 +12707,25 @@ def test_list_phrase_matchers_field_headers(): @pytest.mark.asyncio -async def test_list_phrase_matchers_field_headers_async(): +async def test_create_phrase_matcher_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListPhraseMatchersResponse() + resources.PhraseMatcher() ) - await client.list_phrase_matchers(request) + await client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12704,21 +12740,22 @@ async def test_list_phrase_matchers_field_headers_async(): ) in kw["metadata"] -def test_list_phrase_matchers_flattened(): +def test_create_phrase_matcher_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListPhraseMatchersResponse() + call.return_value = resources.PhraseMatcher() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_phrase_matchers( + client.create_phrase_matcher( parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -12728,9 +12765,12 @@ def test_list_phrase_matchers_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].phrase_matcher + mock_val = resources.PhraseMatcher(name="name_value") + assert arg == mock_val -def test_list_phrase_matchers_flattened_error(): +def test_create_phrase_matcher_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12738,32 +12778,34 @@ def test_list_phrase_matchers_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_phrase_matchers( - contact_center_insights.ListPhraseMatchersRequest(), + client.create_phrase_matcher( + contact_center_insights.CreatePhraseMatcherRequest(), parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) @pytest.mark.asyncio -async def test_list_phrase_matchers_flattened_async(): +async def test_create_phrase_matcher_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" + type(client.transport.create_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListPhraseMatchersResponse() + call.return_value = resources.PhraseMatcher() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListPhraseMatchersResponse() + resources.PhraseMatcher() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_phrase_matchers( + response = await client.create_phrase_matcher( parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -12773,10 +12815,13 @@ async def test_list_phrase_matchers_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].phrase_matcher + mock_val = resources.PhraseMatcher(name="name_value") + assert arg == mock_val @pytest.mark.asyncio -async def test_list_phrase_matchers_flattened_error_async(): +async def test_create_phrase_matcher_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12784,250 +12829,64 @@ async def test_list_phrase_matchers_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_phrase_matchers( - contact_center_insights.ListPhraseMatchersRequest(), + await client.create_phrase_matcher( + contact_center_insights.CreatePhraseMatcherRequest(), parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) -def test_list_phrase_matchers_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.GetPhraseMatcherRequest, + dict, + ], +) +def test_get_phrase_matcher(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_phrase_matchers(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.PhraseMatcher) for i in results) - - -def test_list_phrase_matchers_pages(transport_name: str = "grpc"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_phrase_matchers), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - RuntimeError, - ) - pages = list(client.list_phrase_matchers(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_phrase_matchers_async_pager(): - client = ContactCenterInsightsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_phrase_matchers), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_phrase_matchers( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.PhraseMatcher) for i in responses) - - -@pytest.mark.asyncio -async def test_list_phrase_matchers_async_pages(): - client = ContactCenterInsightsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_phrase_matchers), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_phrase_matchers(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.DeletePhraseMatcherRequest, - dict, - ], -) -def test_delete_phrase_matcher(request_type, transport: str = "grpc"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_phrase_matcher(request) + call.return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + ) + response = client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeletePhraseMatcherRequest() + request = contact_center_insights.GetPhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_delete_phrase_matcher_empty_call(): +def test_get_phrase_matcher_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -13037,18 +12896,18 @@ def test_delete_phrase_matcher_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_phrase_matcher() + client.get_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeletePhraseMatcherRequest() + assert args[0] == contact_center_insights.GetPhraseMatcherRequest() -def test_delete_phrase_matcher_non_empty_request_with_auto_populated_field(): +def test_get_phrase_matcher_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -13059,26 +12918,26 @@ def test_delete_phrase_matcher_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.DeletePhraseMatcherRequest( + request = contact_center_insights.GetPhraseMatcherRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_phrase_matcher(request=request) + client.get_phrase_matcher(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeletePhraseMatcherRequest( + assert args[0] == contact_center_insights.GetPhraseMatcherRequest( name="name_value", ) -def test_delete_phrase_matcher_use_cached_wrapped_rpc(): +def test_get_phrase_matcher_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13093,8 +12952,7 @@ def test_delete_phrase_matcher_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_phrase_matcher - in client._transport._wrapped_methods + client._transport.get_phrase_matcher in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -13103,15 +12961,15 @@ def test_delete_phrase_matcher_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_phrase_matcher + client._transport.get_phrase_matcher ] = mock_rpc request = {} - client.delete_phrase_matcher(request) + client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_phrase_matcher(request) + client.get_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13119,7 +12977,7 @@ def test_delete_phrase_matcher_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_phrase_matcher_empty_call_async(): +async def test_get_phrase_matcher_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -13129,18 +12987,28 @@ async def test_delete_phrase_matcher_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_phrase_matcher() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + ) + ) + response = await client.get_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeletePhraseMatcherRequest() + assert args[0] == contact_center_insights.GetPhraseMatcherRequest() @pytest.mark.asyncio -async def test_delete_phrase_matcher_async_use_cached_wrapped_rpc( +async def test_get_phrase_matcher_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13157,33 +13025,34 @@ async def test_delete_phrase_matcher_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_phrase_matcher + client._client._transport.get_phrase_matcher in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_phrase_matcher - ] = mock_object + client._client._transport.get_phrase_matcher + ] = mock_rpc request = {} - 
await client.delete_phrase_matcher(request) + await client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_phrase_matcher(request) + await client.get_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_phrase_matcher_async( +async def test_get_phrase_matcher_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.DeletePhraseMatcherRequest, + request_type=contact_center_insights.GetPhraseMatcherRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13196,44 +13065,61 @@ async def test_delete_phrase_matcher_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_phrase_matcher(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + ) + ) + response = await client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeletePhraseMatcherRequest() + request = contact_center_insights.GetPhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT @pytest.mark.asyncio -async def test_delete_phrase_matcher_async_from_dict(): - await test_delete_phrase_matcher_async(request_type=dict) +async def test_get_phrase_matcher_async_from_dict(): + await test_get_phrase_matcher_async(request_type=dict) -def test_delete_phrase_matcher_field_headers(): +def test_get_phrase_matcher_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.DeletePhraseMatcherRequest() + request = contact_center_insights.GetPhraseMatcherRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: - call.return_value = None - client.delete_phrase_matcher(request) + call.return_value = resources.PhraseMatcher() + client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -13249,23 +13135,25 @@ def test_delete_phrase_matcher_field_headers(): @pytest.mark.asyncio -async def test_delete_phrase_matcher_field_headers_async(): +async def test_get_phrase_matcher_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.DeletePhraseMatcherRequest() + request = contact_center_insights.GetPhraseMatcherRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_phrase_matcher(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher() + ) + await client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13280,20 +13168,20 @@ async def test_delete_phrase_matcher_field_headers_async(): ) in kw["metadata"] -def test_delete_phrase_matcher_flattened(): +def test_get_phrase_matcher_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.PhraseMatcher() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_phrase_matcher( + client.get_phrase_matcher( name="name_value", ) @@ -13306,7 +13194,7 @@ def test_delete_phrase_matcher_flattened(): assert arg == mock_val -def test_delete_phrase_matcher_flattened_error(): +def test_get_phrase_matcher_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13314,29 +13202,31 @@ def test_delete_phrase_matcher_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_phrase_matcher( - contact_center_insights.DeletePhraseMatcherRequest(), + client.get_phrase_matcher( + contact_center_insights.GetPhraseMatcherRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_phrase_matcher_flattened_async(): +async def test_get_phrase_matcher_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_phrase_matcher), "__call__" + type(client.transport.get_phrase_matcher), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.PhraseMatcher() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.delete_phrase_matcher( + response = await client.get_phrase_matcher( name="name_value", ) @@ -13350,7 +13240,7 @@ async def test_delete_phrase_matcher_flattened_async(): @pytest.mark.asyncio -async def test_delete_phrase_matcher_flattened_error_async(): +async def test_get_phrase_matcher_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13358,8 +13248,8 @@ async def test_delete_phrase_matcher_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_phrase_matcher( - contact_center_insights.DeletePhraseMatcherRequest(), + await client.get_phrase_matcher( + contact_center_insights.GetPhraseMatcherRequest(), name="name_value", ) @@ -13367,11 +13257,11 @@ async def test_delete_phrase_matcher_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdatePhraseMatcherRequest, + contact_center_insights.ListPhraseMatchersRequest, dict, ], ) -def test_update_phrase_matcher(request_type, transport: str = "grpc"): +def test_list_phrase_matchers(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13383,38 +13273,26 @@ def test_update_phrase_matcher(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + call.return_value = contact_center_insights.ListPhraseMatchersResponse( + next_page_token="next_page_token_value", ) - response = client.update_phrase_matcher(request) + response = client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance(response, pagers.ListPhraseMatchersPager) + assert response.next_page_token == "next_page_token_value" -def test_update_phrase_matcher_empty_call(): +def test_list_phrase_matchers_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -13424,18 +13302,18 @@ def test_update_phrase_matcher_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_phrase_matcher() + client.list_phrase_matchers() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() + assert args[0] == contact_center_insights.ListPhraseMatchersRequest() -def test_update_phrase_matcher_non_empty_request_with_auto_populated_field(): +def test_list_phrase_matchers_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -13446,22 +13324,30 @@ def test_update_phrase_matcher_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_phrase_matcher(request=request) + client.list_phrase_matchers(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() + assert args[0] == contact_center_insights.ListPhraseMatchersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) -def test_update_phrase_matcher_use_cached_wrapped_rpc(): +def test_list_phrase_matchers_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13476,8 +13362,7 @@ def test_update_phrase_matcher_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_phrase_matcher - in client._transport._wrapped_methods + client._transport.list_phrase_matchers in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -13486,15 +13371,15 @@ def test_update_phrase_matcher_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_phrase_matcher + client._transport.list_phrase_matchers ] = mock_rpc request = {} - client.update_phrase_matcher(request) + client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_phrase_matcher(request) + client.list_phrase_matchers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13502,7 +13387,7 @@ def test_update_phrase_matcher_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_phrase_matcher_empty_call_async(): +async def test_list_phrase_matchers_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -13512,28 +13397,22 @@ async def test_update_phrase_matcher_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + contact_center_insights.ListPhraseMatchersResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_phrase_matcher() + response = await client.list_phrase_matchers() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() + assert args[0] == contact_center_insights.ListPhraseMatchersRequest() @pytest.mark.asyncio -async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( +async def test_list_phrase_matchers_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13550,33 +13429,34 @@ async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_phrase_matcher + client._client._transport.list_phrase_matchers in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_phrase_matcher - ] = 
mock_object + client._client._transport.list_phrase_matchers + ] = mock_rpc request = {} - await client.update_phrase_matcher(request) + await client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_phrase_matcher(request) + await client.list_phrase_matchers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_phrase_matcher_async( +async def test_list_phrase_matchers_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.UpdatePhraseMatcherRequest, + request_type=contact_center_insights.ListPhraseMatchersRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13589,61 +13469,49 @@ async def test_update_phrase_matcher_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + contact_center_insights.ListPhraseMatchersResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_phrase_matcher(request) + response = await client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance(response, pagers.ListPhraseMatchersAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_update_phrase_matcher_async_from_dict(): - await test_update_phrase_matcher_async(request_type=dict) +async def test_list_phrase_matchers_async_from_dict(): + await test_list_phrase_matchers_async(request_type=dict) -def test_update_phrase_matcher_field_headers(): +def test_list_phrase_matchers_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() - request.phrase_matcher.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: - call.return_value = resources.PhraseMatcher() - client.update_phrase_matcher(request) + call.return_value = contact_center_insights.ListPhraseMatchersResponse() + client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13654,30 +13522,30 @@ def test_update_phrase_matcher_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "phrase_matcher.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_phrase_matcher_field_headers_async(): +async def test_list_phrase_matchers_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() - request.phrase_matcher.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() + contact_center_insights.ListPhraseMatchersResponse() ) - await client.update_phrase_matcher(request) + await client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -13688,41 +13556,37 @@ async def test_update_phrase_matcher_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "phrase_matcher.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_phrase_matcher_flattened(): +def test_list_phrase_matchers_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = contact_center_insights.ListPhraseMatchersResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_phrase_matcher( - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_phrase_matchers( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].phrase_matcher - mock_val = resources.PhraseMatcher(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_phrase_matcher_flattened_error(): +def test_list_phrase_matchers_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13730,50 +13594,45 @@ def test_update_phrase_matcher_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_phrase_matcher( - contact_center_insights.UpdatePhraseMatcherRequest(), - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_phrase_matchers( + contact_center_insights.ListPhraseMatchersRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_phrase_matcher_flattened_async(): +async def test_list_phrase_matchers_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_phrase_matcher), "__call__" + type(client.transport.list_phrase_matchers), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PhraseMatcher() + call.return_value = contact_center_insights.ListPhraseMatchersResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PhraseMatcher() + contact_center_insights.ListPhraseMatchersResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_phrase_matcher( - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_phrase_matchers( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].phrase_matcher - mock_val = resources.PhraseMatcher(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_phrase_matcher_flattened_error_async(): +async def test_list_phrase_matchers_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13781,101 +13640,301 @@ async def test_update_phrase_matcher_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_phrase_matcher( - contact_center_insights.UpdatePhraseMatcherRequest(), - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_phrase_matchers( + contact_center_insights.ListPhraseMatchersRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.CalculateStatsRequest, - dict, - ], -) -def test_calculate_stats(request_type, transport: str = "grpc"): +def test_list_phrase_matchers_pager(transport_name: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = contact_center_insights.CalculateStatsResponse( - average_turn_count=1931, - conversation_count=1955, + with mock.patch.object( + type(client.transport.list_phrase_matchers), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + RuntimeError, ) - response = client.calculate_stats(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = contact_center_insights.CalculateStatsRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_phrase_matchers(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. 
- assert isinstance(response, contact_center_insights.CalculateStatsResponse) - assert response.average_turn_count == 1931 - assert response.conversation_count == 1955 + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.PhraseMatcher) for i in results) -def test_calculate_stats_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_phrase_matchers_pages(transport_name: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object( + type(client.transport.list_phrase_matchers), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + RuntimeError, ) - client.calculate_stats() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateStatsRequest() + pages = list(client.list_phrase_matchers(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_calculate_stats_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContactCenterInsightsClient( +@pytest.mark.asyncio +async def test_list_phrase_matchers_async_pager(): + client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = contact_center_insights.CalculateStatsRequest( - location="location_value", - filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object( + type(client.transport.list_phrase_matchers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + RuntimeError, ) - client.calculate_stats(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateStatsRequest( - location="location_value", - filter="filter_value", + async_pager = await client.list_phrase_matchers( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + assert len(responses) == 6 + assert all(isinstance(i, resources.PhraseMatcher) for i in responses) -def test_calculate_stats_use_cached_wrapped_rpc(): + +@pytest.mark.asyncio +async def test_list_phrase_matchers_async_pages(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phrase_matchers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_phrase_matchers(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.DeletePhraseMatcherRequest, + dict, + ], +) +def test_delete_phrase_matcher(request_type, transport: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_phrase_matcher(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.DeletePhraseMatcherRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_phrase_matcher_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_phrase_matcher() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.DeletePhraseMatcherRequest() + + +def test_delete_phrase_matcher_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.DeletePhraseMatcherRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_phrase_matcher(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.DeletePhraseMatcherRequest( + name="name_value", + ) + + +def test_delete_phrase_matcher_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13889,21 +13948,26 @@ def test_calculate_stats_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.calculate_stats in client._transport._wrapped_methods + assert ( + client._transport.delete_phrase_matcher + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.calculate_stats] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_phrase_matcher + ] = mock_rpc request = {} - client.calculate_stats(request) + client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.calculate_stats(request) + client.delete_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13911,7 +13975,7 @@ def test_calculate_stats_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_calculate_stats_empty_call_async(): +async def test_delete_phrase_matcher_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -13920,22 +13984,19 @@ async def test_calculate_stats_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateStatsResponse( - average_turn_count=1931, - conversation_count=1955, - ) - ) - response = await client.calculate_stats() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CalculateStatsRequest() + assert args[0] == contact_center_insights.DeletePhraseMatcherRequest() @pytest.mark.asyncio -async def test_calculate_stats_async_use_cached_wrapped_rpc( +async def test_delete_phrase_matcher_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13952,33 +14013,34 @@ async def test_calculate_stats_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.calculate_stats + client._client._transport.delete_phrase_matcher in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.calculate_stats - ] = mock_object + client._client._transport.delete_phrase_matcher + ] = mock_rpc request = {} - await client.calculate_stats(request) + await client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.calculate_stats(request) + await client.delete_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_calculate_stats_async( +async def test_delete_phrase_matcher_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.CalculateStatsRequest, + request_type=contact_center_insights.DeletePhraseMatcherRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13990,48 +14052,45 @@ async def test_calculate_stats_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateStatsResponse( - average_turn_count=1931, - conversation_count=1955, - ) - ) - response = await client.calculate_stats(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, contact_center_insights.CalculateStatsResponse) - assert response.average_turn_count == 1931 - assert response.conversation_count == 1955 + assert response is None @pytest.mark.asyncio -async def test_calculate_stats_async_from_dict(): - await test_calculate_stats_async(request_type=dict) +async def test_delete_phrase_matcher_async_from_dict(): + await test_delete_phrase_matcher_async(request_type=dict) -def test_calculate_stats_field_headers(): +def test_delete_phrase_matcher_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() - request.location = "location_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: - call.return_value = contact_center_insights.CalculateStatsResponse() - client.calculate_stats(request) + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: + call.return_value = None + client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14042,28 +14101,28 @@ def test_calculate_stats_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "location=location_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_calculate_stats_field_headers_async(): +async def test_delete_phrase_matcher_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() - request.location = "location_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateStatsResponse() - ) - await client.calculate_stats(request) + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14074,35 +14133,37 @@ async def test_calculate_stats_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "location=location_value", + "name=name_value", ) in kw["metadata"] -def test_calculate_stats_flattened(): +def test_delete_phrase_matcher_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.CalculateStatsResponse() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.calculate_stats( - location="location_value", + client.delete_phrase_matcher( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_calculate_stats_flattened_error(): +def test_delete_phrase_matcher_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14110,43 +14171,43 @@ def test_calculate_stats_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.calculate_stats( - contact_center_insights.CalculateStatsRequest(), - location="location_value", + client.delete_phrase_matcher( + contact_center_insights.DeletePhraseMatcherRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_calculate_stats_flattened_async(): +async def test_delete_phrase_matcher_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.CalculateStatsResponse() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.CalculateStatsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.calculate_stats( - location="location_value", + response = await client.delete_phrase_matcher( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_calculate_stats_flattened_error_async(): +async def test_delete_phrase_matcher_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14154,20 +14215,20 @@ async def test_calculate_stats_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.calculate_stats( - contact_center_insights.CalculateStatsRequest(), - location="location_value", + await client.delete_phrase_matcher( + contact_center_insights.DeletePhraseMatcherRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetSettingsRequest, + contact_center_insights.UpdatePhraseMatcherRequest, dict, ], ) -def test_get_settings(request_type, transport: str = "grpc"): +def test_update_phrase_matcher(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14178,27 +14239,39 @@ def test_get_settings(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.Settings( + call.return_value = resources.PhraseMatcher( name="name_value", - language_code="language_code_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) - response = client.get_settings(request) + response = client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) + assert isinstance(response, resources.PhraseMatcher) assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_get_settings_empty_call(): +def test_update_phrase_matcher_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -14207,17 +14280,19 @@ def test_get_settings_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_settings() + client.update_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetSettingsRequest() + assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() -def test_get_settings_non_empty_request_with_auto_populated_field(): +def test_update_phrase_matcher_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -14228,24 +14303,22 @@ def test_get_settings_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.GetSettingsRequest( - name="name_value", - ) + request = contact_center_insights.UpdatePhraseMatcherRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_settings(request=request) + client.update_phrase_matcher(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetSettingsRequest( - name="name_value", - ) + assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() -def test_get_settings_use_cached_wrapped_rpc(): +def test_update_phrase_matcher_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14259,21 +14332,26 @@ def test_get_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_settings in client._transport._wrapped_methods + assert ( + client._transport.update_phrase_matcher + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_phrase_matcher + ] = mock_rpc request = {} - client.get_settings(request) + client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_settings(request) + client.update_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14281,7 +14359,7 @@ def test_get_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_settings_empty_call_async(): +async def test_update_phrase_matcher_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -14290,22 +14368,29 @@ async def test_get_settings_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Settings( + resources.PhraseMatcher( name="name_value", - language_code="language_code_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) ) - response = await client.get_settings() + response = await client.update_phrase_matcher() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetSettingsRequest() + assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest() @pytest.mark.asyncio -async def test_get_settings_async_use_cached_wrapped_rpc( +async def test_update_phrase_matcher_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14322,33 +14407,34 @@ async def test_get_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_settings + client._client._transport.update_phrase_matcher in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_settings - ] = mock_object + client._client._transport.update_phrase_matcher + ] = mock_rpc request = {} - await 
client.get_settings(request) + await client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_settings(request) + await client.update_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_settings_async( +async def test_update_phrase_matcher_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.GetSettingsRequest, + request_type=contact_center_insights.UpdatePhraseMatcherRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14360,48 +14446,62 @@ async def test_get_settings_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Settings( + resources.PhraseMatcher( name="name_value", - language_code="language_code_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) ) - response = await client.get_settings(request) + response = await client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) + assert isinstance(response, resources.PhraseMatcher) assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT @pytest.mark.asyncio -async def test_get_settings_async_from_dict(): - await test_get_settings_async(request_type=dict) +async def test_update_phrase_matcher_async_from_dict(): + await test_update_phrase_matcher_async(request_type=dict) -def test_get_settings_field_headers(): +def test_update_phrase_matcher_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() - request.name = "name_value" + request.phrase_matcher.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_settings), "__call__") as call: - call.return_value = resources.Settings() - client.get_settings(request) + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: + call.return_value = resources.PhraseMatcher() + client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14412,26 +14512,30 @@ def test_get_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "phrase_matcher.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_settings_field_headers_async(): +async def test_update_phrase_matcher_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() - request.name = "name_value" + request.phrase_matcher.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) - await client.get_settings(request) + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher() + ) + await client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -14442,35 +14546,41 @@ async def test_get_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "phrase_matcher.name=name_value", ) in kw["metadata"] -def test_get_settings_flattened(): +def test_update_phrase_matcher_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings() + call.return_value = resources.PhraseMatcher() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_settings( - name="name_value", + client.update_phrase_matcher( + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].phrase_matcher + mock_val = resources.PhraseMatcher(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_settings_flattened_error(): +def test_update_phrase_matcher_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14478,41 +14588,50 @@ def test_get_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_settings( - contact_center_insights.GetSettingsRequest(), - name="name_value", + client.update_phrase_matcher( + contact_center_insights.UpdatePhraseMatcherRequest(), + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_settings_flattened_async(): +async def test_update_phrase_matcher_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object( + type(client.transport.update_phrase_matcher), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings() + call.return_value = resources.PhraseMatcher() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.PhraseMatcher() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_settings( - name="name_value", + response = await client.update_phrase_matcher( + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].phrase_matcher + mock_val = resources.PhraseMatcher(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_settings_flattened_error_async(): +async def test_update_phrase_matcher_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14520,20 +14639,21 @@ async def test_get_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_settings( - contact_center_insights.GetSettingsRequest(), - name="name_value", + await client.update_phrase_matcher( + contact_center_insights.UpdatePhraseMatcherRequest(), + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdateSettingsRequest, + contact_center_insights.CalculateStatsRequest, dict, ], ) -def test_update_settings(request_type, transport: str = "grpc"): +def test_calculate_stats(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14544,27 +14664,27 @@ def test_update_settings(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.Settings( - name="name_value", - language_code="language_code_value", + call.return_value = contact_center_insights.CalculateStatsResponse( + average_turn_count=1931, + conversation_count=1955, ) - response = client.update_settings(request) + response = client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) - assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert isinstance(response, contact_center_insights.CalculateStatsResponse) + assert response.average_turn_count == 1931 + assert response.conversation_count == 1955 -def test_update_settings_empty_call(): +def test_calculate_stats_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -14573,17 +14693,17 @@ def test_update_settings_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_settings() + client.calculate_stats() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateSettingsRequest() + assert args[0] == contact_center_insights.CalculateStatsRequest() -def test_update_settings_non_empty_request_with_auto_populated_field(): +def test_calculate_stats_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -14594,20 +14714,26 @@ def test_update_settings_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest( + location="location_value", + filter="filter_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_settings(request=request) + client.calculate_stats(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateSettingsRequest() + assert args[0] == contact_center_insights.CalculateStatsRequest( + location="location_value", + filter="filter_value", + ) -def test_update_settings_use_cached_wrapped_rpc(): +def test_calculate_stats_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14621,21 +14747,21 @@ def test_update_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_settings in client._transport._wrapped_methods + assert client._transport.calculate_stats in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc + client._transport._wrapped_methods[client._transport.calculate_stats] = mock_rpc request = {} - client.update_settings(request) + client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_settings(request) + client.calculate_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14643,7 +14769,7 @@ def test_update_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_settings_empty_call_async(): +async def test_calculate_stats_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -14652,22 +14778,22 @@ async def test_update_settings_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Settings( - name="name_value", - language_code="language_code_value", + contact_center_insights.CalculateStatsResponse( + average_turn_count=1931, + conversation_count=1955, ) ) - response = await client.update_settings() + response = await client.calculate_stats() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateSettingsRequest() + assert args[0] == contact_center_insights.CalculateStatsRequest() @pytest.mark.asyncio -async def test_update_settings_async_use_cached_wrapped_rpc( +async def test_calculate_stats_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14684,33 +14810,34 @@ async def test_update_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_settings + client._client._transport.calculate_stats in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_settings - ] = mock_object + client._client._transport.calculate_stats + ] = mock_rpc request = {} - await client.update_settings(request) + await client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_settings(request) + await client.calculate_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_settings_async( +async def test_calculate_stats_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.UpdateSettingsRequest, + request_type=contact_center_insights.CalculateStatsRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14722,48 +14849,48 @@ async def test_update_settings_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Settings( - name="name_value", - language_code="language_code_value", + contact_center_insights.CalculateStatsResponse( + average_turn_count=1931, + conversation_count=1955, ) ) - response = await client.update_settings(request) + response = await client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Settings) - assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert isinstance(response, contact_center_insights.CalculateStatsResponse) + assert response.average_turn_count == 1931 + assert response.conversation_count == 1955 @pytest.mark.asyncio -async def test_update_settings_async_from_dict(): - await test_update_settings_async(request_type=dict) +async def test_calculate_stats_async_from_dict(): + await test_calculate_stats_async(request_type=dict) -def test_update_settings_field_headers(): +def test_calculate_stats_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest() - request.settings.name = "name_value" + request.location = "location_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: - call.return_value = resources.Settings() - client.update_settings(request) + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + call.return_value = contact_center_insights.CalculateStatsResponse() + client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -14774,26 +14901,28 @@ def test_update_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "settings.name=name_value", + "location=location_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_settings_field_headers_async(): +async def test_calculate_stats_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.CalculateStatsRequest() - request.settings.name = "name_value" + request.location = "location_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) - await client.update_settings(request) + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.CalculateStatsResponse() + ) + await client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14804,39 +14933,35 @@ async def test_update_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "settings.name=name_value", + "location=location_value", ) in kw["metadata"] -def test_update_settings_flattened(): +def test_calculate_stats_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings() + call.return_value = contact_center_insights.CalculateStatsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_settings( - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.calculate_stats( + location="location_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].settings - mock_val = resources.Settings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].location + mock_val = "location_value" assert arg == mock_val -def test_update_settings_flattened_error(): +def test_calculate_stats_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14844,46 +14969,43 @@ def test_update_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_settings( - contact_center_insights.UpdateSettingsRequest(), - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.calculate_stats( + contact_center_insights.CalculateStatsRequest(), + location="location_value", ) @pytest.mark.asyncio -async def test_update_settings_flattened_async(): +async def test_calculate_stats_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Settings() + call.return_value = contact_center_insights.CalculateStatsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.CalculateStatsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_settings( - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.calculate_stats( + location="location_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].settings - mock_val = resources.Settings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].location + mock_val = "location_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_settings_flattened_error_async(): +async def test_calculate_stats_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14891,21 +15013,20 @@ async def test_update_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_settings( - contact_center_insights.UpdateSettingsRequest(), - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.calculate_stats( + contact_center_insights.CalculateStatsRequest(), + location="location_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreateViewRequest, + contact_center_insights.GetSettingsRequest, dict, ], ) -def test_create_view(request_type, transport: str = "grpc"): +def test_get_settings(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14916,29 +15037,27 @@ def test_create_view(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.View( + call.return_value = resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) - response = client.create_view(request) + response = client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.CreateViewRequest() + request = contact_center_insights.GetSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.View) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert response.language_code == "language_code_value" -def test_create_view_empty_call(): +def test_get_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -14947,17 +15066,17 @@ def test_create_view_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_view() + client.get_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreateViewRequest() + assert args[0] == contact_center_insights.GetSettingsRequest() -def test_create_view_non_empty_request_with_auto_populated_field(): +def test_get_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -14968,24 +15087,24 @@ def test_create_view_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.CreateViewRequest( - parent="parent_value", + request = contact_center_insights.GetSettingsRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_view(request=request) + client.get_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreateViewRequest( - parent="parent_value", + assert args[0] == contact_center_insights.GetSettingsRequest( + name="name_value", ) -def test_create_view_use_cached_wrapped_rpc(): +def test_get_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14999,21 +15118,21 @@ def test_create_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_view in client._transport._wrapped_methods + assert client._transport.get_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_view] = mock_rpc + client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc request = {} - client.create_view(request) + client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_view(request) + client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15021,7 +15140,7 @@ def test_create_view_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_view_empty_call_async(): +async def test_get_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -15030,23 +15149,22 @@ async def test_create_view_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( + resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) ) - response = await client.create_view() + response = await client.get_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.CreateViewRequest() + assert args[0] == contact_center_insights.GetSettingsRequest() @pytest.mark.asyncio -async def test_create_view_async_use_cached_wrapped_rpc( +async def test_get_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15063,33 +15181,34 @@ async def test_create_view_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_view + client._client._transport.get_settings in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_view - ] = mock_object + client._client._transport.get_settings + ] = mock_rpc request = {} - await client.create_view(request) + await client.get_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.create_view(request) + await client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_view_async( +async def test_get_settings_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.CreateViewRequest, + request_type=contact_center_insights.GetSettingsRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15101,50 +15220,48 @@ async def test_create_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( + resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) ) - response = await client.create_view(request) + response = await client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.CreateViewRequest() + request = contact_center_insights.GetSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.View) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert response.language_code == "language_code_value" @pytest.mark.asyncio -async def test_create_view_async_from_dict(): - await test_create_view_async(request_type=dict) +async def test_get_settings_async_from_dict(): + await test_get_settings_async(request_type=dict) -def test_create_view_field_headers(): +def test_get_settings_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.CreateViewRequest() + request = contact_center_insights.GetSettingsRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: - call.return_value = resources.View() - client.create_view(request) + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = resources.Settings() + client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15155,26 +15272,26 @@ def test_create_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_view_field_headers_async(): +async def test_get_settings_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.CreateViewRequest() + request = contact_center_insights.GetSettingsRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) - await client.create_view(request) + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) + await client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15185,39 +15302,35 @@ async def test_create_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_view_flattened(): +def test_get_settings_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = resources.Settings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_view( - parent="parent_value", - view=resources.View(name="name_value"), + client.get_settings( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].view - mock_val = resources.View(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_view_flattened_error(): +def test_get_settings_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15225,46 +15338,41 @@ def test_create_view_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_view( - contact_center_insights.CreateViewRequest(), - parent="parent_value", - view=resources.View(name="name_value"), + client.get_settings( + contact_center_insights.GetSettingsRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_view_flattened_async(): +async def test_get_settings_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = resources.Settings() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_view( - parent="parent_value", - view=resources.View(name="name_value"), + response = await client.get_settings( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].view - mock_val = resources.View(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_view_flattened_error_async(): +async def test_get_settings_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15272,21 +15380,20 @@ async def test_create_view_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_view( - contact_center_insights.CreateViewRequest(), - parent="parent_value", - view=resources.View(name="name_value"), + await client.get_settings( + contact_center_insights.GetSettingsRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetViewRequest, + contact_center_insights.UpdateSettingsRequest, dict, ], ) -def test_get_view(request_type, transport: str = "grpc"): +def test_update_settings(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15297,29 +15404,27 @@ def test_get_view(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.View( + call.return_value = resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) - response = client.get_view(request) + response = client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetViewRequest() + request = contact_center_insights.UpdateSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.View) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert response.language_code == "language_code_value" -def test_get_view_empty_call(): +def test_update_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -15328,17 +15433,17 @@ def test_get_view_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_view() + client.update_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetViewRequest() + assert args[0] == contact_center_insights.UpdateSettingsRequest() -def test_get_view_non_empty_request_with_auto_populated_field(): +def test_update_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -15349,24 +15454,20 @@ def test_get_view_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.GetViewRequest( - name="name_value", - ) + request = contact_center_insights.UpdateSettingsRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_view(request=request) + client.update_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetViewRequest( - name="name_value", - ) + assert args[0] == contact_center_insights.UpdateSettingsRequest() -def test_get_view_use_cached_wrapped_rpc(): +def test_update_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15380,21 +15481,21 @@ def test_get_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_view in client._transport._wrapped_methods + assert client._transport.update_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_view] = mock_rpc + client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc request = {} - client.get_view(request) + client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_view(request) + client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15402,7 +15503,7 @@ def test_get_view_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_view_empty_call_async(): +async def test_update_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -15411,23 +15512,24 @@ async def test_get_view_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( + resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) ) - response = await client.get_view() + response = await client.update_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.GetViewRequest() + assert args[0] == contact_center_insights.UpdateSettingsRequest() @pytest.mark.asyncio -async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -15442,32 +15544,34 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # Ensure method has been cached assert ( - client._client._transport.get_view + client._client._transport.update_settings in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_view - ] = mock_object + client._client._transport.update_settings + ] = mock_rpc request = {} - await client.get_view(request) + await client.update_settings(request) # Establish that the underlying 
gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_view(request) + await client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_view_async( - transport: str = "grpc_asyncio", request_type=contact_center_insights.GetViewRequest +async def test_update_settings_async( + transport: str = "grpc_asyncio", + request_type=contact_center_insights.UpdateSettingsRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15479,50 +15583,48 @@ async def test_get_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( + resources.Settings( name="name_value", - display_name="display_name_value", - value="value_value", + language_code="language_code_value", ) ) - response = await client.get_view(request) + response = await client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.GetViewRequest() + request = contact_center_insights.UpdateSettingsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.View) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert response.language_code == "language_code_value" @pytest.mark.asyncio -async def test_get_view_async_from_dict(): - await test_get_view_async(request_type=dict) +async def test_update_settings_async_from_dict(): + await test_update_settings_async(request_type=dict) -def test_get_view_field_headers(): +def test_update_settings_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.GetViewRequest() + request = contact_center_insights.UpdateSettingsRequest() - request.name = "name_value" + request.settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: - call.return_value = resources.View() - client.get_view(request) + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = resources.Settings() + client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15533,26 +15635,26 @@ def test_get_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "settings.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_view_field_headers_async(): +async def test_update_settings_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.GetViewRequest() + request = contact_center_insights.UpdateSettingsRequest() - request.name = "name_value" + request.settings.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) - await client.get_view(request) + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) + await client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15563,35 +15665,39 @@ async def test_get_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "settings.name=name_value", ) in kw["metadata"] -def test_get_view_flattened(): +def test_update_settings_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = resources.Settings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_view( - name="name_value", + client.update_settings( + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].settings + mock_val = resources.Settings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_view_flattened_error(): +def test_update_settings_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15599,41 +15705,46 @@ def test_get_view_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_view( - contact_center_insights.GetViewRequest(), - name="name_value", + client.update_settings( + contact_center_insights.UpdateSettingsRequest(), + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_view_flattened_async(): +async def test_update_settings_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = resources.Settings() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_view( - name="name_value", + response = await client.update_settings( + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].settings + mock_val = resources.Settings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_view_flattened_error_async(): +async def test_update_settings_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15641,20 +15752,21 @@ async def test_get_view_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_view( - contact_center_insights.GetViewRequest(), - name="name_value", + await client.update_settings( + contact_center_insights.UpdateSettingsRequest(), + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListViewsRequest, + contact_center_insights.GetEncryptionSpecRequest, dict, ], ) -def test_list_views(request_type, transport: str = "grpc"): +def test_get_encryption_spec(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15665,25 +15777,29 @@ def test_list_views(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListViewsResponse( - next_page_token="next_page_token_value", + call.return_value = resources.EncryptionSpec( + name="name_value", + kms_key="kms_key_value", ) - response = client.list_views(request) + response = client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListViewsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.EncryptionSpec) + assert response.name == "name_value" + assert response.kms_key == "kms_key_value" -def test_list_views_empty_call(): +def test_get_encryption_spec_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -15692,17 +15808,19 @@ def test_list_views_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_views() + client.get_encryption_spec() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListViewsRequest() + assert args[0] == contact_center_insights.GetEncryptionSpecRequest() -def test_list_views_non_empty_request_with_auto_populated_field(): +def test_get_encryption_spec_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ContactCenterInsightsClient( @@ -15713,26 +15831,26 @@ def test_list_views_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.ListViewsRequest( - parent="parent_value", - page_token="page_token_value", + request = contact_center_insights.GetEncryptionSpecRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_views(request=request) + client.get_encryption_spec(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListViewsRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == contact_center_insights.GetEncryptionSpecRequest( + name="name_value", ) -def test_list_views_use_cached_wrapped_rpc(): +def test_get_encryption_spec_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15746,21 +15864,25 @@ def test_list_views_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_views in client._transport._wrapped_methods + assert ( + client._transport.get_encryption_spec in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_views] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_encryption_spec + ] = mock_rpc request = {} - client.list_views(request) + client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_views(request) + client.get_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15768,7 +15890,7 @@ def test_list_views_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_views_empty_call_async(): +async def test_get_encryption_spec_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ContactCenterInsightsAsyncClient( @@ -15777,21 +15899,26 @@ async def test_list_views_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListViewsResponse( - next_page_token="next_page_token_value", + resources.EncryptionSpec( + name="name_value", + kms_key="kms_key_value", ) ) - response = await client.list_views() + response = await client.get_encryption_spec() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.ListViewsRequest() + assert args[0] == contact_center_insights.GetEncryptionSpecRequest() @pytest.mark.asyncio -async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_encryption_spec_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -15806,33 +15933,34 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.list_views + client._client._transport.get_encryption_spec in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_views - ] = mock_object + client._client._transport.get_encryption_spec + ] = mock_rpc request = {} - await client.list_views(request) + await 
client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_views(request) + await client.get_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_views_async( +async def test_get_encryption_spec_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.ListViewsRequest, + request_type=contact_center_insights.GetEncryptionSpecRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15844,46 +15972,52 @@ async def test_list_views_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListViewsResponse( - next_page_token="next_page_token_value", + resources.EncryptionSpec( + name="name_value", + kms_key="kms_key_value", ) ) - response = await client.list_views(request) + response = await client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.ListViewsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListViewsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.EncryptionSpec) + assert response.name == "name_value" + assert response.kms_key == "kms_key_value" @pytest.mark.asyncio -async def test_list_views_async_from_dict(): - await test_list_views_async(request_type=dict) +async def test_get_encryption_spec_async_from_dict(): + await test_get_encryption_spec_async(request_type=dict) -def test_list_views_field_headers(): +def test_get_encryption_spec_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.ListViewsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - call.return_value = contact_center_insights.ListViewsResponse() - client.list_views(request) + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: + call.return_value = resources.EncryptionSpec() + client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15894,28 +16028,30 @@ def test_list_views_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_views_field_headers_async(): +async def test_get_encryption_spec_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = contact_center_insights.ListViewsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListViewsResponse() + resources.EncryptionSpec() ) - await client.list_views(request) + await client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15926,35 +16062,37 @@ async def test_list_views_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_views_flattened(): +def test_get_encryption_spec_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListViewsResponse() + call.return_value = resources.EncryptionSpec() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_views( - parent="parent_value", + client.get_encryption_spec( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_views_flattened_error(): +def test_get_encryption_spec_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15962,43 +16100,45 @@ def test_list_views_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_views( - contact_center_insights.ListViewsRequest(), - parent="parent_value", + client.get_encryption_spec( + contact_center_insights.GetEncryptionSpecRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_views_flattened_async(): +async def test_get_encryption_spec_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object( + type(client.transport.get_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = contact_center_insights.ListViewsResponse() + call.return_value = resources.EncryptionSpec() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - contact_center_insights.ListViewsResponse() + resources.EncryptionSpec() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_views( - parent="parent_value", + response = await client.get_encryption_spec( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_views_flattened_error_async(): +async def test_get_encryption_spec_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16006,247 +16146,48 @@ async def test_list_views_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_views( - contact_center_insights.ListViewsRequest(), - parent="parent_value", + await client.get_encryption_spec( + contact_center_insights.GetEncryptionSpecRequest(), + name="name_value", ) -def test_list_views_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.InitializeEncryptionSpecRequest, + dict, + ], +) +def test_initialize_encryption_spec(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - resources.View(), - ], - next_page_token="abc", - ), - contact_center_insights.ListViewsResponse( - views=[], - next_page_token="def", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_views(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.View) for i in results) - - -def test_list_views_pages(transport_name: str = "grpc"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - resources.View(), - ], - next_page_token="abc", - ), - contact_center_insights.ListViewsResponse( - views=[], - next_page_token="def", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - ], - ), - RuntimeError, - ) - pages = list(client.list_views(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_views_async_pager(): - client = ContactCenterInsightsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - resources.View(), - ], - next_page_token="abc", - ), - contact_center_insights.ListViewsResponse( - views=[], - next_page_token="def", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_views( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.View) for i in responses) - - -@pytest.mark.asyncio -async def test_list_views_async_pages(): - client = ContactCenterInsightsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + type(client.transport.initialize_encryption_spec), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - resources.View(), - ], - next_page_token="abc", - ), - contact_center_insights.ListViewsResponse( - views=[], - next_page_token="def", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListViewsResponse( - views=[ - resources.View(), - resources.View(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_views(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.UpdateViewRequest, - dict, - ], -) -def test_update_view(request_type, transport: str = "grpc"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.View( - name="name_value", - display_name="display_name_value", - value="value_value", - ) - response = client.update_view(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateViewRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.View) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert isinstance(response, future.Future) -def test_update_view_empty_call(): +def test_initialize_encryption_spec_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -16255,17 +16196,19 @@ def test_update_view_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_view() + client.initialize_encryption_spec() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateViewRequest() + assert args[0] == contact_center_insights.InitializeEncryptionSpecRequest() -def test_update_view_non_empty_request_with_auto_populated_field(): +def test_initialize_encryption_spec_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = ContactCenterInsightsClient( @@ -16276,20 +16219,22 @@ def test_update_view_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.UpdateViewRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_view(request=request) + client.initialize_encryption_spec(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateViewRequest() + assert args[0] == contact_center_insights.InitializeEncryptionSpecRequest() -def test_update_view_use_cached_wrapped_rpc(): +def test_initialize_encryption_spec_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16303,21 +16248,31 @@ def test_update_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_view in client._transport._wrapped_methods + assert ( + client._transport.initialize_encryption_spec + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_view] = mock_rpc + client._transport._wrapped_methods[ + client._transport.initialize_encryption_spec + ] = mock_rpc request = {} - client.update_view(request) + client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_view(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.initialize_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16325,7 +16280,7 @@ def test_update_view_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_view_empty_call_async(): +async def test_initialize_encryption_spec_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -16334,23 +16289,21 @@ async def test_update_view_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( - name="name_value", - display_name="display_name_value", - value="value_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_view() + response = await client.initialize_encryption_spec() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.UpdateViewRequest() + assert args[0] == contact_center_insights.InitializeEncryptionSpecRequest() @pytest.mark.asyncio -async def test_update_view_async_use_cached_wrapped_rpc( +async def test_initialize_encryption_spec_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -16367,33 +16320,39 @@ async def test_update_view_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_view + client._client._transport.initialize_encryption_spec in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_view - ] = mock_object + client._client._transport.initialize_encryption_spec + ] = mock_rpc request = {} - await client.update_view(request) + await client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.update_view(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.initialize_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_view_async( +async def test_initialize_encryption_spec_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.UpdateViewRequest, + request_type=contact_center_insights.InitializeEncryptionSpecRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16405,50 +16364,47 @@ async def test_update_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.View( - name="name_value", - display_name="display_name_value", - value="value_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_view(request) + response = await client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.UpdateViewRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.View) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.value == "value_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_view_async_from_dict(): - await test_update_view_async(request_type=dict) +async def test_initialize_encryption_spec_async_from_dict(): + await test_initialize_encryption_spec_async(request_type=dict) -def test_update_view_field_headers(): +def test_initialize_encryption_spec_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateViewRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() - request.view.name = "name_value" + request.encryption_spec.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - call.return_value = resources.View() - client.update_view(request) + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -16459,26 +16415,30 @@ def test_update_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "view.name=name_value", + "encryption_spec.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_view_field_headers_async(): +async def test_initialize_encryption_spec_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.UpdateViewRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() - request.view.name = "name_value" + request.encryption_spec.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) - await client.update_view(request) + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -16489,39 +16449,37 @@ async def test_update_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "view.name=name_value", + "encryption_spec.name=name_value", ) in kw["metadata"] -def test_update_view_flattened(): +def test_initialize_encryption_spec_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_view( - view=resources.View(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.initialize_encryption_spec( + encryption_spec=resources.EncryptionSpec(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].view - mock_val = resources.View(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].encryption_spec + mock_val = resources.EncryptionSpec(name="name_value") assert arg == mock_val -def test_update_view_flattened_error(): +def test_initialize_encryption_spec_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16529,46 +16487,45 @@ def test_update_view_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_view( - contact_center_insights.UpdateViewRequest(), - view=resources.View(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.initialize_encryption_spec( + contact_center_insights.InitializeEncryptionSpecRequest(), + encryption_spec=resources.EncryptionSpec(name="name_value"), ) @pytest.mark.asyncio -async def test_update_view_flattened_async(): +async def test_initialize_encryption_spec_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object( + type(client.transport.initialize_encryption_spec), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.View() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_view( - view=resources.View(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.initialize_encryption_spec( + encryption_spec=resources.EncryptionSpec(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].view - mock_val = resources.View(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].encryption_spec + mock_val = resources.EncryptionSpec(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_update_view_flattened_error_async(): +async def test_initialize_encryption_spec_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16576,21 +16533,20 @@ async def test_update_view_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_view( - contact_center_insights.UpdateViewRequest(), - view=resources.View(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.initialize_encryption_spec( + contact_center_insights.InitializeEncryptionSpecRequest(), + encryption_spec=resources.EncryptionSpec(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteViewRequest, + contact_center_insights.CreateViewRequest, dict, ], ) -def test_delete_view(request_type, transport: str = "grpc"): +def test_create_view(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16601,22 +16557,29 @@ def test_delete_view(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_view(request) + call.return_value = resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + response = client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeleteViewRequest() + request = contact_center_insights.CreateViewRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" -def test_delete_view_empty_call(): +def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsClient( @@ -16625,17 +16588,17 @@ def test_delete_view_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_view() + client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteViewRequest() + assert args[0] == contact_center_insights.CreateViewRequest() -def test_delete_view_non_empty_request_with_auto_populated_field(): +def test_create_view_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = ContactCenterInsightsClient( @@ -16646,24 +16609,24 @@ def test_delete_view_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = contact_center_insights.DeleteViewRequest( - name="name_value", + request = contact_center_insights.CreateViewRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_view(request=request) + client.create_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteViewRequest( - name="name_value", + assert args[0] == contact_center_insights.CreateViewRequest( + parent="parent_value", ) -def test_delete_view_use_cached_wrapped_rpc(): +def test_create_view_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16677,21 +16640,21 @@ def test_delete_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_view in client._transport._wrapped_methods + assert client._transport.create_view in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc + client._transport._wrapped_methods[client._transport.create_view] = mock_rpc request = {} - client.delete_view(request) + client.create_view(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_view(request) + client.create_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16699,7 +16662,7 @@ def test_delete_view_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_view_empty_call_async(): +async def test_create_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ContactCenterInsightsAsyncClient( @@ -16708,17 +16671,23 @@ async def test_delete_view_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == contact_center_insights.DeleteViewRequest() + assert args[0] == contact_center_insights.CreateViewRequest() @pytest.mark.asyncio -async def test_delete_view_async_use_cached_wrapped_rpc( +async def test_create_view_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -16735,33 +16704,34 @@ async def test_delete_view_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_view + client._client._transport.create_view in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_view - ] = mock_object + client._client._transport.create_view + ] = mock_rpc request = {} - await client.delete_view(request) + await client.create_view(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.delete_view(request) + await client.create_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_view_async( +async def test_create_view_async( transport: str = "grpc_asyncio", - request_type=contact_center_insights.DeleteViewRequest, + request_type=contact_center_insights.CreateViewRequest, ): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16773,41 +16743,50 @@ async def test_delete_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = contact_center_insights.DeleteViewRequest() + request = contact_center_insights.CreateViewRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" @pytest.mark.asyncio -async def test_delete_view_async_from_dict(): - await test_delete_view_async(request_type=dict) +async def test_create_view_async_from_dict(): + await test_create_view_async(request_type=dict) -def test_delete_view_field_headers(): +def test_create_view_field_headers(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = contact_center_insights.DeleteViewRequest() + request = contact_center_insights.CreateViewRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - call.return_value = None - client.delete_view(request) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = resources.View() + client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16818,26 +16797,26 @@ def test_delete_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_view_field_headers_async(): +async def test_create_view_field_headers_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = contact_center_insights.DeleteViewRequest() + request = contact_center_insights.CreateViewRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_view(request) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -16848,35 +16827,39 @@ async def test_delete_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_view_flattened(): +def test_create_view_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.View() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_view( - name="name_value", + client.create_view( + parent="parent_value", + view=resources.View(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].view + mock_val = resources.View(name="name_value") assert arg == mock_val -def test_delete_view_flattened_error(): +def test_create_view_flattened_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16884,41 +16867,46 @@ def test_delete_view_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_view( - contact_center_insights.DeleteViewRequest(), - name="name_value", + client.create_view( + contact_center_insights.CreateViewRequest(), + parent="parent_value", + view=resources.View(name="name_value"), ) @pytest.mark.asyncio -async def test_delete_view_flattened_async(): +async def test_create_view_flattened_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.View() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_view( - name="name_value", + response = await client.create_view( + parent="parent_value", + view=resources.View(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].view + mock_val = resources.View(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_delete_view_flattened_error_async(): +async def test_create_view_flattened_error_async(): client = ContactCenterInsightsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16926,252 +16914,2785 @@ async def test_delete_view_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_view( - contact_center_insights.DeleteViewRequest(), - name="name_value", + await client.create_view( + contact_center_insights.CreateViewRequest(), + parent="parent_value", + view=resources.View(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreateConversationRequest, + contact_center_insights.GetViewRequest, dict, ], ) -def test_create_conversation_rest(request_type): +def test_get_view(request_type, transport: str = "grpc"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["conversation"] = { - "call_metadata": {"customer_channel": 1706, "agent_channel": 1351}, - "expire_time": {"seconds": 751, "nanos": 543}, - "ttl": {"seconds": 751, "nanos": 543}, - "name": "name_value", - "data_source": { - "gcs_source": { - "audio_uri": "audio_uri_value", - "transcript_uri": "transcript_uri_value", - }, - "dialogflow_source": { - "dialogflow_conversation": "dialogflow_conversation_value", - "audio_uri": "audio_uri_value", - }, - }, - "create_time": {}, - "update_time": {}, - "start_time": {}, - 
"language_code": "language_code_value", - "agent_id": "agent_id_value", - "labels": {}, - "quality_metadata": { - "customer_satisfaction_rating": 3005, - "wait_duration": {}, - "menu_path": "menu_path_value", - "agent_info": [ - { - "agent_id": "agent_id_value", - "display_name": "display_name_value", - "team": "team_value", - "disposition_code": "disposition_code_value", - } - ], - }, - "transcript": { - "transcript_segments": [ - { - "message_time": {}, - "text": "text_value", - "confidence": 0.1038, - "words": [ - { - "start_offset": {}, - "end_offset": {}, - "word": "word_value", - "confidence": 0.1038, - } - ], - "language_code": "language_code_value", - "channel_tag": 1140, - "segment_participant": { - "dialogflow_participant_name": "dialogflow_participant_name_value", - "user_id": "user_id_value", - "dialogflow_participant": "dialogflow_participant_value", - "obfuscated_external_user_id": "obfuscated_external_user_id_value", - "role": 1, - }, - "dialogflow_segment_metadata": { - "smart_reply_allowlist_covered": True - }, - "sentiment": {"magnitude": 0.9580000000000001, "score": 0.54}, - } - ] - }, - "medium": 1, - "duration": {}, - "turn_count": 1105, - "latest_analysis": { - "name": "name_value", - "request_time": {}, - "create_time": {}, - "analysis_result": { - "call_analysis_metadata": { - "annotations": [ - { - "interruption_data": {}, - "sentiment_data": {}, - "silence_data": {}, - "hold_data": {}, - "entity_mention_data": { - "entity_unique_id": "entity_unique_id_value", - "type_": 1, - "sentiment": {}, - }, - "intent_match_data": { - "intent_unique_id": "intent_unique_id_value" - }, - "phrase_match_data": { - "phrase_matcher": "phrase_matcher_value", - "display_name": "display_name_value", - }, - "issue_match_data": { - "issue_assignment": { - "issue": "issue_value", - "score": 0.54, - "display_name": "display_name_value", - } - }, - "channel_tag": 1140, - "annotation_start_boundary": { - "word_index": 1075, - "transcript_index": 1729, - }, - 
"annotation_end_boundary": {}, - } - ], - "entities": {}, - "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], - "intents": {}, - "phrase_matchers": {}, - "issue_model_result": { - "issue_model": "issue_model_value", - "issues": {}, - }, - }, - "end_time": {}, - }, - "annotator_selector": { - "run_interruption_annotator": True, - "run_silence_annotator": True, - "run_phrase_matcher_annotator": True, - "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], - "run_sentiment_annotator": True, - "run_entity_annotator": True, - "run_intent_annotator": True, - "run_issue_model_annotator": True, - "issue_models": ["issue_models_value1", "issue_models_value2"], - "run_summarization_annotator": True, - "summarization_config": { - "conversation_profile": "conversation_profile_value", - "summarization_model": 1, - }, - }, - }, - "latest_summary": { - "text": "text_value", - "text_sections": {}, - "confidence": 0.1038, - "metadata": {}, - "answer_record": "answer_record_value", - "conversation_model": "conversation_model_value", - }, - "runtime_annotations": [ - { - "article_suggestion": { - "title": "title_value", - "uri": "uri_value", - "confidence_score": 0.1673, - "metadata": {}, - "query_record": "query_record_value", - "source": "source_value", - }, - "faq_answer": { - "answer": "answer_value", - "confidence_score": 0.1673, - "question": "question_value", - "metadata": {}, - "query_record": "query_record_value", - "source": "source_value", - }, - "smart_reply": { - "reply": "reply_value", - "confidence_score": 0.1673, - "metadata": {}, - "query_record": "query_record_value", - }, - "smart_compose_suggestion": { - "suggestion": "suggestion_value", - "confidence_score": 0.1673, - "metadata": {}, - "query_record": "query_record_value", - }, - "dialogflow_interaction": { - "dialogflow_intent_id": "dialogflow_intent_id_value", - "confidence": 0.1038, - }, - "conversation_summarization_suggestion": {}, - "annotation_id": "annotation_id_value", - 
"create_time": {}, - "start_boundary": {}, - "end_boundary": {}, - "answer_feedback": { - "correctness_level": 1, - "clicked": True, - "displayed": True, - }, - } - ], - "dialogflow_intents": {}, - "obfuscated_user_id": "obfuscated_user_id_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.CreateConversationRequest.meta.fields[ - "conversation" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + response = client.get_view(request) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.GetViewRequest() + assert args[0] == request - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + # Establish that the response is the type that we expect. + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime +def test_get_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.GetViewRequest() + + +def test_get_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = contact_center_insights.GetViewRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.GetViewRequest( + name="name_value", + ) + + +def test_get_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_view] = mock_rpc + request = {} + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.get_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.GetViewRequest() + + +@pytest.mark.asyncio +async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_view + ] = mock_rpc + + request = {} + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_view_async( + transport: str = "grpc_asyncio", request_type=contact_center_insights.GetViewRequest +): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = contact_center_insights.GetViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" + + +@pytest.mark.asyncio +async def test_get_view_async_from_dict(): + await test_get_view_async(request_type=dict) + + +def test_get_view_field_headers(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = contact_center_insights.GetViewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = resources.View() + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_view_field_headers_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.GetViewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_view_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.View() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_view( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_view_flattened_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_view( + contact_center_insights.GetViewRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_view_flattened_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_view( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_view_flattened_error_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_view( + contact_center_insights.GetViewRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.ListViewsRequest, + dict, + ], +) +def test_list_views(request_type, transport: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = contact_center_insights.ListViewsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.ListViewsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListViewsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_views_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_views() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.ListViewsRequest() + + +def test_list_views_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.ListViewsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_views(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.ListViewsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_views_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_views in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_views] = mock_rpc + request = {} + client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_views_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListViewsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_views() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.ListViewsRequest() + + +@pytest.mark.asyncio +async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_views + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_views + ] = mock_rpc + + request = {} + await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_views_async( + transport: str = "grpc_asyncio", + request_type=contact_center_insights.ListViewsRequest, +): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListViewsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = contact_center_insights.ListViewsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListViewsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_views_async_from_dict(): + await test_list_views_async(request_type=dict) + + +def test_list_views_field_headers(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.ListViewsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value = contact_center_insights.ListViewsResponse() + client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_views_field_headers_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.ListViewsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListViewsResponse() + ) + await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_views_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = contact_center_insights.ListViewsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_views( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_views_flattened_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_views( + contact_center_insights.ListViewsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_views_flattened_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = contact_center_insights.ListViewsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + contact_center_insights.ListViewsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_views( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_views_flattened_error_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_views( + contact_center_insights.ListViewsRequest(), + parent="parent_value", + ) + + +def test_list_views_pager(transport_name: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + resources.View(), + ], + next_page_token="abc", + ), + contact_center_insights.ListViewsResponse( + views=[], + next_page_token="def", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_views(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.View) for i in results) + + +def test_list_views_pages(transport_name: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + resources.View(), + ], + next_page_token="abc", + ), + contact_center_insights.ListViewsResponse( + views=[], + next_page_token="def", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + ], + ), + RuntimeError, + ) + pages = list(client.list_views(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_views_async_pager(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + resources.View(), + ], + next_page_token="abc", + ), + contact_center_insights.ListViewsResponse( + views=[], + next_page_token="def", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_views( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.View) for i in responses) + + +@pytest.mark.asyncio +async def test_list_views_async_pages(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + resources.View(), + ], + next_page_token="abc", + ), + contact_center_insights.ListViewsResponse( + views=[], + next_page_token="def", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListViewsResponse( + views=[ + resources.View(), + resources.View(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_views(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdateViewRequest, + dict, + ], +) +def test_update_view(request_type, transport: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + response = client.update_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.UpdateViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" + + +def test_update_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.UpdateViewRequest() + + +def test_update_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.UpdateViewRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.UpdateViewRequest() + + +def test_update_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_view] = mock_rpc + request = {} + client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.update_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.UpdateViewRequest() + + +@pytest.mark.asyncio +async def test_update_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_view + ] = mock_rpc + + request = {} + await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_view_async( + transport: str = "grpc_asyncio", + request_type=contact_center_insights.UpdateViewRequest, +): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.View( + name="name_value", + display_name="display_name_value", + value="value_value", + ) + ) + response = await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = contact_center_insights.UpdateViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.View) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.value == "value_value" + + +@pytest.mark.asyncio +async def test_update_view_async_from_dict(): + await test_update_view_async(request_type=dict) + + +def test_update_view_field_headers(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = contact_center_insights.UpdateViewRequest() + + request.view.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = resources.View() + client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "view.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_view_field_headers_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.UpdateViewRequest() + + request.view.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "view.name=name_value", + ) in kw["metadata"] + + +def test_update_view_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.View() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_view( + view=resources.View(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].view + mock_val = resources.View(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_view_flattened_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_view( + contact_center_insights.UpdateViewRequest(), + view=resources.View(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_view_flattened_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.View() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.View()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_view( + view=resources.View(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].view + mock_val = resources.View(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_view_flattened_error_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_view( + contact_center_insights.UpdateViewRequest(), + view=resources.View(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.DeleteViewRequest, + dict, + ], +) +def test_delete_view(request_type, transport: str = "grpc"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = contact_center_insights.DeleteViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.DeleteViewRequest() + + +def test_delete_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = contact_center_insights.DeleteViewRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.DeleteViewRequest( + name="name_value", + ) + + +def test_delete_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc + request = {} + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == contact_center_insights.DeleteViewRequest() + + +@pytest.mark.asyncio +async def test_delete_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_view + ] = mock_rpc + + request = {} + await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_view_async( + transport: str = "grpc_asyncio", + request_type=contact_center_insights.DeleteViewRequest, +): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = contact_center_insights.DeleteViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_view_async_from_dict(): + await test_delete_view_async(request_type=dict) + + +def test_delete_view_field_headers(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = contact_center_insights.DeleteViewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = None + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_view_field_headers_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = contact_center_insights.DeleteViewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_view_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_view( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_view_flattened_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_view( + contact_center_insights.DeleteViewRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_view_flattened_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_view( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_view_flattened_error_async(): + client = ContactCenterInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_view( + contact_center_insights.DeleteViewRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.CreateConversationRequest, + dict, + ], +) +def test_create_conversation_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["conversation"] = { + "call_metadata": {"customer_channel": 1706, "agent_channel": 1351}, + "expire_time": {"seconds": 751, "nanos": 543}, + "ttl": {"seconds": 751, "nanos": 543}, + "name": "name_value", + "data_source": { + "gcs_source": { + "audio_uri": "audio_uri_value", + "transcript_uri": "transcript_uri_value", + }, + "dialogflow_source": { + "dialogflow_conversation": "dialogflow_conversation_value", + "audio_uri": "audio_uri_value", + }, + }, + "create_time": {}, + "update_time": {}, + "start_time": {}, + "language_code": "language_code_value", + "agent_id": "agent_id_value", + "labels": {}, + "quality_metadata": { + "customer_satisfaction_rating": 3005, + "wait_duration": {}, + "menu_path": "menu_path_value", + "agent_info": [ + { + "agent_id": "agent_id_value", + "display_name": "display_name_value", + "team": "team_value", + "disposition_code": "disposition_code_value", + } + ], + }, + "metadata_json": "metadata_json_value", + "transcript": { + "transcript_segments": [ + { + "message_time": {}, + "text": "text_value", + "confidence": 0.1038, + "words": [ + { + "start_offset": {}, + "end_offset": {}, + "word": "word_value", + "confidence": 0.1038, + } + ], + "language_code": "language_code_value", + "channel_tag": 1140, + "segment_participant": { + "dialogflow_participant_name": "dialogflow_participant_name_value", + "user_id": "user_id_value", + "dialogflow_participant": "dialogflow_participant_value", + 
"obfuscated_external_user_id": "obfuscated_external_user_id_value", + "role": 1, + }, + "dialogflow_segment_metadata": { + "smart_reply_allowlist_covered": True + }, + "sentiment": {"magnitude": 0.9580000000000001, "score": 0.54}, + } + ] + }, + "medium": 1, + "duration": {}, + "turn_count": 1105, + "latest_analysis": { + "name": "name_value", + "request_time": {}, + "create_time": {}, + "analysis_result": { + "call_analysis_metadata": { + "annotations": [ + { + "interruption_data": {}, + "sentiment_data": {}, + "silence_data": {}, + "hold_data": {}, + "entity_mention_data": { + "entity_unique_id": "entity_unique_id_value", + "type_": 1, + "sentiment": {}, + }, + "intent_match_data": { + "intent_unique_id": "intent_unique_id_value" + }, + "phrase_match_data": { + "phrase_matcher": "phrase_matcher_value", + "display_name": "display_name_value", + }, + "issue_match_data": { + "issue_assignment": { + "issue": "issue_value", + "score": 0.54, + "display_name": "display_name_value", + } + }, + "channel_tag": 1140, + "annotation_start_boundary": { + "word_index": 1075, + "transcript_index": 1729, + }, + "annotation_end_boundary": {}, + } + ], + "entities": {}, + "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], + "silence": {"silence_duration": {}, "silence_percentage": 0.1888}, + "intents": {}, + "phrase_matchers": {}, + "issue_model_result": { + "issue_model": "issue_model_value", + "issues": {}, + }, + }, + "end_time": {}, + }, + "annotator_selector": { + "run_interruption_annotator": True, + "run_silence_annotator": True, + "run_phrase_matcher_annotator": True, + "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], + "run_sentiment_annotator": True, + "run_entity_annotator": True, + "run_intent_annotator": True, + "run_issue_model_annotator": True, + "issue_models": ["issue_models_value1", "issue_models_value2"], + "run_summarization_annotator": True, + "summarization_config": { + "conversation_profile": "conversation_profile_value", + 
"summarization_model": 1, + }, + }, + }, + "latest_summary": { + "text": "text_value", + "text_sections": {}, + "confidence": 0.1038, + "metadata": {}, + "answer_record": "answer_record_value", + "conversation_model": "conversation_model_value", + }, + "runtime_annotations": [ + { + "article_suggestion": { + "title": "title_value", + "uri": "uri_value", + "confidence_score": 0.1673, + "metadata": {}, + "query_record": "query_record_value", + "source": "source_value", + }, + "faq_answer": { + "answer": "answer_value", + "confidence_score": 0.1673, + "question": "question_value", + "metadata": {}, + "query_record": "query_record_value", + "source": "source_value", + }, + "smart_reply": { + "reply": "reply_value", + "confidence_score": 0.1673, + "metadata": {}, + "query_record": "query_record_value", + }, + "smart_compose_suggestion": { + "suggestion": "suggestion_value", + "confidence_score": 0.1673, + "metadata": {}, + "query_record": "query_record_value", + }, + "dialogflow_interaction": { + "dialogflow_intent_id": "dialogflow_intent_id_value", + "confidence": 0.1038, + }, + "conversation_summarization_suggestion": {}, + "annotation_id": "annotation_id_value", + "create_time": {}, + "start_boundary": {}, + "end_boundary": {}, + "answer_feedback": { + "correctness_level": 1, + "clicked": True, + "displayed": True, + }, + "user_input": { + "query": "query_value", + "generator_name": "generator_name_value", + "query_source": 1, + }, + } + ], + "dialogflow_intents": {}, + "obfuscated_user_id": "obfuscated_user_id_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.CreateConversationRequest.meta.fields[ + "conversation" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["conversation"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime 
version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["conversation"][field])): + del request_init["conversation"][field][i][subfield] + else: + del request_init["conversation"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation( + name="name_value", + language_code="language_code_value", + agent_id="agent_id_value", + metadata_json="metadata_json_value", + medium=resources.Conversation.Medium.PHONE_CALL, + turn_count=1105, + obfuscated_user_id="obfuscated_user_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_conversation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Conversation) + assert response.name == "name_value" + assert response.language_code == "language_code_value" + assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" + assert response.medium == resources.Conversation.Medium.PHONE_CALL + assert response.turn_count == 1105 + assert response.obfuscated_user_id == "obfuscated_user_id_value" + + +def test_create_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_conversation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_conversation + ] = mock_rpc + + request = {} + client.create_conversation(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_conversation_rest_required_fields( + request_type=contact_center_insights.CreateConversationRequest, +): + transport_class = transports.ContactCenterInsightsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_conversation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("conversation_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_conversation_rest_unset_required_fields(): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_conversation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("conversationId",)) + & set( + ( + "parent", + "conversation", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_conversation_rest_interceptors(null_interceptor): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ContactCenterInsightsRestInterceptor(), + ) + client = ContactCenterInsightsClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_create_conversation" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_create_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = contact_center_insights.CreateConversationRequest.pb( + contact_center_insights.CreateConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Conversation.to_json( + resources.Conversation() + ) + + request = contact_center_insights.CreateConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Conversation() + + client.create_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_conversation_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.CreateConversationRequest, +): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_conversation(request) + + +def test_create_conversation_rest_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + conversation=resources.Conversation( + call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) + ), + conversation_id="conversation_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/conversations" + % client.transport._host, + args[1], + ) + + +def test_create_conversation_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_conversation( + contact_center_insights.CreateConversationRequest(), + parent="parent_value", + conversation=resources.Conversation( + call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) + ), + conversation_id="conversation_id_value", + ) + + +def test_create_conversation_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UploadConversationRequest, + dict, + ], +) +def test_upload_conversation_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.upload_conversation(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_upload_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.upload_conversation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.upload_conversation + ] = mock_rpc + + request = {} + client.upload_conversation(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.upload_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_upload_conversation_rest_required_fields( + request_type=contact_center_insights.UploadConversationRequest, +): + transport_class = transports.ContactCenterInsightsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).upload_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).upload_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.upload_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_upload_conversation_rest_unset_required_fields(): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.upload_conversation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "conversation", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_upload_conversation_rest_interceptors(null_interceptor): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ContactCenterInsightsRestInterceptor(), + ) + client = ContactCenterInsightsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_upload_conversation" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_upload_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = contact_center_insights.UploadConversationRequest.pb( + contact_center_insights.UploadConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = contact_center_insights.UploadConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.upload_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_upload_conversation_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.UploadConversationRequest, +): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.upload_conversation(request) + + +def test_upload_conversation_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdateConversationRequest, + dict, + ], +) +def test_update_conversation_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "conversation": { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + } + request_init["conversation"] = { + "call_metadata": {"customer_channel": 1706, "agent_channel": 1351}, + "expire_time": {"seconds": 751, "nanos": 543}, + "ttl": {"seconds": 751, "nanos": 543}, + "name": "projects/sample1/locations/sample2/conversations/sample3", + "data_source": { + "gcs_source": { + "audio_uri": "audio_uri_value", + "transcript_uri": "transcript_uri_value", + }, + "dialogflow_source": { + "dialogflow_conversation": "dialogflow_conversation_value", + "audio_uri": "audio_uri_value", + }, + }, + "create_time": {}, + "update_time": {}, + "start_time": {}, + "language_code": "language_code_value", + "agent_id": "agent_id_value", + "labels": {}, + "quality_metadata": { + "customer_satisfaction_rating": 3005, + "wait_duration": {}, + "menu_path": "menu_path_value", + "agent_info": [ + { + "agent_id": "agent_id_value", + "display_name": "display_name_value", + "team": "team_value", + "disposition_code": "disposition_code_value", + } + ], + }, + "metadata_json": "metadata_json_value", + "transcript": { + "transcript_segments": [ + 
{ + "message_time": {}, + "text": "text_value", + "confidence": 0.1038, + "words": [ + { + "start_offset": {}, + "end_offset": {}, + "word": "word_value", + "confidence": 0.1038, + } + ], + "language_code": "language_code_value", + "channel_tag": 1140, + "segment_participant": { + "dialogflow_participant_name": "dialogflow_participant_name_value", + "user_id": "user_id_value", + "dialogflow_participant": "dialogflow_participant_value", + "obfuscated_external_user_id": "obfuscated_external_user_id_value", + "role": 1, + }, + "dialogflow_segment_metadata": { + "smart_reply_allowlist_covered": True + }, + "sentiment": {"magnitude": 0.9580000000000001, "score": 0.54}, + } + ] + }, + "medium": 1, + "duration": {}, + "turn_count": 1105, + "latest_analysis": { + "name": "name_value", + "request_time": {}, + "create_time": {}, + "analysis_result": { + "call_analysis_metadata": { + "annotations": [ + { + "interruption_data": {}, + "sentiment_data": {}, + "silence_data": {}, + "hold_data": {}, + "entity_mention_data": { + "entity_unique_id": "entity_unique_id_value", + "type_": 1, + "sentiment": {}, + }, + "intent_match_data": { + "intent_unique_id": "intent_unique_id_value" + }, + "phrase_match_data": { + "phrase_matcher": "phrase_matcher_value", + "display_name": "display_name_value", + }, + "issue_match_data": { + "issue_assignment": { + "issue": "issue_value", + "score": 0.54, + "display_name": "display_name_value", + } + }, + "channel_tag": 1140, + "annotation_start_boundary": { + "word_index": 1075, + "transcript_index": 1729, + }, + "annotation_end_boundary": {}, + } + ], + "entities": {}, + "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], + "silence": {"silence_duration": {}, "silence_percentage": 0.1888}, + "intents": {}, + "phrase_matchers": {}, + "issue_model_result": { + "issue_model": "issue_model_value", + "issues": {}, + }, + }, + "end_time": {}, + }, + "annotator_selector": { + "run_interruption_annotator": True, + "run_silence_annotator": True, + 
"run_phrase_matcher_annotator": True, + "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], + "run_sentiment_annotator": True, + "run_entity_annotator": True, + "run_intent_annotator": True, + "run_issue_model_annotator": True, + "issue_models": ["issue_models_value1", "issue_models_value2"], + "run_summarization_annotator": True, + "summarization_config": { + "conversation_profile": "conversation_profile_value", + "summarization_model": 1, + }, + }, + }, + "latest_summary": { + "text": "text_value", + "text_sections": {}, + "confidence": 0.1038, + "metadata": {}, + "answer_record": "answer_record_value", + "conversation_model": "conversation_model_value", + }, + "runtime_annotations": [ + { + "article_suggestion": { + "title": "title_value", + "uri": "uri_value", + "confidence_score": 0.1673, + "metadata": {}, + "query_record": "query_record_value", + "source": "source_value", + }, + "faq_answer": { + "answer": "answer_value", + "confidence_score": 0.1673, + "question": "question_value", + "metadata": {}, + "query_record": "query_record_value", + "source": "source_value", + }, + "smart_reply": { + "reply": "reply_value", + "confidence_score": 0.1673, + "metadata": {}, + "query_record": "query_record_value", + }, + "smart_compose_suggestion": { + "suggestion": "suggestion_value", + "confidence_score": 0.1673, + "metadata": {}, + "query_record": "query_record_value", + }, + "dialogflow_interaction": { + "dialogflow_intent_id": "dialogflow_intent_id_value", + "confidence": 0.1038, + }, + "conversation_summarization_suggestion": {}, + "annotation_id": "annotation_id_value", + "create_time": {}, + "start_boundary": {}, + "end_boundary": {}, + "answer_feedback": { + "correctness_level": 1, + "clicked": True, + "displayed": True, + }, + "user_input": { + "query": "query_value", + "generator_name": "generator_name_value", + "query_source": 1, + }, + } + ], + "dialogflow_intents": {}, + "obfuscated_user_id": "obfuscated_user_id_value", + } + # The 
version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdateConversationRequest.meta.fields[ + "conversation" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime for field, value in request_init["conversation"].items(): # pragma: NO COVER result = None @@ -17184,65 +19705,710 @@ def get_message_fields(field): if isinstance(value, dict): result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in 
runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["conversation"][field])): + del request_init["conversation"][field][i][subfield] + else: + del request_init["conversation"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation( + name="name_value", + language_code="language_code_value", + agent_id="agent_id_value", + metadata_json="metadata_json_value", + medium=resources.Conversation.Medium.PHONE_CALL, + turn_count=1105, + obfuscated_user_id="obfuscated_user_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_conversation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Conversation) + assert response.name == "name_value" + assert response.language_code == "language_code_value" + assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" + assert response.medium == resources.Conversation.Medium.PHONE_CALL + assert response.turn_count == 1105 + assert response.obfuscated_user_id == "obfuscated_user_id_value" + + +def test_update_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_conversation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_conversation + ] = mock_rpc + + request = {} + client.update_conversation(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_conversation_rest_required_fields( + request_type=contact_center_insights.UpdateConversationRequest, +): + transport_class = transports.ContactCenterInsightsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_conversation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_conversation_rest_unset_required_fields(): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_conversation._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("conversation",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_conversation_rest_interceptors(null_interceptor): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ContactCenterInsightsRestInterceptor(), + ) + client = ContactCenterInsightsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_update_conversation" + ) as post, mock.patch.object( + 
transports.ContactCenterInsightsRestInterceptor, "pre_update_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = contact_center_insights.UpdateConversationRequest.pb( + contact_center_insights.UpdateConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Conversation.to_json( + resources.Conversation() + ) + + request = contact_center_insights.UpdateConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Conversation() + + client.update_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_conversation_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.UpdateConversationRequest, +): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "conversation": { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_conversation(request) + + +def test_update_conversation_rest_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "conversation": { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + conversation=resources.Conversation( + call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{conversation.name=projects/*/locations/*/conversations/*}" + % client.transport._host, + args[1], + ) + + +def test_update_conversation_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_conversation( + contact_center_insights.UpdateConversationRequest(), + conversation=resources.Conversation( + call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_conversation_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.GetConversationRequest, + dict, + ], +) +def test_get_conversation_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.Conversation( + name="name_value", + language_code="language_code_value", + agent_id="agent_id_value", + metadata_json="metadata_json_value", + medium=resources.Conversation.Medium.PHONE_CALL, + turn_count=1105, + obfuscated_user_id="obfuscated_user_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_conversation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Conversation) + assert response.name == "name_value" + assert response.language_code == "language_code_value" + assert response.agent_id == "agent_id_value" + assert response.metadata_json == "metadata_json_value" + assert response.medium == resources.Conversation.Medium.PHONE_CALL + assert response.turn_count == 1105 + assert response.obfuscated_user_id == "obfuscated_user_id_value" + + +def test_get_conversation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_conversation in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_conversation + ] = mock_rpc + + request = {} + client.get_conversation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_conversation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_conversation_rest_required_fields( + request_type=contact_center_insights.GetConversationRequest, +): + transport_class = transports.ContactCenterInsightsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_conversation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_conversation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = resources.Conversation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_conversation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_conversation_rest_unset_required_fields(): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_conversation._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_conversation_rest_interceptors(null_interceptor): + transport = transports.ContactCenterInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ContactCenterInsightsRestInterceptor(), + ) + client = ContactCenterInsightsClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_get_conversation" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_get_conversation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = contact_center_insights.GetConversationRequest.pb( + contact_center_insights.GetConversationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Conversation.to_json( + resources.Conversation() + ) + + request = contact_center_insights.GetConversationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Conversation() + + client.get_conversation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_conversation_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.GetConversationRequest +): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_conversation(request) + + +def test_get_conversation_rest_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Conversation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Conversation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/conversations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_conversation_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_conversation( + contact_center_insights.GetConversationRequest(), + name="name_value", + ) + + +def test_get_conversation_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.ListConversationsRequest, + dict, + ], +) +def test_list_conversations_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["conversation"][field])): - del request_init["conversation"][field][i][subfield] - else: - del request_init["conversation"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Conversation( - name="name_value", - language_code="language_code_value", - agent_id="agent_id_value", - medium=resources.Conversation.Medium.PHONE_CALL, - turn_count=1105, - obfuscated_user_id="obfuscated_user_id_value", + return_value = contact_center_insights.ListConversationsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = contact_center_insights.ListConversationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_conversation(request) + response = client.list_conversations(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Conversation) - assert response.name == "name_value" - assert response.language_code == "language_code_value" - assert response.agent_id == "agent_id_value" - assert response.medium == resources.Conversation.Medium.PHONE_CALL - assert response.turn_count == 1105 - assert response.obfuscated_user_id == "obfuscated_user_id_value" + assert isinstance(response, pagers.ListConversationsPager) + assert response.next_page_token == "next_page_token_value" -def test_create_conversation_rest_use_cached_wrapped_rpc(): +def test_list_conversations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17257,7 +20423,7 @@ def test_create_conversation_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_conversation in client._transport._wrapped_methods + client._transport.list_conversations in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -17266,24 +20432,24 @@ def test_create_conversation_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_conversation + client._transport.list_conversations ] = mock_rpc request = {} - client.create_conversation(request) + client.list_conversations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_conversation(request) + client.list_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_conversation_rest_required_fields( - request_type=contact_center_insights.CreateConversationRequest, +def test_list_conversations_rest_required_fields( + request_type=contact_center_insights.ListConversationsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -17299,7 +20465,7 @@ def test_create_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_conversation._get_unset_required_fields(jsonified_request) + ).list_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -17308,9 +20474,17 @@ def test_create_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_conversation._get_unset_required_fields(jsonified_request) + ).list_conversations._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("conversation_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17324,7 +20498,7 @@ def test_create_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Conversation() + return_value = contact_center_insights.ListConversationsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17336,48 +20510,52 @@ def test_create_conversation_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = contact_center_insights.ListConversationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_conversation(request) + response = client.list_conversations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_conversation_rest_unset_required_fields(): +def test_list_conversations_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_conversation._get_unset_required_fields({}) + unset_fields = transport.list_conversations._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("conversationId",)) - & set( + set( ( - "parent", - "conversation", + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_conversation_rest_interceptors(null_interceptor): +def test_list_conversations_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17390,14 +20568,14 @@ def 
test_create_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_create_conversation" + transports.ContactCenterInsightsRestInterceptor, "post_list_conversations" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_create_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_list_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CreateConversationRequest.pb( - contact_center_insights.CreateConversationRequest() + pb_message = contact_center_insights.ListConversationsRequest.pb( + contact_center_insights.ListConversationsRequest() ) transcode.return_value = { "method": "post", @@ -17409,19 +20587,21 @@ def test_create_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Conversation.to_json( - resources.Conversation() + req.return_value._content = ( + contact_center_insights.ListConversationsResponse.to_json( + contact_center_insights.ListConversationsResponse() + ) ) - request = contact_center_insights.CreateConversationRequest() + request = contact_center_insights.ListConversationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Conversation() + post.return_value = contact_center_insights.ListConversationsResponse() - client.create_conversation( + client.list_conversations( request, metadata=[ ("key", "val"), @@ -17433,9 +20613,9 @@ def test_create_conversation_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_conversation_rest_bad_request( +def test_list_conversations_rest_bad_request( transport: str = "rest", - 
request_type=contact_center_insights.CreateConversationRequest, + request_type=contact_center_insights.ListConversationsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17455,10 +20635,10 @@ def test_create_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_conversation(request) + client.list_conversations(request) -def test_create_conversation_rest_flattened(): +def test_list_conversations_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17467,7 +20647,7 @@ def test_create_conversation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Conversation() + return_value = contact_center_insights.ListConversationsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -17475,10 +20655,6 @@ def test_create_conversation_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - conversation=resources.Conversation( - call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) - ), - conversation_id="conversation_id_value", ) mock_args.update(sample_request) @@ -17486,12 +20662,14 @@ def test_create_conversation_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = contact_center_insights.ListConversationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value - client.create_conversation(**mock_args) + client.list_conversations(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -17504,7 +20682,7 @@ def test_create_conversation_rest_flattened(): ) -def test_create_conversation_rest_flattened_error(transport: str = "rest"): +def test_list_conversations_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17513,58 +20691,112 @@ def test_create_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_conversation( - contact_center_insights.CreateConversationRequest(), + client.list_conversations( + contact_center_insights.ListConversationsRequest(), parent="parent_value", - conversation=resources.Conversation( - call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) - ), - conversation_id="conversation_id_value", ) -def test_create_conversation_rest_error(): +def test_list_conversations_rest_pager(transport: str = "rest"): client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + contact_center_insights.ListConversationsResponse( + conversations=[ + resources.Conversation(), + resources.Conversation(), + resources.Conversation(), + ], + next_page_token="abc", + ), + contact_center_insights.ListConversationsResponse( + conversations=[], + next_page_token="def", + ), + contact_center_insights.ListConversationsResponse( + conversations=[ + resources.Conversation(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListConversationsResponse( + conversations=[ + resources.Conversation(), + resources.Conversation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + contact_center_insights.ListConversationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_conversations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Conversation) for i in results) + + pages = list(client.list_conversations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UploadConversationRequest, + contact_center_insights.DeleteConversationRequest, dict, ], ) -def test_upload_conversation_rest(request_type): +def test_delete_conversation_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy 
transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.upload_conversation(request) + response = client.delete_conversation(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert response is None -def test_upload_conversation_rest_use_cached_wrapped_rpc(): +def test_delete_conversation_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17579,7 +20811,7 @@ def test_upload_conversation_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.upload_conversation in client._transport._wrapped_methods + client._transport.delete_conversation in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -17588,33 +20820,29 @@ def test_upload_conversation_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.upload_conversation + client._transport.delete_conversation ] = mock_rpc request = {} - client.upload_conversation(request) + client.delete_conversation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.upload_conversation(request) + client.delete_conversation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_upload_conversation_rest_required_fields( - request_type=contact_center_insights.UploadConversationRequest, +def test_delete_conversation_rest_required_fields( + request_type=contact_center_insights.DeleteConversationRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17625,21 +20853,23 @@ def test_upload_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).upload_conversation._get_unset_required_fields(jsonified_request) + ).delete_conversation._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).upload_conversation._get_unset_required_fields(jsonified_request) + ).delete_conversation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("force",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17648,7 +20878,7 @@ def test_upload_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17660,45 +20890,36 @@ def test_upload_conversation_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.upload_conversation(request) + response = client.delete_conversation(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_upload_conversation_rest_unset_required_fields(): +def test_delete_conversation_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.upload_conversation._get_unset_required_fields({}) - assert set(unset_fields) 
== ( - set(()) - & set( - ( - "parent", - "conversation", - ) - ) - ) + unset_fields = transport.delete_conversation._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_upload_conversation_rest_interceptors(null_interceptor): +def test_delete_conversation_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17711,16 +20932,11 @@ def test_upload_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_upload_conversation" - ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_upload_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_conversation" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = contact_center_insights.UploadConversationRequest.pb( - contact_center_insights.UploadConversationRequest() + pb_message = contact_center_insights.DeleteConversationRequest.pb( + contact_center_insights.DeleteConversationRequest() ) transcode.return_value = { "method": "post", @@ -17732,19 +20948,15 @@ def test_upload_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - request = contact_center_insights.UploadConversationRequest() + request = contact_center_insights.DeleteConversationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - 
client.upload_conversation( + client.delete_conversation( request, metadata=[ ("key", "val"), @@ -17753,12 +20965,11 @@ def test_upload_conversation_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_upload_conversation_rest_bad_request( +def test_delete_conversation_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.UploadConversationRequest, + request_type=contact_center_insights.DeleteConversationRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17766,7 +20977,7 @@ def test_upload_conversation_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17778,10 +20989,67 @@ def test_upload_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.upload_conversation(request) + client.delete_conversation(request) -def test_upload_conversation_rest_error(): +def test_delete_conversation_rest_flattened(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/conversations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_conversation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/conversations/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_conversation_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_conversation( + contact_center_insights.DeleteConversationRequest(), + name="name_value", + ) + + +def test_delete_conversation_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -17790,11 +21058,11 @@ def test_upload_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdateConversationRequest, + contact_center_insights.CreateAnalysisRequest, dict, ], ) -def test_update_conversation_rest(request_type): +def test_create_analysis_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17802,209 +21070,88 @@ def test_update_conversation_rest(request_type): # send a request that will satisfy transcoding request_init = { - "conversation": { - "name": "projects/sample1/locations/sample2/conversations/sample3" - } + "parent": "projects/sample1/locations/sample2/conversations/sample3" } - request_init["conversation"] = { - "call_metadata": {"customer_channel": 1706, "agent_channel": 1351}, - "expire_time": {"seconds": 751, "nanos": 543}, - "ttl": {"seconds": 751, "nanos": 543}, - "name": "projects/sample1/locations/sample2/conversations/sample3", - "data_source": { - "gcs_source": { - "audio_uri": "audio_uri_value", - "transcript_uri": "transcript_uri_value", - }, - "dialogflow_source": { - "dialogflow_conversation": "dialogflow_conversation_value", - "audio_uri": "audio_uri_value", - }, - }, + request_init["analysis"] = { + "name": "name_value", + "request_time": {"seconds": 751, "nanos": 543}, "create_time": {}, - "update_time": {}, - "start_time": {}, - "language_code": "language_code_value", - "agent_id": "agent_id_value", - "labels": {}, - "quality_metadata": { - "customer_satisfaction_rating": 3005, - "wait_duration": {}, - "menu_path": "menu_path_value", - "agent_info": [ - { - "agent_id": "agent_id_value", - "display_name": 
"display_name_value", - "team": "team_value", - "disposition_code": "disposition_code_value", - } - ], - }, - "transcript": { - "transcript_segments": [ - { - "message_time": {}, - "text": "text_value", - "confidence": 0.1038, - "words": [ - { - "start_offset": {}, - "end_offset": {}, - "word": "word_value", - "confidence": 0.1038, - } - ], - "language_code": "language_code_value", - "channel_tag": 1140, - "segment_participant": { - "dialogflow_participant_name": "dialogflow_participant_name_value", - "user_id": "user_id_value", - "dialogflow_participant": "dialogflow_participant_value", - "obfuscated_external_user_id": "obfuscated_external_user_id_value", - "role": 1, - }, - "dialogflow_segment_metadata": { - "smart_reply_allowlist_covered": True - }, - "sentiment": {"magnitude": 0.9580000000000001, "score": 0.54}, - } - ] - }, - "medium": 1, - "duration": {}, - "turn_count": 1105, - "latest_analysis": { - "name": "name_value", - "request_time": {}, - "create_time": {}, - "analysis_result": { - "call_analysis_metadata": { - "annotations": [ - { - "interruption_data": {}, - "sentiment_data": {}, - "silence_data": {}, - "hold_data": {}, - "entity_mention_data": { - "entity_unique_id": "entity_unique_id_value", - "type_": 1, - "sentiment": {}, - }, - "intent_match_data": { - "intent_unique_id": "intent_unique_id_value" - }, - "phrase_match_data": { - "phrase_matcher": "phrase_matcher_value", - "display_name": "display_name_value", - }, - "issue_match_data": { - "issue_assignment": { - "issue": "issue_value", - "score": 0.54, - "display_name": "display_name_value", - } - }, - "channel_tag": 1140, - "annotation_start_boundary": { - "word_index": 1075, - "transcript_index": 1729, - }, - "annotation_end_boundary": {}, - } - ], - "entities": {}, - "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], - "intents": {}, - "phrase_matchers": {}, - "issue_model_result": { - "issue_model": "issue_model_value", - "issues": {}, - }, + "analysis_result": { + 
"call_analysis_metadata": { + "annotations": [ + { + "interruption_data": {}, + "sentiment_data": { + "magnitude": 0.9580000000000001, + "score": 0.54, + }, + "silence_data": {}, + "hold_data": {}, + "entity_mention_data": { + "entity_unique_id": "entity_unique_id_value", + "type_": 1, + "sentiment": {}, + }, + "intent_match_data": { + "intent_unique_id": "intent_unique_id_value" + }, + "phrase_match_data": { + "phrase_matcher": "phrase_matcher_value", + "display_name": "display_name_value", + }, + "issue_match_data": { + "issue_assignment": { + "issue": "issue_value", + "score": 0.54, + "display_name": "display_name_value", + } + }, + "channel_tag": 1140, + "annotation_start_boundary": { + "word_index": 1075, + "transcript_index": 1729, + }, + "annotation_end_boundary": {}, + } + ], + "entities": {}, + "sentiments": [{"channel_tag": 1140, "sentiment_data": {}}], + "silence": { + "silence_duration": {"seconds": 751, "nanos": 543}, + "silence_percentage": 0.1888, }, - "end_time": {}, - }, - "annotator_selector": { - "run_interruption_annotator": True, - "run_silence_annotator": True, - "run_phrase_matcher_annotator": True, - "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], - "run_sentiment_annotator": True, - "run_entity_annotator": True, - "run_intent_annotator": True, - "run_issue_model_annotator": True, - "issue_models": ["issue_models_value1", "issue_models_value2"], - "run_summarization_annotator": True, - "summarization_config": { - "conversation_profile": "conversation_profile_value", - "summarization_model": 1, + "intents": {}, + "phrase_matchers": {}, + "issue_model_result": { + "issue_model": "issue_model_value", + "issues": {}, }, }, + "end_time": {}, }, - "latest_summary": { - "text": "text_value", - "text_sections": {}, - "confidence": 0.1038, - "metadata": {}, - "answer_record": "answer_record_value", - "conversation_model": "conversation_model_value", + "annotator_selector": { + "run_interruption_annotator": True, + 
"run_silence_annotator": True, + "run_phrase_matcher_annotator": True, + "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], + "run_sentiment_annotator": True, + "run_entity_annotator": True, + "run_intent_annotator": True, + "run_issue_model_annotator": True, + "issue_models": ["issue_models_value1", "issue_models_value2"], + "run_summarization_annotator": True, + "summarization_config": { + "conversation_profile": "conversation_profile_value", + "summarization_model": 1, + }, }, - "runtime_annotations": [ - { - "article_suggestion": { - "title": "title_value", - "uri": "uri_value", - "confidence_score": 0.1673, - "metadata": {}, - "query_record": "query_record_value", - "source": "source_value", - }, - "faq_answer": { - "answer": "answer_value", - "confidence_score": 0.1673, - "question": "question_value", - "metadata": {}, - "query_record": "query_record_value", - "source": "source_value", - }, - "smart_reply": { - "reply": "reply_value", - "confidence_score": 0.1673, - "metadata": {}, - "query_record": "query_record_value", - }, - "smart_compose_suggestion": { - "suggestion": "suggestion_value", - "confidence_score": 0.1673, - "metadata": {}, - "query_record": "query_record_value", - }, - "dialogflow_interaction": { - "dialogflow_intent_id": "dialogflow_intent_id_value", - "confidence": 0.1038, - }, - "conversation_summarization_suggestion": {}, - "annotation_id": "annotation_id_value", - "create_time": {}, - "start_boundary": {}, - "end_boundary": {}, - "answer_feedback": { - "correctness_level": 1, - "clicked": True, - "displayed": True, - }, - } - ], - "dialogflow_intents": {}, - "obfuscated_user_id": "obfuscated_user_id_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdateConversationRequest.meta.fields[ - "conversation" - ] + test_field = contact_center_insights.CreateAnalysisRequest.meta.fields["analysis"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -18032,7 +21179,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["conversation"].items(): # pragma: NO COVER + for field, value in request_init["analysis"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -18062,46 +21209,31 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["conversation"][field])): - del request_init["conversation"][field][i][subfield] + for i in range(0, len(request_init["analysis"][field])): + del request_init["analysis"][field][i][subfield] else: - del request_init["conversation"][field][subfield] + del request_init["analysis"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Conversation( - name="name_value", - language_code="language_code_value", - agent_id="agent_id_value", - medium=resources.Conversation.Medium.PHONE_CALL, - turn_count=1105, - obfuscated_user_id="obfuscated_user_id_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_conversation(request) + response = client.create_analysis(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Conversation) - assert response.name == "name_value" - assert response.language_code == "language_code_value" - assert response.agent_id == "agent_id_value" - assert response.medium == resources.Conversation.Medium.PHONE_CALL - assert response.turn_count == 1105 - assert response.obfuscated_user_id == "obfuscated_user_id_value" + assert response.operation.name == "operations/spam" -def test_update_conversation_rest_use_cached_wrapped_rpc(): +def test_create_analysis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18115,38 +21247,39 @@ def test_update_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_conversation in client._transport._wrapped_methods - ) + assert client._transport.create_analysis in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # 
operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_analysis] = mock_rpc request = {} - client.update_conversation(request) + client.create_analysis(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_conversation(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_conversation_rest_required_fields( - request_type=contact_center_insights.UpdateConversationRequest, +def test_create_analysis_rest_required_fields( + request_type=contact_center_insights.CreateAnalysisRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18157,19 +21290,21 @@ def test_update_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_conversation._get_unset_required_fields(jsonified_request) + ).create_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_conversation._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).create_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18178,7 +21313,7 @@ def test_update_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Conversation() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18190,7 +21325,7 @@ def test_update_conversation_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -18198,32 +21333,37 @@ def test_update_conversation_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_conversation(request) + response = client.create_analysis(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_conversation_rest_unset_required_fields(): +def test_create_analysis_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.update_conversation._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("conversation",))) + unset_fields = transport.create_analysis._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "analysis", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_conversation_rest_interceptors(null_interceptor): +def test_create_analysis_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18236,14 +21376,16 @@ def test_update_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_conversation" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_create_analysis" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_create_analysis" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdateConversationRequest.pb( - contact_center_insights.UpdateConversationRequest() + pb_message = contact_center_insights.CreateAnalysisRequest.pb( + contact_center_insights.CreateAnalysisRequest() ) transcode.return_value = { "method": "post", @@ -18255,19 +21397,19 @@ def test_update_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Conversation.to_json( - resources.Conversation() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = 
contact_center_insights.UpdateConversationRequest() + request = contact_center_insights.CreateAnalysisRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Conversation() + post.return_value = operations_pb2.Operation() - client.update_conversation( + client.create_analysis( request, metadata=[ ("key", "val"), @@ -18279,9 +21421,8 @@ def test_update_conversation_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_conversation_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.UpdateConversationRequest, +def test_create_analysis_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.CreateAnalysisRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18290,9 +21431,7 @@ def test_update_conversation_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "conversation": { - "name": "projects/sample1/locations/sample2/conversations/sample3" - } + "parent": "projects/sample1/locations/sample2/conversations/sample3" } request = request_type(**request_init) @@ -18305,10 +21444,10 @@ def test_update_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_conversation(request) + client.create_analysis(request) -def test_update_conversation_rest_flattened(): +def test_create_analysis_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18317,47 +21456,41 @@ def test_update_conversation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Conversation() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "conversation": { - "name": "projects/sample1/locations/sample2/conversations/sample3" - } + "parent": "projects/sample1/locations/sample2/conversations/sample3" } # get truthy value for each flattened field mock_args = dict( - conversation=resources.Conversation( - call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + analysis=resources.Analysis(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_conversation(**mock_args) + client.create_analysis(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{conversation.name=projects/*/locations/*/conversations/*}" + "%s/v1/{parent=projects/*/locations/*/conversations/*}/analyses" % client.transport._host, args[1], ) -def test_update_conversation_rest_flattened_error(transport: str = "rest"): +def test_create_analysis_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18366,16 +21499,14 @@ def test_update_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_conversation( - contact_center_insights.UpdateConversationRequest(), - conversation=resources.Conversation( - call_metadata=resources.Conversation.CallMetadata(customer_channel=1706) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_analysis( + contact_center_insights.CreateAnalysisRequest(), + parent="parent_value", + analysis=resources.Analysis(name="name_value"), ) -def test_update_conversation_rest_error(): +def test_create_analysis_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18384,54 +21515,46 @@ def test_update_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetConversationRequest, + contact_center_insights.GetAnalysisRequest, dict, ], ) -def test_get_conversation_rest(request_type): +def test_get_analysis_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Conversation( + return_value = resources.Analysis( name="name_value", - language_code="language_code_value", - agent_id="agent_id_value", - medium=resources.Conversation.Medium.PHONE_CALL, - turn_count=1105, - obfuscated_user_id="obfuscated_user_id_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_conversation(request) + response = client.get_analysis(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Conversation) + assert isinstance(response, resources.Analysis) assert response.name == "name_value" - assert response.language_code == "language_code_value" - assert response.agent_id == "agent_id_value" - assert response.medium == resources.Conversation.Medium.PHONE_CALL - assert response.turn_count == 1105 - assert response.obfuscated_user_id == "obfuscated_user_id_value" -def test_get_conversation_rest_use_cached_wrapped_rpc(): +def test_get_analysis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18445,32 +21568,30 @@ def test_get_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_conversation in client._transport._wrapped_methods + assert client._transport.get_analysis in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # 
operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_analysis] = mock_rpc request = {} - client.get_conversation(request) + client.get_analysis(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_conversation(request) + client.get_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_conversation_rest_required_fields( - request_type=contact_center_insights.GetConversationRequest, +def test_get_analysis_rest_required_fields( + request_type=contact_center_insights.GetAnalysisRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -18486,7 +21607,7 @@ def test_get_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_conversation._get_unset_required_fields(jsonified_request) + ).get_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -18495,9 +21616,7 @@ def test_get_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_conversation._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("view",)) + ).get_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -18511,7 +21630,7 @@ def test_get_conversation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.Conversation() + return_value = resources.Analysis() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18532,30 +21651,30 @@ def test_get_conversation_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_conversation(request) + response = client.get_analysis(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_conversation_rest_unset_required_fields(): +def test_get_analysis_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_conversation._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view",)) & set(("name",))) + unset_fields = transport.get_analysis._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_conversation_rest_interceptors(null_interceptor): +def test_get_analysis_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18568,14 +21687,14 @@ def test_get_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_conversation" + 
transports.ContactCenterInsightsRestInterceptor, "post_get_analysis" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_get_analysis" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetConversationRequest.pb( - contact_center_insights.GetConversationRequest() + pb_message = contact_center_insights.GetAnalysisRequest.pb( + contact_center_insights.GetAnalysisRequest() ) transcode.return_value = { "method": "post", @@ -18587,19 +21706,17 @@ def test_get_conversation_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Conversation.to_json( - resources.Conversation() - ) + req.return_value._content = resources.Analysis.to_json(resources.Analysis()) - request = contact_center_insights.GetConversationRequest() + request = contact_center_insights.GetAnalysisRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Conversation() + post.return_value = resources.Analysis() - client.get_conversation( + client.get_analysis( request, metadata=[ ("key", "val"), @@ -18611,8 +21728,8 @@ def test_get_conversation_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_conversation_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetConversationRequest +def test_get_analysis_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.GetAnalysisRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18620,7 +21737,9 @@ def test_get_conversation_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + 
request_init = { + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18632,10 +21751,10 @@ def test_get_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_conversation(request) + client.get_analysis(request) -def test_get_conversation_rest_flattened(): +def test_get_analysis_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18644,11 +21763,11 @@ def test_get_conversation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Conversation() + return_value = resources.Analysis() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/conversations/sample3" + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" } # get truthy value for each flattened field @@ -18661,25 +21780,25 @@ def test_get_conversation_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Conversation.pb(return_value) + return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_conversation(**mock_args) + client.get_analysis(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/conversations/*}" + "%s/v1/{name=projects/*/locations/*/conversations/*/analyses/*}" % client.transport._host, args[1], ) -def test_get_conversation_rest_flattened_error(transport: str = "rest"): +def test_get_analysis_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18688,13 +21807,13 @@ def test_get_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_conversation( - contact_center_insights.GetConversationRequest(), + client.get_analysis( + contact_center_insights.GetAnalysisRequest(), name="name_value", ) -def test_get_conversation_rest_error(): +def test_get_analysis_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18703,24 +21822,26 @@ def test_get_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListConversationsRequest, + contact_center_insights.ListAnalysesRequest, dict, ], ) -def test_list_conversations_rest(request_type): +def test_list_analyses_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "parent": "projects/sample1/locations/sample2/conversations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListConversationsResponse( + return_value = contact_center_insights.ListAnalysesResponse( next_page_token="next_page_token_value", ) @@ -18728,21 +21849,19 @@ def test_list_conversations_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListConversationsResponse.pb( - return_value - ) + return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_conversations(request) + response = client.list_analyses(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConversationsPager) + assert isinstance(response, pagers.ListAnalysesPager) assert response.next_page_token == "next_page_token_value" -def test_list_conversations_rest_use_cached_wrapped_rpc(): +def test_list_analyses_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18756,34 +21875,30 @@ def test_list_conversations_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_conversations in client._transport._wrapped_methods - ) + assert client._transport.list_analyses in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_conversations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_analyses] = mock_rpc request = {} - client.list_conversations(request) + client.list_analyses(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_conversations(request) + client.list_analyses(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_conversations_rest_required_fields( - request_type=contact_center_insights.ListConversationsRequest, +def test_list_analyses_rest_required_fields( + request_type=contact_center_insights.ListAnalysesRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -18799,7 +21914,7 @@ def test_list_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_conversations._get_unset_required_fields(jsonified_request) + ).list_analyses._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -18808,14 +21923,13 @@ def test_list_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_conversations._get_unset_required_fields(jsonified_request) + ).list_analyses._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( "filter", "page_size", "page_token", - "view", ) ) jsonified_request.update(unset_fields) @@ -18831,7 +21945,7 @@ def test_list_conversations_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListConversationsResponse() + return_value = contact_center_insights.ListAnalysesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18852,34 +21966,31 @@ def test_list_conversations_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListConversationsResponse.pb( - return_value - ) + return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_conversations(request) + response = client.list_analyses(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_conversations_rest_unset_required_fields(): +def test_list_analyses_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_conversations._get_unset_required_fields({}) + unset_fields = transport.list_analyses._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( "filter", "pageSize", "pageToken", - "view", ) ) & set(("parent",)) @@ -18887,7 +21998,7 @@ def test_list_conversations_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_conversations_rest_interceptors(null_interceptor): +def test_list_analyses_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18900,14 +22011,14 @@ def test_list_conversations_rest_interceptors(null_interceptor): ) as req, 
mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_conversations" + transports.ContactCenterInsightsRestInterceptor, "post_list_analyses" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_conversations" + transports.ContactCenterInsightsRestInterceptor, "pre_list_analyses" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ListConversationsRequest.pb( - contact_center_insights.ListConversationsRequest() + pb_message = contact_center_insights.ListAnalysesRequest.pb( + contact_center_insights.ListAnalysesRequest() ) transcode.return_value = { "method": "post", @@ -18920,20 +22031,20 @@ def test_list_conversations_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - contact_center_insights.ListConversationsResponse.to_json( - contact_center_insights.ListConversationsResponse() + contact_center_insights.ListAnalysesResponse.to_json( + contact_center_insights.ListAnalysesResponse() ) ) - request = contact_center_insights.ListConversationsRequest() + request = contact_center_insights.ListAnalysesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListConversationsResponse() + post.return_value = contact_center_insights.ListAnalysesResponse() - client.list_conversations( + client.list_analyses( request, metadata=[ ("key", "val"), @@ -18945,9 +22056,8 @@ def test_list_conversations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_conversations_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.ListConversationsRequest, +def test_list_analyses_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.ListAnalysesRequest ): 
client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18955,7 +22065,9 @@ def test_list_conversations_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "parent": "projects/sample1/locations/sample2/conversations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18967,10 +22079,10 @@ def test_list_conversations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_conversations(request) + client.list_analyses(request) -def test_list_conversations_rest_flattened(): +def test_list_analyses_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18979,10 +22091,12 @@ def test_list_conversations_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListConversationsResponse() + return_value = contact_center_insights.ListAnalysesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/conversations/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -18994,27 +22108,25 @@ def test_list_conversations_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListConversationsResponse.pb( - return_value - ) + return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_conversations(**mock_args) + client.list_analyses(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/conversations" + "%s/v1/{parent=projects/*/locations/*/conversations/*}/analyses" % client.transport._host, args[1], ) -def test_list_conversations_rest_flattened_error(transport: str = "rest"): +def test_list_analyses_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19023,13 +22135,13 @@ def test_list_conversations_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_conversations( - contact_center_insights.ListConversationsRequest(), + client.list_analyses( + contact_center_insights.ListAnalysesRequest(), parent="parent_value", ) -def test_list_conversations_rest_pager(transport: str = "rest"): +def test_list_analyses_rest_pager(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19041,28 +22153,28 @@ def test_list_conversations_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - contact_center_insights.ListConversationsResponse( - conversations=[ - resources.Conversation(), - resources.Conversation(), - resources.Conversation(), + contact_center_insights.ListAnalysesResponse( + analyses=[ + resources.Analysis(), + resources.Analysis(), + resources.Analysis(), ], next_page_token="abc", ), - contact_center_insights.ListConversationsResponse( - conversations=[], + contact_center_insights.ListAnalysesResponse( + analyses=[], next_page_token="def", ), - contact_center_insights.ListConversationsResponse( - conversations=[ - resources.Conversation(), + contact_center_insights.ListAnalysesResponse( + analyses=[ + resources.Analysis(), ], next_page_token="ghi", ), - contact_center_insights.ListConversationsResponse( - conversations=[ - resources.Conversation(), - resources.Conversation(), + contact_center_insights.ListAnalysesResponse( + analyses=[ + resources.Analysis(), + resources.Analysis(), ], ), ) @@ -19071,8 +22183,7 @@ def test_list_conversations_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - contact_center_insights.ListConversationsResponse.to_json(x) - for x in response + contact_center_insights.ListAnalysesResponse.to_json(x) for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in 
zip(return_values, response): @@ -19080,15 +22191,17 @@ def test_list_conversations_rest_pager(transport: str = "rest"): return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/conversations/sample3" + } - pager = client.list_conversations(request=sample_request) + pager = client.list_analyses(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, resources.Conversation) for i in results) + assert all(isinstance(i, resources.Analysis) for i in results) - pages = list(client.list_conversations(request=sample_request).pages) + pages = list(client.list_analyses(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -19096,18 +22209,20 @@ def test_list_conversations_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteConversationRequest, + contact_center_insights.DeleteAnalysisRequest, dict, ], ) -def test_delete_conversation_rest(request_type): +def test_delete_analysis_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -19122,13 +22237,13 @@ def test_delete_conversation_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_conversation(request) + response = client.delete_analysis(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_conversation_rest_use_cached_wrapped_rpc(): +def test_delete_analysis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19142,34 +22257,30 @@ def test_delete_conversation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_conversation in client._transport._wrapped_methods - ) + assert client._transport.delete_analysis in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_conversation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_analysis] = mock_rpc request = {} - client.delete_conversation(request) + client.delete_analysis(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_conversation(request) + client.delete_analysis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_conversation_rest_required_fields( - request_type=contact_center_insights.DeleteConversationRequest, +def test_delete_analysis_rest_required_fields( + request_type=contact_center_insights.DeleteAnalysisRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -19185,7 +22296,7 @@ def test_delete_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_conversation._get_unset_required_fields(jsonified_request) + ).delete_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -19194,9 +22305,7 @@ def test_delete_conversation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_conversation._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("force",)) + ).delete_analysis._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19234,24 +22343,24 @@ def test_delete_conversation_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_conversation(request) + response = client.delete_analysis(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_conversation_rest_unset_required_fields(): +def test_delete_analysis_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_conversation._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) + unset_fields = transport.delete_analysis._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_conversation_rest_interceptors(null_interceptor): +def test_delete_analysis_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19264,11 +22373,11 @@ def test_delete_conversation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_conversation" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_analysis" ) as pre: pre.assert_not_called() - pb_message = contact_center_insights.DeleteConversationRequest.pb( - contact_center_insights.DeleteConversationRequest() + pb_message = contact_center_insights.DeleteAnalysisRequest.pb( + 
contact_center_insights.DeleteAnalysisRequest() ) transcode.return_value = { "method": "post", @@ -19281,14 +22390,14 @@ def test_delete_conversation_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - request = contact_center_insights.DeleteConversationRequest() + request = contact_center_insights.DeleteAnalysisRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - client.delete_conversation( + client.delete_analysis( request, metadata=[ ("key", "val"), @@ -19299,9 +22408,8 @@ def test_delete_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() -def test_delete_conversation_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.DeleteConversationRequest, +def test_delete_analysis_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.DeleteAnalysisRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19309,7 +22417,9 @@ def test_delete_conversation_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/conversations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19321,10 +22431,10 @@ def test_delete_conversation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_conversation(request) + client.delete_analysis(request) -def test_delete_conversation_rest_flattened(): +def test_delete_analysis_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19337,7 +22447,7 @@ def test_delete_conversation_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/conversations/sample3" + "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" } # get truthy value for each flattened field @@ -19353,20 +22463,20 @@ def test_delete_conversation_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_conversation(**mock_args) + client.delete_analysis(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/conversations/*}" + "%s/v1/{name=projects/*/locations/*/conversations/*/analyses/*}" % client.transport._host, args[1], ) -def test_delete_conversation_rest_flattened_error(transport: str = "rest"): +def test_delete_analysis_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19375,13 +22485,13 @@ def test_delete_conversation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_conversation( - contact_center_insights.DeleteConversationRequest(), + client.delete_analysis( + contact_center_insights.DeleteAnalysisRequest(), name="name_value", ) -def test_delete_conversation_rest_error(): +def test_delete_analysis_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19390,157 +22500,18 @@ def test_delete_conversation_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreateAnalysisRequest, + contact_center_insights.BulkAnalyzeConversationsRequest, dict, ], ) -def test_create_analysis_rest(request_type): +def test_bulk_analyze_conversations_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } - request_init["analysis"] = { - "name": "name_value", - "request_time": {"seconds": 751, "nanos": 543}, - "create_time": {}, - "analysis_result": { - "call_analysis_metadata": { - "annotations": [ - { - "interruption_data": {}, - "sentiment_data": { - "magnitude": 0.9580000000000001, - "score": 0.54, - }, - "silence_data": {}, - "hold_data": {}, - "entity_mention_data": { - "entity_unique_id": "entity_unique_id_value", - "type_": 1, - "sentiment": {}, - }, - "intent_match_data": { - "intent_unique_id": "intent_unique_id_value" - }, - "phrase_match_data": { - "phrase_matcher": "phrase_matcher_value", - "display_name": "display_name_value", - }, - "issue_match_data": { - "issue_assignment": { - "issue": "issue_value", - "score": 0.54, - "display_name": "display_name_value", - } - }, - "channel_tag": 1140, - "annotation_start_boundary": { - "word_index": 1075, - "transcript_index": 1729, - }, - "annotation_end_boundary": {}, - } - ], - "entities": {}, - "sentiments": [{"channel_tag": 1140, 
"sentiment_data": {}}], - "intents": {}, - "phrase_matchers": {}, - "issue_model_result": { - "issue_model": "issue_model_value", - "issues": {}, - }, - }, - "end_time": {}, - }, - "annotator_selector": { - "run_interruption_annotator": True, - "run_silence_annotator": True, - "run_phrase_matcher_annotator": True, - "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], - "run_sentiment_annotator": True, - "run_entity_annotator": True, - "run_intent_annotator": True, - "run_issue_model_annotator": True, - "issue_models": ["issue_models_value1", "issue_models_value2"], - "run_summarization_annotator": True, - "summarization_config": { - "conversation_profile": "conversation_profile_value", - "summarization_model": 1, - }, - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.CreateAnalysisRequest.meta.fields["analysis"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["analysis"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["analysis"][field])): - del request_init["analysis"][field][i][subfield] - else: - del 
request_init["analysis"][field][subfield] + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -19555,13 +22526,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_analysis(request) + response = client.bulk_analyze_conversations(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_analysis_rest_use_cached_wrapped_rpc(): +def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19575,17 +22546,22 @@ def test_create_analysis_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_analysis in client._transport._wrapped_methods + assert ( + client._transport.bulk_analyze_conversations + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_analysis] = mock_rpc + client._transport._wrapped_methods[ + client._transport.bulk_analyze_conversations + ] = mock_rpc request = {} - client.create_analysis(request) + client.bulk_analyze_conversations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -19594,20 +22570,22 @@ def test_create_analysis_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_analysis(request) + client.bulk_analyze_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_analysis_rest_required_fields( - request_type=contact_center_insights.CreateAnalysisRequest, +def test_bulk_analyze_conversations_rest_required_fields( + request_type=contact_center_insights.BulkAnalyzeConversationsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} request_init["parent"] = "" + request_init["filter"] = "" + request_init["analysis_percentage"] = 0.0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19618,21 +22596,27 @@ def test_create_analysis_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_analysis._get_unset_required_fields(jsonified_request) + ).bulk_analyze_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" + jsonified_request["analysisPercentage"] = 0.20170000000000002 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_analysis._get_unset_required_fields(jsonified_request) + ).bulk_analyze_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" + 
assert "analysisPercentage" in jsonified_request + assert jsonified_request["analysisPercentage"] == 0.20170000000000002 client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19666,32 +22650,33 @@ def test_create_analysis_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_analysis(request) + response = client.bulk_analyze_conversations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_analysis_rest_unset_required_fields(): +def test_bulk_analyze_conversations_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_analysis._get_unset_required_fields({}) + unset_fields = transport.bulk_analyze_conversations._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "parent", - "analysis", + "filter", + "analysisPercentage", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_analysis_rest_interceptors(null_interceptor): +def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19706,14 +22691,16 @@ def test_create_analysis_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_create_analysis" + transports.ContactCenterInsightsRestInterceptor, + "post_bulk_analyze_conversations", ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_create_analysis" + transports.ContactCenterInsightsRestInterceptor, + "pre_bulk_analyze_conversations", ) as 
pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CreateAnalysisRequest.pb( - contact_center_insights.CreateAnalysisRequest() + pb_message = contact_center_insights.BulkAnalyzeConversationsRequest.pb( + contact_center_insights.BulkAnalyzeConversationsRequest() ) transcode.return_value = { "method": "post", @@ -19729,7 +22716,7 @@ def test_create_analysis_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = contact_center_insights.CreateAnalysisRequest() + request = contact_center_insights.BulkAnalyzeConversationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -19737,7 +22724,7 @@ def test_create_analysis_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_analysis( + client.bulk_analyze_conversations( request, metadata=[ ("key", "val"), @@ -19749,8 +22736,9 @@ def test_create_analysis_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_analysis_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.CreateAnalysisRequest +def test_bulk_analyze_conversations_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.BulkAnalyzeConversationsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19758,9 +22746,7 @@ def test_create_analysis_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19772,10 +22758,10 @@ def test_create_analysis_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_analysis(request) + client.bulk_analyze_conversations(request) -def test_create_analysis_rest_flattened(): +def test_bulk_analyze_conversations_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19787,14 +22773,13 @@ def test_create_analysis_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - analysis=resources.Analysis(name="name_value"), + filter="filter_value", + analysis_percentage=0.20170000000000002, ) mock_args.update(sample_request) @@ -19805,20 +22790,20 @@ def test_create_analysis_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_analysis(**mock_args) + client.bulk_analyze_conversations(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/conversations/*}/analyses" + "%s/v1/{parent=projects/*/locations/*}/conversations:bulkAnalyze" % client.transport._host, args[1], ) -def test_create_analysis_rest_flattened_error(transport: str = "rest"): +def test_bulk_analyze_conversations_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19827,14 +22812,15 @@ def test_create_analysis_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_analysis( - contact_center_insights.CreateAnalysisRequest(), + client.bulk_analyze_conversations( + contact_center_insights.BulkAnalyzeConversationsRequest(), parent="parent_value", - analysis=resources.Analysis(name="name_value"), + filter="filter_value", + analysis_percentage=0.20170000000000002, ) -def test_create_analysis_rest_error(): +def test_bulk_analyze_conversations_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19843,46 +22829,39 @@ def test_create_analysis_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetAnalysisRequest, + contact_center_insights.BulkDeleteConversationsRequest, dict, ], ) -def test_get_analysis_rest(request_type): +def test_bulk_delete_conversations_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call 
within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Analysis( - name="name_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_analysis(request) + response = client.bulk_delete_conversations(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Analysis) - assert response.name == "name_value" + assert response.operation.name == "operations/spam" -def test_get_analysis_rest_use_cached_wrapped_rpc(): +def test_bulk_delete_conversations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19896,35 +22875,44 @@ def test_get_analysis_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_analysis in client._transport._wrapped_methods + assert ( + client._transport.bulk_delete_conversations + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_analysis] = mock_rpc + client._transport._wrapped_methods[ + client._transport.bulk_delete_conversations + ] = mock_rpc request = {} - client.get_analysis(request) + client.bulk_delete_conversations(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_analysis(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.bulk_delete_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_analysis_rest_required_fields( - request_type=contact_center_insights.GetAnalysisRequest, +def test_bulk_delete_conversations_rest_required_fields( + request_type=contact_center_insights.BulkDeleteConversationsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19935,21 +22923,21 @@ def test_get_analysis_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_analysis._get_unset_required_fields(jsonified_request) + ).bulk_delete_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_analysis._get_unset_required_fields(jsonified_request) + ).bulk_delete_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - 
assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19958,7 +22946,7 @@ def test_get_analysis_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Analysis() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19970,39 +22958,37 @@ def test_get_analysis_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_analysis(request) + response = client.bulk_delete_conversations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_analysis_rest_unset_required_fields(): +def test_bulk_delete_conversations_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_analysis._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.bulk_delete_conversations._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_analysis_rest_interceptors(null_interceptor): +def test_bulk_delete_conversations_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20015,14 +23001,17 @@ def test_get_analysis_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_analysis" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_bulk_delete_conversations", ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_analysis" + transports.ContactCenterInsightsRestInterceptor, "pre_bulk_delete_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetAnalysisRequest.pb( - contact_center_insights.GetAnalysisRequest() + pb_message = contact_center_insights.BulkDeleteConversationsRequest.pb( + contact_center_insights.BulkDeleteConversationsRequest() ) transcode.return_value = { "method": "post", @@ -20034,17 +23023,19 @@ def test_get_analysis_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Analysis.to_json(resources.Analysis()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.GetAnalysisRequest() + request = contact_center_insights.BulkDeleteConversationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = 
resources.Analysis() + post.return_value = operations_pb2.Operation() - client.get_analysis( + client.bulk_delete_conversations( request, metadata=[ ("key", "val"), @@ -20056,8 +23047,9 @@ def test_get_analysis_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_analysis_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetAnalysisRequest +def test_bulk_delete_conversations_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.BulkDeleteConversationsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20065,9 +23057,7 @@ def test_get_analysis_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20079,10 +23069,10 @@ def test_get_analysis_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_analysis(request) + client.bulk_delete_conversations(request) -def test_get_analysis_rest_flattened(): +def test_bulk_delete_conversations_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20091,42 +23081,39 @@ def test_get_analysis_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Analysis() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + filter="filter_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Analysis.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_analysis(**mock_args) + client.bulk_delete_conversations(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/conversations/*/analyses/*}" + "%s/v1/{parent=projects/*/locations/*}/conversations:bulkDelete" % client.transport._host, args[1], ) -def test_get_analysis_rest_flattened_error(transport: str = "rest"): +def test_bulk_delete_conversations_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20135,13 +23122,14 @@ def test_get_analysis_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_analysis( - contact_center_insights.GetAnalysisRequest(), - name="name_value", + client.bulk_delete_conversations( + contact_center_insights.BulkDeleteConversationsRequest(), + parent="parent_value", + filter="filter_value", ) -def test_get_analysis_rest_error(): +def test_bulk_delete_conversations_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20150,46 +23138,39 @@ def test_get_analysis_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListAnalysesRequest, + contact_center_insights.IngestConversationsRequest, dict, ], ) -def test_list_analyses_rest(request_type): +def test_ingest_conversations_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListAnalysesResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_analyses(request) + response = client.ingest_conversations(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAnalysesPager) - assert response.next_page_token == "next_page_token_value" + assert response.operation.name == "operations/spam" -def test_list_analyses_rest_use_cached_wrapped_rpc(): +def test_ingest_conversations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20203,30 +23184,38 @@ def test_list_analyses_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_analyses in client._transport._wrapped_methods + assert ( + client._transport.ingest_conversations in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_analyses] = mock_rpc + client._transport._wrapped_methods[ + client._transport.ingest_conversations + ] = mock_rpc request = {} - client.list_analyses(request) + client.ingest_conversations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_analyses(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.ingest_conversations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_analyses_rest_required_fields( - request_type=contact_center_insights.ListAnalysesRequest, +def test_ingest_conversations_rest_required_fields( + request_type=contact_center_insights.IngestConversationsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -20242,7 +23231,7 @@ def test_list_analyses_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_analyses._get_unset_required_fields(jsonified_request) + ).ingest_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -20251,15 +23240,7 @@ def test_list_analyses_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_analyses._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).ingest_conversations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -20273,7 +23254,7 @@ def test_list_analyses_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListAnalysesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20285,48 +23266,37 @@ def test_list_analyses_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_analyses(request) + response = client.ingest_conversations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_analyses_rest_unset_required_fields(): +def test_ingest_conversations_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_analyses._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.ingest_conversations._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_analyses_rest_interceptors(null_interceptor): +def test_ingest_conversations_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20339,14 +23309,16 @@ def test_list_analyses_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, 
"transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_analyses" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_ingest_conversations" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_analyses" + transports.ContactCenterInsightsRestInterceptor, "pre_ingest_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ListAnalysesRequest.pb( - contact_center_insights.ListAnalysesRequest() + pb_message = contact_center_insights.IngestConversationsRequest.pb( + contact_center_insights.IngestConversationsRequest() ) transcode.return_value = { "method": "post", @@ -20358,21 +23330,19 @@ def test_list_analyses_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.ListAnalysesResponse.to_json( - contact_center_insights.ListAnalysesResponse() - ) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = contact_center_insights.ListAnalysesRequest() + request = contact_center_insights.IngestConversationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListAnalysesResponse() + post.return_value = operations_pb2.Operation() - client.list_analyses( + client.ingest_conversations( request, metadata=[ ("key", "val"), @@ -20384,8 +23354,9 @@ def test_list_analyses_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_analyses_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.ListAnalysesRequest +def test_ingest_conversations_rest_bad_request( + transport: str = "rest", + 
request_type=contact_center_insights.IngestConversationsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20393,9 +23364,7 @@ def test_list_analyses_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20407,10 +23376,10 @@ def test_list_analyses_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_analyses(request) + client.ingest_conversations(request) -def test_list_analyses_rest_flattened(): +def test_ingest_conversations_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20419,12 +23388,10 @@ def test_list_analyses_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListAnalysesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -20435,26 +23402,24 @@ def test_list_analyses_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListAnalysesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_analyses(**mock_args) + client.ingest_conversations(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/conversations/*}/analyses" + "%s/v1/{parent=projects/*/locations/*}/conversations:ingest" % client.transport._host, args[1], ) -def test_list_analyses_rest_flattened_error(transport: str = "rest"): +def test_ingest_conversations_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20463,115 +23428,54 @@ def test_list_analyses_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_analyses( - contact_center_insights.ListAnalysesRequest(), + client.ingest_conversations( + contact_center_insights.IngestConversationsRequest(), parent="parent_value", ) -def test_list_analyses_rest_pager(transport: str = "rest"): +def test_ingest_conversations_rest_error(): client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - contact_center_insights.ListAnalysesResponse( - analyses=[ - resources.Analysis(), - resources.Analysis(), - resources.Analysis(), - ], - next_page_token="abc", - ), - contact_center_insights.ListAnalysesResponse( - analyses=[], - next_page_token="def", - ), - contact_center_insights.ListAnalysesResponse( - analyses=[ - resources.Analysis(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListAnalysesResponse( - analyses=[ - resources.Analysis(), - resources.Analysis(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - contact_center_insights.ListAnalysesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/conversations/sample3" - } - - pager = client.list_analyses(request=sample_request) - - results = list(pager) - assert len(results) == 6 - 
assert all(isinstance(i, resources.Analysis) for i in results) - - pages = list(client.list_analyses(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteAnalysisRequest, + contact_center_insights.ExportInsightsDataRequest, dict, ], ) -def test_delete_analysis_rest(request_type): +def test_export_insights_data_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_analysis(request) + response = client.export_insights_data(request) # Establish that the response is the type that we expect. 
- assert response is None + assert response.operation.name == "operations/spam" -def test_delete_analysis_rest_use_cached_wrapped_rpc(): +def test_export_insights_data_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20585,35 +23489,43 @@ def test_delete_analysis_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_analysis in client._transport._wrapped_methods + assert ( + client._transport.export_insights_data in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_analysis] = mock_rpc + client._transport._wrapped_methods[ + client._transport.export_insights_data + ] = mock_rpc request = {} - client.delete_analysis(request) + client.export_insights_data(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_analysis(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_insights_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_analysis_rest_required_fields( - request_type=contact_center_insights.DeleteAnalysisRequest, +def test_export_insights_data_rest_required_fields( + request_type=contact_center_insights.ExportInsightsDataRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20624,21 +23536,21 @@ def test_delete_analysis_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_analysis._get_unset_required_fields(jsonified_request) + ).export_insights_data._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_analysis._get_unset_required_fields(jsonified_request) + ).export_insights_data._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20647,7 +23559,7 @@ def 
test_delete_analysis_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20659,36 +23571,37 @@ def test_delete_analysis_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_analysis(request) + response = client.export_insights_data(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_analysis_rest_unset_required_fields(): +def test_export_insights_data_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_analysis._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.export_insights_data._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_analysis_rest_interceptors(null_interceptor): +def test_export_insights_data_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None @@ -20701,11 +23614,16 @@ def test_delete_analysis_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_analysis" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_export_insights_data" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_export_insights_data" ) as pre: pre.assert_not_called() - pb_message = contact_center_insights.DeleteAnalysisRequest.pb( - contact_center_insights.DeleteAnalysisRequest() + post.assert_not_called() + pb_message = contact_center_insights.ExportInsightsDataRequest.pb( + contact_center_insights.ExportInsightsDataRequest() ) transcode.return_value = { "method": "post", @@ -20717,15 +23635,19 @@ def test_delete_analysis_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.DeleteAnalysisRequest() + request = contact_center_insights.ExportInsightsDataRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - client.delete_analysis( + client.export_insights_data( request, metadata=[ ("key", "val"), @@ -20734,10 +23656,12 @@ def test_delete_analysis_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_analysis_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.DeleteAnalysisRequest +def test_export_insights_data_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.ExportInsightsDataRequest, ): client = ContactCenterInsightsClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -20745,9 +23669,7 @@ def test_delete_analysis_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20759,10 +23681,10 @@ def test_delete_analysis_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_analysis(request) + client.export_insights_data(request) -def test_delete_analysis_rest_flattened(): +def test_export_insights_data_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20771,40 +23693,38 @@ def test_delete_analysis_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/conversations/sample3/analyses/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_analysis(**mock_args) + client.export_insights_data(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/conversations/*/analyses/*}" + "%s/v1/{parent=projects/*/locations/*}/insightsdata:export" % client.transport._host, args[1], ) -def test_delete_analysis_rest_flattened_error(transport: str = "rest"): +def test_export_insights_data_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20813,13 +23733,13 @@ def test_delete_analysis_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_analysis( - contact_center_insights.DeleteAnalysisRequest(), - name="name_value", + client.export_insights_data( + contact_center_insights.ExportInsightsDataRequest(), + parent="parent_value", ) -def test_delete_analysis_rest_error(): +def test_export_insights_data_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20828,11 +23748,11 @@ def test_delete_analysis_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.BulkAnalyzeConversationsRequest, + contact_center_insights.CreateIssueModelRequest, dict, ], ) -def test_bulk_analyze_conversations_rest(request_type): +def test_create_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20840,6 +23760,95 @@ def test_bulk_analyze_conversations_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["issue_model"] = { + "name": "name_value", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "issue_count": 1201, + "state": 1, + "input_data_config": { + "medium": 1, + "training_conversations_count": 3025, + "filter": "filter_value", + }, + "training_stats": { + "analyzed_conversations_count": 3021, + "unclassified_conversations_count": 3439, + "issue_stats": {}, + }, + "model_type": 1, + "language_code": "language_code_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.CreateIssueModelRequest.meta.fields[ + "issue_model" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["issue_model"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime 
version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["issue_model"][field])): + del request_init["issue_model"][field][i][subfield] + else: + del request_init["issue_model"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -20854,13 +23863,13 @@ def test_bulk_analyze_conversations_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.bulk_analyze_conversations(request) + response = client.create_issue_model(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): +def test_create_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20875,8 +23884,7 @@ def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.bulk_analyze_conversations - in client._transport._wrapped_methods + client._transport.create_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -20885,11 +23893,11 @@ def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.bulk_analyze_conversations + client._transport.create_issue_model ] = mock_rpc request = {} - client.bulk_analyze_conversations(request) + client.create_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -20898,22 +23906,20 @@ def test_bulk_analyze_conversations_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.bulk_analyze_conversations(request) + client.create_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_bulk_analyze_conversations_rest_required_fields( - request_type=contact_center_insights.BulkAnalyzeConversationsRequest, +def test_create_issue_model_rest_required_fields( + request_type=contact_center_insights.CreateIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} request_init["parent"] = "" - request_init["filter"] = "" - request_init["analysis_percentage"] = 0.0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20924,27 +23930,21 @@ def test_bulk_analyze_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).bulk_analyze_conversations._get_unset_required_fields(jsonified_request) + ).create_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["filter"] = "filter_value" - jsonified_request["analysisPercentage"] = 0.20170000000000002 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).bulk_analyze_conversations._get_unset_required_fields(jsonified_request) + 
).create_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" - assert "analysisPercentage" in jsonified_request - assert jsonified_request["analysisPercentage"] == 0.20170000000000002 client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20978,33 +23978,32 @@ def test_bulk_analyze_conversations_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.bulk_analyze_conversations(request) + response = client.create_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_bulk_analyze_conversations_rest_unset_required_fields(): +def test_create_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.bulk_analyze_conversations._get_unset_required_fields({}) + unset_fields = transport.create_issue_model._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "parent", - "filter", - "analysisPercentage", + "issueModel", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): +def test_create_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21019,16 +24018,14 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, 
"_set_result_from_operation" ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "post_bulk_analyze_conversations", + transports.ContactCenterInsightsRestInterceptor, "post_create_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "pre_bulk_analyze_conversations", + transports.ContactCenterInsightsRestInterceptor, "pre_create_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.BulkAnalyzeConversationsRequest.pb( - contact_center_insights.BulkAnalyzeConversationsRequest() + pb_message = contact_center_insights.CreateIssueModelRequest.pb( + contact_center_insights.CreateIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -21044,7 +24041,7 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = contact_center_insights.BulkAnalyzeConversationsRequest() + request = contact_center_insights.CreateIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21052,7 +24049,7 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.bulk_analyze_conversations( + client.create_issue_model( request, metadata=[ ("key", "val"), @@ -21064,9 +24061,9 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_bulk_analyze_conversations_rest_bad_request( +def test_create_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.BulkAnalyzeConversationsRequest, + request_type=contact_center_insights.CreateIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21086,10 +24083,10 @@ def test_bulk_analyze_conversations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() 
req.return_value = response_value - client.bulk_analyze_conversations(request) + client.create_issue_model(request) -def test_bulk_analyze_conversations_rest_flattened(): +def test_create_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21106,8 +24103,7 @@ def test_bulk_analyze_conversations_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - filter="filter_value", - analysis_percentage=0.20170000000000002, + issue_model=resources.IssueModel(name="name_value"), ) mock_args.update(sample_request) @@ -21118,78 +24114,185 @@ def test_bulk_analyze_conversations_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.bulk_analyze_conversations(**mock_args) + client.create_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/conversations:bulkAnalyze" + "%s/v1/{parent=projects/*/locations/*}/issueModels" % client.transport._host, args[1], ) -def test_bulk_analyze_conversations_rest_flattened_error(transport: str = "rest"): +def test_create_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.bulk_analyze_conversations( - contact_center_insights.BulkAnalyzeConversationsRequest(), - parent="parent_value", - filter="filter_value", - analysis_percentage=0.20170000000000002, - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_issue_model( + contact_center_insights.CreateIssueModelRequest(), + parent="parent_value", + issue_model=resources.IssueModel(name="name_value"), + ) + + +def test_create_issue_model_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdateIssueModelRequest, + dict, + ], +) +def test_update_issue_model_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "issue_model": { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } + } + request_init["issue_model"] = { + "name": "projects/sample1/locations/sample2/issueModels/sample3", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "issue_count": 1201, + "state": 1, + "input_data_config": { + "medium": 1, + "training_conversations_count": 3025, + "filter": "filter_value", + }, + "training_stats": { + "analyzed_conversations_count": 3021, + "unclassified_conversations_count": 3439, + "issue_stats": {}, + }, + "model_type": 1, + "language_code": "language_code_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdateIssueModelRequest.meta.fields[ + "issue_model" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_bulk_analyze_conversations_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + subfields_not_in_runtime = [] -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.BulkDeleteConversationsRequest, - dict, - ], -) -def test_bulk_delete_conversations_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["issue_model"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample 
request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["issue_model"][field])): + del request_init["issue_model"][field][i][subfield] + else: + del request_init["issue_model"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel( + name="name_value", + display_name="display_name_value", + issue_count=1201, + state=resources.IssueModel.State.UNDEPLOYED, + model_type=resources.IssueModel.ModelType.TYPE_V1, + language_code="language_code_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.bulk_delete_conversations(request) + response = client.update_issue_model(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, resources.IssueModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.issue_count == 1201 + assert response.state == resources.IssueModel.State.UNDEPLOYED + assert response.model_type == resources.IssueModel.ModelType.TYPE_V1 + assert response.language_code == "language_code_value" -def test_bulk_delete_conversations_rest_use_cached_wrapped_rpc(): +def test_update_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21204,8 +24307,7 @@ def test_bulk_delete_conversations_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.bulk_delete_conversations - in client._transport._wrapped_methods + client._transport.update_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -21214,33 +24316,28 @@ def test_bulk_delete_conversations_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.bulk_delete_conversations + client._transport.update_issue_model ] = mock_rpc request = {} - client.bulk_delete_conversations(request) + client.update_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.bulk_delete_conversations(request) + client.update_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_bulk_delete_conversations_rest_required_fields( - request_type=contact_center_insights.BulkDeleteConversationsRequest, +def test_update_issue_model_rest_required_fields( + request_type=contact_center_insights.UpdateIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21251,21 +24348,19 @@ def test_bulk_delete_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).bulk_delete_conversations._get_unset_required_fields(jsonified_request) + ).update_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).bulk_delete_conversations._get_unset_required_fields(jsonified_request) + ).update_issue_model._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21274,7 +24369,7 @@ def test_bulk_delete_conversations_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21286,7 +24381,7 @@ def test_bulk_delete_conversations_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -21294,29 +24389,32 @@ def test_bulk_delete_conversations_rest_required_fields( response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.bulk_delete_conversations(request) + response = client.update_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_bulk_delete_conversations_rest_unset_required_fields(): +def test_update_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.bulk_delete_conversations._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.update_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("issueModel",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_bulk_delete_conversations_rest_interceptors(null_interceptor): +def test_update_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21329,17 +24427,14 @@ def test_bulk_delete_conversations_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "post_bulk_delete_conversations", + transports.ContactCenterInsightsRestInterceptor, "post_update_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_bulk_delete_conversations" + transports.ContactCenterInsightsRestInterceptor, "pre_update_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.BulkDeleteConversationsRequest.pb( - contact_center_insights.BulkDeleteConversationsRequest() + pb_message = contact_center_insights.UpdateIssueModelRequest.pb( + contact_center_insights.UpdateIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -21351,19 +24446,17 @@ def test_bulk_delete_conversations_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = resources.IssueModel.to_json(resources.IssueModel()) - request = 
contact_center_insights.BulkDeleteConversationsRequest() + request = contact_center_insights.UpdateIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = resources.IssueModel() - client.bulk_delete_conversations( + client.update_issue_model( request, metadata=[ ("key", "val"), @@ -21375,9 +24468,9 @@ def test_bulk_delete_conversations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_bulk_delete_conversations_rest_bad_request( +def test_update_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.BulkDeleteConversationsRequest, + request_type=contact_center_insights.UpdateIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21385,7 +24478,11 @@ def test_bulk_delete_conversations_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "issue_model": { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21397,10 +24494,10 @@ def test_bulk_delete_conversations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.bulk_delete_conversations(request) + client.update_issue_model(request) -def test_bulk_delete_conversations_rest_flattened(): +def test_update_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21409,39 +24506,45 @@ def test_bulk_delete_conversations_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "issue_model": { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - filter="filter_value", + issue_model=resources.IssueModel(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.bulk_delete_conversations(**mock_args) + client.update_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/conversations:bulkDelete" + "%s/v1/{issue_model.name=projects/*/locations/*/issueModels/*}" % client.transport._host, args[1], ) -def test_bulk_delete_conversations_rest_flattened_error(transport: str = "rest"): +def test_update_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21450,14 +24553,14 @@ def test_bulk_delete_conversations_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.bulk_delete_conversations( - contact_center_insights.BulkDeleteConversationsRequest(), - parent="parent_value", - filter="filter_value", + client.update_issue_model( + contact_center_insights.UpdateIssueModelRequest(), + issue_model=resources.IssueModel(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_bulk_delete_conversations_rest_error(): +def test_update_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21466,39 +24569,54 @@ def test_bulk_delete_conversations_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.IngestConversationsRequest, + contact_center_insights.GetIssueModelRequest, dict, ], ) -def test_ingest_conversations_rest(request_type): +def test_get_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the 
http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel( + name="name_value", + display_name="display_name_value", + issue_count=1201, + state=resources.IssueModel.State.UNDEPLOYED, + model_type=resources.IssueModel.ModelType.TYPE_V1, + language_code="language_code_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.ingest_conversations(request) + response = client.get_issue_model(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, resources.IssueModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.issue_count == 1201 + assert response.state == resources.IssueModel.State.UNDEPLOYED + assert response.model_type == resources.IssueModel.ModelType.TYPE_V1 + assert response.language_code == "language_code_value" -def test_ingest_conversations_rest_use_cached_wrapped_rpc(): +def test_get_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21512,43 +24630,35 @@ def test_ingest_conversations_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.ingest_conversations in client._transport._wrapped_methods - ) + assert client._transport.get_issue_model in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.ingest_conversations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_issue_model] = mock_rpc request = {} - client.ingest_conversations(request) + client.get_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.ingest_conversations(request) + client.get_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_ingest_conversations_rest_required_fields( - request_type=contact_center_insights.IngestConversationsRequest, +def test_get_issue_model_rest_required_fields( + request_type=contact_center_insights.GetIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21559,21 +24669,21 @@ def test_ingest_conversations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).ingest_conversations._get_unset_required_fields(jsonified_request) + ).get_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).ingest_conversations._get_unset_required_fields(jsonified_request) + ).get_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21582,7 +24692,7 @@ def 
test_ingest_conversations_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21594,37 +24704,39 @@ def test_ingest_conversations_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.ingest_conversations(request) + response = client.get_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_ingest_conversations_rest_unset_required_fields(): +def test_get_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.ingest_conversations._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.get_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_ingest_conversations_rest_interceptors(null_interceptor): +def test_get_issue_model_rest_interceptors(null_interceptor): transport = 
transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21637,16 +24749,14 @@ def test_ingest_conversations_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_ingest_conversations" + transports.ContactCenterInsightsRestInterceptor, "post_get_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_ingest_conversations" + transports.ContactCenterInsightsRestInterceptor, "pre_get_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.IngestConversationsRequest.pb( - contact_center_insights.IngestConversationsRequest() + pb_message = contact_center_insights.GetIssueModelRequest.pb( + contact_center_insights.GetIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -21658,19 +24768,17 @@ def test_ingest_conversations_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = resources.IssueModel.to_json(resources.IssueModel()) - request = contact_center_insights.IngestConversationsRequest() + request = contact_center_insights.GetIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = resources.IssueModel() - client.ingest_conversations( + client.get_issue_model( request, metadata=[ ("key", "val"), @@ -21682,9 +24790,8 @@ def test_ingest_conversations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_ingest_conversations_rest_bad_request( 
- transport: str = "rest", - request_type=contact_center_insights.IngestConversationsRequest, +def test_get_issue_model_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.GetIssueModelRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21692,7 +24799,7 @@ def test_ingest_conversations_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21704,10 +24811,10 @@ def test_ingest_conversations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.ingest_conversations(request) + client.get_issue_model(request) -def test_ingest_conversations_rest_flattened(): +def test_get_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21716,38 +24823,42 @@ def test_ingest_conversations_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.IssueModel() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.ingest_conversations(**mock_args) + client.get_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/conversations:ingest" + "%s/v1/{name=projects/*/locations/*/issueModels/*}" % client.transport._host, args[1], ) -def test_ingest_conversations_rest_flattened_error(transport: str = "rest"): +def test_get_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21756,13 +24867,13 @@ def test_ingest_conversations_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.ingest_conversations( - contact_center_insights.IngestConversationsRequest(), - parent="parent_value", + client.get_issue_model( + contact_center_insights.GetIssueModelRequest(), + name="name_value", ) -def test_ingest_conversations_rest_error(): +def test_get_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21771,11 +24882,11 @@ def test_ingest_conversations_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ExportInsightsDataRequest, + contact_center_insights.ListIssueModelsRequest, dict, ], ) -def test_export_insights_data_rest(request_type): +def test_list_issue_models_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21788,22 +24899,24 @@ def test_export_insights_data_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssueModelsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssueModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_insights_data(request) + response = client.list_issue_models(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, contact_center_insights.ListIssueModelsResponse) -def test_export_insights_data_rest_use_cached_wrapped_rpc(): +def test_list_issue_models_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21817,9 +24930,7 @@ def test_export_insights_data_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.export_insights_data in client._transport._wrapped_methods - ) + assert client._transport.list_issue_models in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -21827,28 +24938,24 @@ def test_export_insights_data_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.export_insights_data + client._transport.list_issue_models ] = mock_rpc request = {} - client.export_insights_data(request) + client.list_issue_models(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_insights_data(request) + client.list_issue_models(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_export_insights_data_rest_required_fields( - request_type=contact_center_insights.ExportInsightsDataRequest, +def test_list_issue_models_rest_required_fields( + request_type=contact_center_insights.ListIssueModelsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -21864,7 +24971,7 @@ def test_export_insights_data_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_insights_data._get_unset_required_fields(jsonified_request) + ).list_issue_models._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -21873,7 +24980,7 @@ def test_export_insights_data_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_insights_data._get_unset_required_fields(jsonified_request) + ).list_issue_models._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -21887,7 +24994,7 @@ def test_export_insights_data_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssueModelsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21899,37 +25006,41 @@ def test_export_insights_data_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssueModelsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_insights_data(request) + response = client.list_issue_models(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_insights_data_rest_unset_required_fields(): +def test_list_issue_models_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_insights_data._get_unset_required_fields({}) + unset_fields = transport.list_issue_models._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_insights_data_rest_interceptors(null_interceptor): +def test_list_issue_models_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21942,16 +25053,14 @@ def test_export_insights_data_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, 
"_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_export_insights_data" + transports.ContactCenterInsightsRestInterceptor, "post_list_issue_models" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_export_insights_data" + transports.ContactCenterInsightsRestInterceptor, "pre_list_issue_models" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ExportInsightsDataRequest.pb( - contact_center_insights.ExportInsightsDataRequest() + pb_message = contact_center_insights.ListIssueModelsRequest.pb( + contact_center_insights.ListIssueModelsRequest() ) transcode.return_value = { "method": "post", @@ -21963,19 +25072,21 @@ def test_export_insights_data_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + contact_center_insights.ListIssueModelsResponse.to_json( + contact_center_insights.ListIssueModelsResponse() + ) ) - request = contact_center_insights.ExportInsightsDataRequest() + request = contact_center_insights.ListIssueModelsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = contact_center_insights.ListIssueModelsResponse() - client.export_insights_data( + client.list_issue_models( request, metadata=[ ("key", "val"), @@ -21987,9 +25098,8 @@ def test_export_insights_data_rest_interceptors(null_interceptor): post.assert_called_once() -def test_export_insights_data_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.ExportInsightsDataRequest, +def test_list_issue_models_rest_bad_request( + transport: str = "rest", 
request_type=contact_center_insights.ListIssueModelsRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22009,10 +25119,10 @@ def test_export_insights_data_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.export_insights_data(request) + client.list_issue_models(request) -def test_export_insights_data_rest_flattened(): +def test_list_issue_models_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22021,7 +25131,7 @@ def test_export_insights_data_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssueModelsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -22035,148 +25145,61 @@ def test_export_insights_data_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssueModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.export_insights_data(**mock_args) + client.list_issue_models(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/insightsdata:export" + "%s/v1/{parent=projects/*/locations/*}/issueModels" % client.transport._host, args[1], ) -def test_export_insights_data_rest_flattened_error(transport: str = "rest"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.export_insights_data( - contact_center_insights.ExportInsightsDataRequest(), - parent="parent_value", - ) - - -def test_export_insights_data_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.CreateIssueModelRequest, - dict, - ], -) -def test_create_issue_model_rest(request_type): +def test_list_issue_models_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["issue_model"] = { - "name": "name_value", - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "issue_count": 1201, - "state": 1, - "input_data_config": { - "medium": 1, - "training_conversations_count": 3025, - "filter": "filter_value", - }, - "training_stats": { - "analyzed_conversations_count": 3021, - "unclassified_conversations_count": 3439, - "issue_stats": {}, - }, - "model_type": 1, - "language_code": "language_code_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.CreateIssueModelRequest.meta.fields[ - "issue_model" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_issue_models( + contact_center_insights.ListIssueModelsRequest(), + parent="parent_value", + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - subfields_not_in_runtime = [] +def test_list_issue_models_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["issue_model"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.DeleteIssueModelRequest, + dict, + ], +) +def test_delete_issue_model_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if 
field_repeated: - for i in range(0, len(request_init["issue_model"][field])): - del request_init["issue_model"][field][i][subfield] - else: - del request_init["issue_model"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22191,13 +25214,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_issue_model(request) + response = client.delete_issue_model(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_issue_model_rest_use_cached_wrapped_rpc(): +def test_delete_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22212,7 +25235,7 @@ def test_create_issue_model_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_issue_model in client._transport._wrapped_methods + client._transport.delete_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -22221,11 +25244,11 @@ def test_create_issue_model_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_issue_model + client._transport.delete_issue_model ] = mock_rpc request = {} - client.create_issue_model(request) + client.delete_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -22234,20 +25257,20 @@ def test_create_issue_model_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_issue_model(request) + client.delete_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_issue_model_rest_required_fields( - request_type=contact_center_insights.CreateIssueModelRequest, +def test_delete_issue_model_rest_required_fields( + request_type=contact_center_insights.DeleteIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22258,21 +25281,21 @@ def test_create_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_issue_model._get_unset_required_fields(jsonified_request) + ).delete_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_issue_model._get_unset_required_fields(jsonified_request) + ).delete_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22293,10 +25316,9 @@ def 
test_create_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -22306,32 +25328,24 @@ def test_create_issue_model_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_issue_model(request) + response = client.delete_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_issue_model_rest_unset_required_fields(): +def test_delete_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_issue_model._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "issueModel", - ) - ) - ) + unset_fields = transport.delete_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_issue_model_rest_interceptors(null_interceptor): +def test_delete_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22346,14 +25360,14 @@ def test_create_issue_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_create_issue_model" + transports.ContactCenterInsightsRestInterceptor, "post_delete_issue_model" ) as post, mock.patch.object( - 
transports.ContactCenterInsightsRestInterceptor, "pre_create_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CreateIssueModelRequest.pb( - contact_center_insights.CreateIssueModelRequest() + pb_message = contact_center_insights.DeleteIssueModelRequest.pb( + contact_center_insights.DeleteIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -22369,7 +25383,7 @@ def test_create_issue_model_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = contact_center_insights.CreateIssueModelRequest() + request = contact_center_insights.DeleteIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22377,7 +25391,7 @@ def test_create_issue_model_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_issue_model( + client.delete_issue_model( request, metadata=[ ("key", "val"), @@ -22389,9 +25403,9 @@ def test_create_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_issue_model_rest_bad_request( +def test_delete_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.CreateIssueModelRequest, + request_type=contact_center_insights.DeleteIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22399,7 +25413,7 @@ def test_create_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22411,10 +25425,10 @@ def test_create_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_issue_model(request) + client.delete_issue_model(request) -def test_create_issue_model_rest_flattened(): +def test_delete_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22426,12 +25440,13 @@ def test_create_issue_model_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - issue_model=resources.IssueModel(name="name_value"), + name="name_value", ) mock_args.update(sample_request) @@ -22442,20 +25457,20 @@ def test_create_issue_model_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_issue_model(**mock_args) + client.delete_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/issueModels" + "%s/v1/{name=projects/*/locations/*/issueModels/*}" % client.transport._host, args[1], ) -def test_create_issue_model_rest_flattened_error(transport: str = "rest"): +def test_delete_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22464,163 +25479,54 @@ def test_create_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_issue_model( - contact_center_insights.CreateIssueModelRequest(), - parent="parent_value", - issue_model=resources.IssueModel(name="name_value"), + client.delete_issue_model( + contact_center_insights.DeleteIssueModelRequest(), + name="name_value", ) -def test_create_issue_model_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.UpdateIssueModelRequest, - dict, - ], -) -def test_update_issue_model_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "issue_model": { - "name": "projects/sample1/locations/sample2/issueModels/sample3" - } - } - request_init["issue_model"] = { - "name": "projects/sample1/locations/sample2/issueModels/sample3", - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "issue_count": 1201, - "state": 1, - "input_data_config": { - "medium": 1, - "training_conversations_count": 3025, - "filter": "filter_value", - }, - "training_stats": { - 
"analyzed_conversations_count": 3021, - "unclassified_conversations_count": 3439, - "issue_stats": {}, - }, - "model_type": 1, - "language_code": "language_code_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdateIssueModelRequest.meta.fields[ - "issue_model" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["issue_model"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for 
subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +def test_delete_issue_model_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["issue_model"][field])): - del request_init["issue_model"][field][i][subfield] - else: - del request_init["issue_model"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.DeployIssueModelRequest, + dict, + ], +) +def test_deploy_issue_model_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.IssueModel( - name="name_value", - display_name="display_name_value", - issue_count=1201, - state=resources.IssueModel.State.UNDEPLOYED, - model_type=resources.IssueModel.ModelType.TYPE_V1, - language_code="language_code_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_issue_model(request) + response = client.deploy_issue_model(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.IssueModel) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.issue_count == 1201 - assert response.state == resources.IssueModel.State.UNDEPLOYED - assert response.model_type == resources.IssueModel.ModelType.TYPE_V1 - assert response.language_code == "language_code_value" + assert response.operation.name == "operations/spam" -def test_update_issue_model_rest_use_cached_wrapped_rpc(): +def test_deploy_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22635,7 +25541,7 @@ def test_update_issue_model_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_issue_model in client._transport._wrapped_methods + client._transport.deploy_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -22644,28 +25550,33 @@ def test_update_issue_model_rest_use_cached_wrapped_rpc(): "foo" 
# operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_issue_model + client._transport.deploy_issue_model ] = mock_rpc request = {} - client.update_issue_model(request) + client.deploy_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_issue_model(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.deploy_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_issue_model_rest_required_fields( - request_type=contact_center_insights.UpdateIssueModelRequest, +def test_deploy_issue_model_rest_required_fields( + request_type=contact_center_insights.DeployIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22676,19 +25587,21 @@ def test_update_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_issue_model._get_unset_required_fields(jsonified_request) + ).deploy_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_issue_model._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).deploy_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22697,7 +25610,7 @@ def test_update_issue_model_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.IssueModel() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22709,7 +25622,7 @@ def test_update_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -22717,32 +25630,29 @@ def test_update_issue_model_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_issue_model(request) + response = client.deploy_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_issue_model_rest_unset_required_fields(): +def test_deploy_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.update_issue_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("issueModel",))) + unset_fields = transport.deploy_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_issue_model_rest_interceptors(null_interceptor): +def test_deploy_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22755,14 +25665,16 @@ def test_update_issue_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_issue_model" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_deploy_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_deploy_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdateIssueModelRequest.pb( - contact_center_insights.UpdateIssueModelRequest() + pb_message = contact_center_insights.DeployIssueModelRequest.pb( + contact_center_insights.DeployIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -22774,17 +25686,19 @@ def test_update_issue_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.IssueModel.to_json(resources.IssueModel()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.UpdateIssueModelRequest() + request = 
contact_center_insights.DeployIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.IssueModel() + post.return_value = operations_pb2.Operation() - client.update_issue_model( + client.deploy_issue_model( request, metadata=[ ("key", "val"), @@ -22796,9 +25710,9 @@ def test_update_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_issue_model_rest_bad_request( +def test_deploy_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.UpdateIssueModelRequest, + request_type=contact_center_insights.DeployIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22806,11 +25720,7 @@ def test_update_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "issue_model": { - "name": "projects/sample1/locations/sample2/issueModels/sample3" - } - } + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22822,10 +25732,10 @@ def test_update_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_issue_model(request) + client.deploy_issue_model(request) -def test_update_issue_model_rest_flattened(): +def test_deploy_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22834,45 +25744,40 @@ def test_update_issue_model_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.IssueModel() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "issue_model": { - "name": "projects/sample1/locations/sample2/issueModels/sample3" - } + "name": "projects/sample1/locations/sample2/issueModels/sample3" } # get truthy value for each flattened field mock_args = dict( - issue_model=resources.IssueModel(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_issue_model(**mock_args) + client.deploy_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{issue_model.name=projects/*/locations/*/issueModels/*}" + "%s/v1/{name=projects/*/locations/*/issueModels/*}:deploy" % client.transport._host, args[1], ) -def test_update_issue_model_rest_flattened_error(transport: str = "rest"): +def test_deploy_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22881,14 +25786,13 @@ def test_update_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_issue_model( - contact_center_insights.UpdateIssueModelRequest(), - issue_model=resources.IssueModel(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.deploy_issue_model( + contact_center_insights.DeployIssueModelRequest(), + name="name_value", ) -def test_update_issue_model_rest_error(): +def test_deploy_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22897,11 +25801,11 @@ def test_update_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetIssueModelRequest, + contact_center_insights.UndeployIssueModelRequest, dict, ], ) -def test_get_issue_model_rest(request_type): +def test_undeploy_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22914,37 +25818,22 @@ def test_get_issue_model_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.IssueModel( - name="name_value", - display_name="display_name_value", - issue_count=1201, - state=resources.IssueModel.State.UNDEPLOYED, - model_type=resources.IssueModel.ModelType.TYPE_V1, - language_code="language_code_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_issue_model(request) + response = client.undeploy_issue_model(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.IssueModel) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.issue_count == 1201 - assert response.state == resources.IssueModel.State.UNDEPLOYED - assert response.model_type == resources.IssueModel.ModelType.TYPE_V1 - assert response.language_code == "language_code_value" + assert response.operation.name == "operations/spam" -def test_get_issue_model_rest_use_cached_wrapped_rpc(): +def test_undeploy_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22958,30 +25847,38 @@ def test_get_issue_model_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_issue_model in client._transport._wrapped_methods + assert ( + client._transport.undeploy_issue_model in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" 
# operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_issue_model] = mock_rpc + client._transport._wrapped_methods[ + client._transport.undeploy_issue_model + ] = mock_rpc request = {} - client.get_issue_model(request) + client.undeploy_issue_model(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_issue_model(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.undeploy_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_issue_model_rest_required_fields( - request_type=contact_center_insights.GetIssueModelRequest, +def test_undeploy_issue_model_rest_required_fields( + request_type=contact_center_insights.UndeployIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -22997,7 +25894,7 @@ def test_get_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_issue_model._get_unset_required_fields(jsonified_request) + ).undeploy_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -23006,7 +25903,7 @@ def test_get_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_issue_model._get_unset_required_fields(jsonified_request) + ).undeploy_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -23020,7 +25917,7 @@ def test_get_issue_model_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the 
returned response. - return_value = resources.IssueModel() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23032,39 +25929,37 @@ def test_get_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_issue_model(request) + response = client.undeploy_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_issue_model_rest_unset_required_fields(): +def test_undeploy_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_issue_model._get_unset_required_fields({}) + unset_fields = transport.undeploy_issue_model._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_issue_model_rest_interceptors(null_interceptor): +def test_undeploy_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23077,14 +25972,16 @@ def 
test_get_issue_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_issue_model" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_undeploy_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_undeploy_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetIssueModelRequest.pb( - contact_center_insights.GetIssueModelRequest() + pb_message = contact_center_insights.UndeployIssueModelRequest.pb( + contact_center_insights.UndeployIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -23096,17 +25993,19 @@ def test_get_issue_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.IssueModel.to_json(resources.IssueModel()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.GetIssueModelRequest() + request = contact_center_insights.UndeployIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.IssueModel() + post.return_value = operations_pb2.Operation() - client.get_issue_model( + client.undeploy_issue_model( request, metadata=[ ("key", "val"), @@ -23118,8 +26017,9 @@ def test_get_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_issue_model_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetIssueModelRequest +def test_undeploy_issue_model_rest_bad_request( + transport: str = "rest", + 
request_type=contact_center_insights.UndeployIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23139,10 +26039,10 @@ def test_get_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_issue_model(request) + client.undeploy_issue_model(request) -def test_get_issue_model_rest_flattened(): +def test_undeploy_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23151,7 +26051,7 @@ def test_get_issue_model_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.IssueModel() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { @@ -23167,26 +26067,24 @@ def test_get_issue_model_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.IssueModel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_issue_model(**mock_args) + client.undeploy_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*}" + "%s/v1/{name=projects/*/locations/*/issueModels/*}:undeploy" % client.transport._host, args[1], ) -def test_get_issue_model_rest_flattened_error(transport: str = "rest"): +def test_undeploy_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23195,13 +26093,13 @@ def test_get_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_issue_model( - contact_center_insights.GetIssueModelRequest(), + client.undeploy_issue_model( + contact_center_insights.UndeployIssueModelRequest(), name="name_value", ) -def test_get_issue_model_rest_error(): +def test_undeploy_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23210,41 +26108,39 @@ def test_get_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListIssueModelsRequest, + contact_center_insights.ExportIssueModelRequest, dict, ], ) -def test_list_issue_models_rest(request_type): +def test_export_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListIssueModelsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssueModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_issue_models(request) + response = client.export_issue_model(request) # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.ListIssueModelsResponse) + assert response.operation.name == "operations/spam" -def test_list_issue_models_rest_use_cached_wrapped_rpc(): +def test_export_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23258,7 +26154,9 @@ def test_list_issue_models_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_issue_models in client._transport._wrapped_methods + assert ( + client._transport.export_issue_model in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -23266,29 +26164,33 @@ def test_list_issue_models_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_issue_models + client._transport.export_issue_model ] = mock_rpc request = {} - client.list_issue_models(request) + client.export_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_issue_models(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_issue_models_rest_required_fields( - request_type=contact_center_insights.ListIssueModelsRequest, +def test_export_issue_model_rest_required_fields( + request_type=contact_center_insights.ExportIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23299,21 +26201,21 @@ def test_list_issue_models_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_issue_models._get_unset_required_fields(jsonified_request) + ).export_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_issue_models._get_unset_required_fields(jsonified_request) + ).export_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23322,7 +26224,7 @@ def 
test_list_issue_models_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListIssueModelsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23334,41 +26236,37 @@ def test_list_issue_models_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssueModelsResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_issue_models(request) + response = client.export_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_issue_models_rest_unset_required_fields(): +def test_export_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_issue_models._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.export_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_issue_models_rest_interceptors(null_interceptor): +def 
test_export_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23381,14 +26279,16 @@ def test_list_issue_models_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_issue_models" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_export_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_issue_models" + transports.ContactCenterInsightsRestInterceptor, "pre_export_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ListIssueModelsRequest.pb( - contact_center_insights.ListIssueModelsRequest() + pb_message = contact_center_insights.ExportIssueModelRequest.pb( + contact_center_insights.ExportIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -23400,21 +26300,19 @@ def test_list_issue_models_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.ListIssueModelsResponse.to_json( - contact_center_insights.ListIssueModelsResponse() - ) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = contact_center_insights.ListIssueModelsRequest() + request = contact_center_insights.ExportIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListIssueModelsResponse() + post.return_value = operations_pb2.Operation() - client.list_issue_models( + client.export_issue_model( request, metadata=[ ("key", "val"), @@ -23426,8 
+26324,9 @@ def test_list_issue_models_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_issue_models_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.ListIssueModelsRequest +def test_export_issue_model_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.ExportIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23435,7 +26334,7 @@ def test_list_issue_models_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23447,10 +26346,10 @@ def test_list_issue_models_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_issue_models(request) + client.export_issue_model(request) -def test_list_issue_models_rest_flattened(): +def test_export_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23459,40 +26358,40 @@ def test_list_issue_models_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListIssueModelsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/issueModels/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssueModelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_issue_models(**mock_args) + client.export_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/issueModels" + "%s/v1/{name=projects/*/locations/*/issueModels/*}:export" % client.transport._host, args[1], ) -def test_list_issue_models_rest_flattened_error(transport: str = "rest"): +def test_export_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23501,13 +26400,13 @@ def test_list_issue_models_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_issue_models( - contact_center_insights.ListIssueModelsRequest(), - parent="parent_value", + client.export_issue_model( + contact_center_insights.ExportIssueModelRequest(), + name="name_value", ) -def test_list_issue_models_rest_error(): +def test_export_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23516,18 +26415,18 @@ def test_list_issue_models_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteIssueModelRequest, + contact_center_insights.ImportIssueModelRequest, dict, ], ) -def test_delete_issue_model_rest(request_type): +def test_import_issue_model_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -23542,13 +26441,13 @@ def test_delete_issue_model_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_issue_model(request) + response = client.import_issue_model(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_delete_issue_model_rest_use_cached_wrapped_rpc(): +def test_import_issue_model_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23563,7 +26462,7 @@ def test_delete_issue_model_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_issue_model in client._transport._wrapped_methods + client._transport.import_issue_model in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -23572,11 +26471,11 @@ def test_delete_issue_model_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_issue_model + client._transport.import_issue_model ] = mock_rpc request = {} - client.delete_issue_model(request) + client.import_issue_model(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -23585,20 +26484,20 @@ def test_delete_issue_model_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_issue_model(request) + client.import_issue_model(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_issue_model_rest_required_fields( - request_type=contact_center_insights.DeleteIssueModelRequest, +def test_import_issue_model_rest_required_fields( + request_type=contact_center_insights.ImportIssueModelRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23609,21 +26508,21 @@ def test_delete_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_issue_model._get_unset_required_fields(jsonified_request) + ).import_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_issue_model._get_unset_required_fields(jsonified_request) + ).import_issue_model._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23644,9 +26543,10 @@ def 
test_delete_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -23656,24 +26556,24 @@ def test_delete_issue_model_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_issue_model(request) + response = client.import_issue_model(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_issue_model_rest_unset_required_fields(): +def test_import_issue_model_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_issue_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_issue_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_issue_model_rest_interceptors(null_interceptor): +def test_import_issue_model_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23688,14 +26588,14 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_delete_issue_model" + transports.ContactCenterInsightsRestInterceptor, "post_import_issue_model" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_issue_model" + 
transports.ContactCenterInsightsRestInterceptor, "pre_import_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.DeleteIssueModelRequest.pb( - contact_center_insights.DeleteIssueModelRequest() + pb_message = contact_center_insights.ImportIssueModelRequest.pb( + contact_center_insights.ImportIssueModelRequest() ) transcode.return_value = { "method": "post", @@ -23711,7 +26611,7 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = contact_center_insights.DeleteIssueModelRequest() + request = contact_center_insights.ImportIssueModelRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -23719,7 +26619,7 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_issue_model( + client.import_issue_model( request, metadata=[ ("key", "val"), @@ -23731,9 +26631,9 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_issue_model_rest_bad_request( +def test_import_issue_model_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.DeleteIssueModelRequest, + request_type=contact_center_insights.ImportIssueModelRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23741,7 +26641,7 @@ def test_delete_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -23753,10 +26653,10 @@ def test_delete_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_issue_model(request) + client.import_issue_model(request) -def test_delete_issue_model_rest_flattened(): +def test_import_issue_model_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23768,13 +26668,11 @@ def test_delete_issue_model_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -23785,20 +26683,20 @@ def test_delete_issue_model_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_issue_model(**mock_args) + client.import_issue_model(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*}" + "%s/v1/{parent=projects/*/locations/*}/issueModels:import" % client.transport._host, args[1], ) -def test_delete_issue_model_rest_flattened_error(transport: str = "rest"): +def test_import_issue_model_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23807,13 +26705,13 @@ def test_delete_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_issue_model( - contact_center_insights.DeleteIssueModelRequest(), - name="name_value", + client.import_issue_model( + contact_center_insights.ImportIssueModelRequest(), + parent="parent_value", ) -def test_delete_issue_model_rest_error(): +def test_import_issue_model_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23822,39 +26720,52 @@ def test_delete_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeployIssueModelRequest, + contact_center_insights.GetIssueRequest, dict, ], ) -def test_deploy_issue_model_rest(request_type): +def test_get_issue_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Issue( + name="name_value", + display_name="display_name_value", + sample_utterances=["sample_utterances_value"], + display_description="display_description_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Issue.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.deploy_issue_model(request) + response = client.get_issue(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, resources.Issue) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" -def test_deploy_issue_model_rest_use_cached_wrapped_rpc(): +def test_get_issue_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23868,38 +26779,30 @@ def test_deploy_issue_model_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.deploy_issue_model in client._transport._wrapped_methods - ) + assert client._transport.get_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.deploy_issue_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_issue] = mock_rpc request = {} - client.deploy_issue_model(request) + client.get_issue(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.deploy_issue_model(request) + client.get_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_deploy_issue_model_rest_required_fields( - request_type=contact_center_insights.DeployIssueModelRequest, +def test_get_issue_rest_required_fields( + request_type=contact_center_insights.GetIssueRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -23915,7 +26818,7 @@ def test_deploy_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).deploy_issue_model._get_unset_required_fields(jsonified_request) + ).get_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -23924,7 +26827,7 @@ def test_deploy_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).deploy_issue_model._get_unset_required_fields(jsonified_request) + ).get_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -23938,7 +26841,7 @@ def test_deploy_issue_model_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Issue() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23950,37 +26853,39 @@ def test_deploy_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Issue.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.deploy_issue_model(request) + response = client.get_issue(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_deploy_issue_model_rest_unset_required_fields(): +def test_get_issue_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.deploy_issue_model._get_unset_required_fields({}) + unset_fields = transport.get_issue._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_deploy_issue_model_rest_interceptors(null_interceptor): +def test_get_issue_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23993,16 +26898,14 @@ def test_deploy_issue_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, 
"transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_deploy_issue_model" + transports.ContactCenterInsightsRestInterceptor, "post_get_issue" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_deploy_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_get_issue" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.DeployIssueModelRequest.pb( - contact_center_insights.DeployIssueModelRequest() + pb_message = contact_center_insights.GetIssueRequest.pb( + contact_center_insights.GetIssueRequest() ) transcode.return_value = { "method": "post", @@ -24014,19 +26917,17 @@ def test_deploy_issue_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = resources.Issue.to_json(resources.Issue()) - request = contact_center_insights.DeployIssueModelRequest() + request = contact_center_insights.GetIssueRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = resources.Issue() - client.deploy_issue_model( + client.get_issue( request, metadata=[ ("key", "val"), @@ -24038,9 +26939,8 @@ def test_deploy_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def test_deploy_issue_model_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.DeployIssueModelRequest, +def test_get_issue_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.GetIssueRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24048,7 +26948,9 
@@ def test_deploy_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -24060,10 +26962,10 @@ def test_deploy_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.deploy_issue_model(request) + client.get_issue(request) -def test_deploy_issue_model_rest_flattened(): +def test_get_issue_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24072,11 +26974,11 @@ def test_deploy_issue_model_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = resources.Issue() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3" + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" } # get truthy value for each flattened field @@ -24088,24 +26990,26 @@ def test_deploy_issue_model_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Issue.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.deploy_issue_model(**mock_args) + client.get_issue(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*}:deploy" + "%s/v1/{name=projects/*/locations/*/issueModels/*/issues/*}" % client.transport._host, args[1], ) -def test_deploy_issue_model_rest_flattened_error(transport: str = "rest"): +def test_get_issue_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24114,13 +27018,13 @@ def test_deploy_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.deploy_issue_model( - contact_center_insights.DeployIssueModelRequest(), + client.get_issue( + contact_center_insights.GetIssueRequest(), name="name_value", ) -def test_deploy_issue_model_rest_error(): +def test_get_issue_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24129,39 +27033,41 @@ def test_deploy_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UndeployIssueModelRequest, + contact_center_insights.ListIssuesRequest, dict, ], ) -def test_undeploy_issue_model_rest(request_type): +def test_list_issues_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssuesResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssuesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.undeploy_issue_model(request) + response = client.list_issues(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, contact_center_insights.ListIssuesResponse) -def test_undeploy_issue_model_rest_use_cached_wrapped_rpc(): +def test_list_issues_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24175,43 +27081,35 @@ def test_undeploy_issue_model_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.undeploy_issue_model in client._transport._wrapped_methods - ) + assert client._transport.list_issues in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.undeploy_issue_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_issues] = mock_rpc request = {} - client.undeploy_issue_model(request) + client.list_issues(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.undeploy_issue_model(request) + client.list_issues(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_undeploy_issue_model_rest_required_fields( - request_type=contact_center_insights.UndeployIssueModelRequest, +def test_list_issues_rest_required_fields( + request_type=contact_center_insights.ListIssuesRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24222,21 +27120,21 @@ def test_undeploy_issue_model_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).undeploy_issue_model._get_unset_required_fields(jsonified_request) + ).list_issues._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).undeploy_issue_model._get_unset_required_fields(jsonified_request) + ).list_issues._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24245,7 +27143,7 @@ def test_undeploy_issue_model_rest_required_fields( request 
= request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssuesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24257,37 +27155,39 @@ def test_undeploy_issue_model_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssuesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.undeploy_issue_model(request) + response = client.list_issues(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_undeploy_issue_model_rest_unset_required_fields(): +def test_list_issues_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.undeploy_issue_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_issues._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_undeploy_issue_model_rest_interceptors(null_interceptor): +def test_list_issues_rest_interceptors(null_interceptor): transport = 
transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24300,16 +27200,14 @@ def test_undeploy_issue_model_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_undeploy_issue_model" + transports.ContactCenterInsightsRestInterceptor, "post_list_issues" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_undeploy_issue_model" + transports.ContactCenterInsightsRestInterceptor, "pre_list_issues" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UndeployIssueModelRequest.pb( - contact_center_insights.UndeployIssueModelRequest() + pb_message = contact_center_insights.ListIssuesRequest.pb( + contact_center_insights.ListIssuesRequest() ) transcode.return_value = { "method": "post", @@ -24321,19 +27219,19 @@ def test_undeploy_issue_model_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = contact_center_insights.ListIssuesResponse.to_json( + contact_center_insights.ListIssuesResponse() ) - request = contact_center_insights.UndeployIssueModelRequest() + request = contact_center_insights.ListIssuesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = contact_center_insights.ListIssuesResponse() - client.undeploy_issue_model( + client.list_issues( request, metadata=[ ("key", "val"), @@ -24345,9 +27243,8 @@ def test_undeploy_issue_model_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_undeploy_issue_model_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.UndeployIssueModelRequest, +def test_list_issues_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.ListIssuesRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24355,7 +27252,7 @@ def test_undeploy_issue_model_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/issueModels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -24367,10 +27264,10 @@ def test_undeploy_issue_model_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.undeploy_issue_model(request) + client.list_issues(request) -def test_undeploy_issue_model_rest_flattened(): +def test_list_issues_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24379,40 +27276,42 @@ def test_undeploy_issue_model_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = contact_center_insights.ListIssuesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3" + "parent": "projects/sample1/locations/sample2/issueModels/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = contact_center_insights.ListIssuesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.undeploy_issue_model(**mock_args) + client.list_issues(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*}:undeploy" + "%s/v1/{parent=projects/*/locations/*/issueModels/*}/issues" % client.transport._host, args[1], ) -def test_undeploy_issue_model_rest_flattened_error(transport: str = "rest"): +def test_list_issues_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24421,13 +27320,13 @@ def test_undeploy_issue_model_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.undeploy_issue_model( - contact_center_insights.UndeployIssueModelRequest(), - name="name_value", + client.list_issues( + contact_center_insights.ListIssuesRequest(), + parent="parent_value", ) -def test_undeploy_issue_model_rest_error(): +def test_list_issues_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24436,11 +27335,11 @@ def test_undeploy_issue_model_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetIssueRequest, + contact_center_insights.UpdateIssueRequest, dict, ], ) -def test_get_issue_rest(request_type): +def test_update_issue_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24448,8 +27347,85 @@ def test_get_issue_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + "issue": { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } } + request_init["issue"] = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "sample_utterances": ["sample_utterances_value1", "sample_utterances_value2"], + "display_description": "display_description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdateIssueRequest.meta.fields["issue"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["issue"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + 
# Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["issue"][field])): + del request_init["issue"][field][i][subfield] + else: + del request_init["issue"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -24459,6 +27435,7 @@ def test_get_issue_rest(request_type): name="name_value", display_name="display_name_value", sample_utterances=["sample_utterances_value"], + display_description="display_description_value", ) # Wrap the value into a proper Response obj @@ -24470,16 +27447,17 @@ def test_get_issue_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_issue(request) + response = client.update_issue(request) # Establish that the response is the type that we expect. 
assert isinstance(response, resources.Issue) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.sample_utterances == ["sample_utterances_value"] + assert response.display_description == "display_description_value" -def test_get_issue_rest_use_cached_wrapped_rpc(): +def test_update_issue_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24493,35 +27471,34 @@ def test_get_issue_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_issue in client._transport._wrapped_methods + assert client._transport.update_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_issue] = mock_rpc + client._transport._wrapped_methods[client._transport.update_issue] = mock_rpc request = {} - client.get_issue(request) + client.update_issue(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_issue(request) + client.update_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_issue_rest_required_fields( - request_type=contact_center_insights.GetIssueRequest, +def test_update_issue_rest_required_fields( + request_type=contact_center_insights.UpdateIssueRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24532,21 +27509,19 @@ def test_get_issue_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_issue._get_unset_required_fields(jsonified_request) + ).update_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" + # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_issue._get_unset_required_fields(jsonified_request) + ).update_issue._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24567,9 +27542,10 @@ def test_get_issue_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -24582,24 +27558,24 @@ def test_get_issue_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_issue(request) + response = client.update_issue(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_issue_rest_unset_required_fields(): +def test_update_issue_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_issue._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_issue._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("issue",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_issue_rest_interceptors(null_interceptor): +def test_update_issue_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24612,14 +27588,14 @@ def test_get_issue_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, 
mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_issue" + transports.ContactCenterInsightsRestInterceptor, "post_update_issue" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_issue" + transports.ContactCenterInsightsRestInterceptor, "pre_update_issue" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetIssueRequest.pb( - contact_center_insights.GetIssueRequest() + pb_message = contact_center_insights.UpdateIssueRequest.pb( + contact_center_insights.UpdateIssueRequest() ) transcode.return_value = { "method": "post", @@ -24633,7 +27609,7 @@ def test_get_issue_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = resources.Issue.to_json(resources.Issue()) - request = contact_center_insights.GetIssueRequest() + request = contact_center_insights.UpdateIssueRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -24641,7 +27617,7 @@ def test_get_issue_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = resources.Issue() - client.get_issue( + client.update_issue( request, metadata=[ ("key", "val"), @@ -24653,8 +27629,8 @@ def test_get_issue_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_issue_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetIssueRequest +def test_update_issue_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.UpdateIssueRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24663,7 +27639,9 @@ def test_get_issue_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + "issue": { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } } request = 
request_type(**request_init) @@ -24676,10 +27654,10 @@ def test_get_issue_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_issue(request) + client.update_issue(request) -def test_get_issue_rest_flattened(): +def test_update_issue_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24692,12 +27670,15 @@ def test_get_issue_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + "issue": { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -24710,20 +27691,20 @@ def test_get_issue_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_issue(**mock_args) + client.update_issue(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*/issues/*}" + "%s/v1/{issue.name=projects/*/locations/*/issueModels/*/issues/*}" % client.transport._host, args[1], ) -def test_get_issue_rest_flattened_error(transport: str = "rest"): +def test_update_issue_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24732,13 +27713,14 @@ def test_get_issue_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_issue( - contact_center_insights.GetIssueRequest(), - name="name_value", + client.update_issue( + contact_center_insights.UpdateIssueRequest(), + issue=resources.Issue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_issue_rest_error(): +def test_update_issue_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24747,41 +27729,41 @@ def test_get_issue_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.ListIssuesRequest, + contact_center_insights.DeleteIssueRequest, dict, ], ) -def test_list_issues_rest(request_type): +def test_delete_issue_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/issueModels/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListIssuesResponse() + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssuesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_issues(request) + response = client.delete_issue(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, contact_center_insights.ListIssuesResponse) + assert response is None -def test_list_issues_rest_use_cached_wrapped_rpc(): +def test_delete_issue_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24795,35 +27777,35 @@ def test_list_issues_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_issues in client._transport._wrapped_methods + assert client._transport.delete_issue in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_issues] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_issue] = mock_rpc request = {} - client.list_issues(request) + client.delete_issue(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_issues(request) + client.delete_issue(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_issues_rest_required_fields( - request_type=contact_center_insights.ListIssuesRequest, +def test_delete_issue_rest_required_fields( + request_type=contact_center_insights.DeleteIssueRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24834,21 +27816,21 @@ def test_list_issues_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_issues._get_unset_required_fields(jsonified_request) + ).delete_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_issues._get_unset_required_fields(jsonified_request) + ).delete_issue._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24857,7 +27839,7 @@ def test_list_issues_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.ListIssuesResponse() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24869,39 +27851,36 @@ def test_list_issues_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssuesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_issues(request) + response = client.delete_issue(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_issues_rest_unset_required_fields(): +def test_delete_issue_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_issues._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.delete_issue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_issues_rest_interceptors(null_interceptor): +def test_delete_issue_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24914,14 +27893,11 @@ def test_list_issues_rest_interceptors(null_interceptor): ) as req, 
mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_issues" - ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_issues" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_issue" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = contact_center_insights.ListIssuesRequest.pb( - contact_center_insights.ListIssuesRequest() + pb_message = contact_center_insights.DeleteIssueRequest.pb( + contact_center_insights.DeleteIssueRequest() ) transcode.return_value = { "method": "post", @@ -24933,19 +27909,15 @@ def test_list_issues_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = contact_center_insights.ListIssuesResponse.to_json( - contact_center_insights.ListIssuesResponse() - ) - request = contact_center_insights.ListIssuesRequest() + request = contact_center_insights.DeleteIssueRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListIssuesResponse() - client.list_issues( + client.delete_issue( request, metadata=[ ("key", "val"), @@ -24954,11 +27926,10 @@ def test_list_issues_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_list_issues_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.ListIssuesRequest +def test_delete_issue_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.DeleteIssueRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24966,7 +27937,9 @@ def test_list_issues_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": 
"projects/sample1/locations/sample2/issueModels/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -24978,10 +27951,10 @@ def test_list_issues_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_issues(request) + client.delete_issue(request) -def test_list_issues_rest_flattened(): +def test_delete_issue_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24990,42 +27963,40 @@ def test_list_issues_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListIssuesResponse() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/issueModels/sample3" + "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = contact_center_insights.ListIssuesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_issues(**mock_args) + client.delete_issue(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/issueModels/*}/issues" + "%s/v1/{name=projects/*/locations/*/issueModels/*/issues/*}" % client.transport._host, args[1], ) -def test_list_issues_rest_flattened_error(transport: str = "rest"): +def test_delete_issue_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25034,141 +28005,62 @@ def test_list_issues_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_issues( - contact_center_insights.ListIssuesRequest(), - parent="parent_value", - ) - - -def test_list_issues_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.UpdateIssueRequest, - dict, - ], -) -def test_update_issue_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "issue": { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } - } - request_init["issue"] = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4", - "display_name": "display_name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "sample_utterances": ["sample_utterances_value1", "sample_utterances_value2"], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdateIssueRequest.meta.fields["issue"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["issue"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + client.delete_issue( + contact_center_insights.DeleteIssueRequest(), + name="name_value", + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove 
fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["issue"][field])): - del request_init["issue"][field][i][subfield] - else: - del request_init["issue"][field][subfield] +def test_delete_issue_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.CalculateIssueModelStatsRequest, + dict, + ], +) +def test_calculate_issue_model_stats_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Issue( - name="name_value", - display_name="display_name_value", - sample_utterances=["sample_utterances_value"], - ) + return_value = contact_center_insights.CalculateIssueModelStatsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Issue.pb(return_value) + return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_issue(request) + response = client.calculate_issue_model_stats(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Issue) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.sample_utterances == ["sample_utterances_value"] + assert isinstance( + response, contact_center_insights.CalculateIssueModelStatsResponse + ) -def test_update_issue_rest_use_cached_wrapped_rpc(): +def test_calculate_issue_model_stats_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25182,34 +28074,40 @@ def test_update_issue_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_issue in client._transport._wrapped_methods + assert ( + client._transport.calculate_issue_model_stats + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_issue] = mock_rpc + client._transport._wrapped_methods[ + client._transport.calculate_issue_model_stats + ] = mock_rpc request = {} - client.update_issue(request) + client.calculate_issue_model_stats(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_issue(request) + client.calculate_issue_model_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_issue_rest_required_fields( - request_type=contact_center_insights.UpdateIssueRequest, +def test_calculate_issue_model_stats_rest_required_fields( + request_type=contact_center_insights.CalculateIssueModelStatsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} + request_init["issue_model"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25220,19 +28118,21 @@ def test_update_issue_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_issue._get_unset_required_fields(jsonified_request) + ).calculate_issue_model_stats._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["issueModel"] = "issue_model_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_issue._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).calculate_issue_model_stats._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "issueModel" in jsonified_request + assert jsonified_request["issueModel"] == "issue_model_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25241,7 +28141,7 @@ def test_update_issue_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Issue() + return_value = contact_center_insights.CalculateIssueModelStatsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25253,40 +28153,41 @@ def test_update_issue_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Issue.pb(return_value) + return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_issue(request) + response = client.calculate_issue_model_stats(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_issue_rest_unset_required_fields(): +def test_calculate_issue_model_stats_rest_unset_required_fields(): transport = 
transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_issue._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("issue",))) + unset_fields = transport.calculate_issue_model_stats._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("issueModel",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_issue_rest_interceptors(null_interceptor): +def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25299,14 +28200,16 @@ def test_update_issue_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_issue" + transports.ContactCenterInsightsRestInterceptor, + "post_calculate_issue_model_stats", ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_issue" + transports.ContactCenterInsightsRestInterceptor, + "pre_calculate_issue_model_stats", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdateIssueRequest.pb( - contact_center_insights.UpdateIssueRequest() + pb_message = contact_center_insights.CalculateIssueModelStatsRequest.pb( + contact_center_insights.CalculateIssueModelStatsRequest() ) transcode.return_value = { "method": "post", @@ -25318,17 +28221,21 @@ def test_update_issue_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Issue.to_json(resources.Issue()) + req.return_value._content = ( + contact_center_insights.CalculateIssueModelStatsResponse.to_json( + 
contact_center_insights.CalculateIssueModelStatsResponse() + ) + ) - request = contact_center_insights.UpdateIssueRequest() + request = contact_center_insights.CalculateIssueModelStatsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Issue() + post.return_value = contact_center_insights.CalculateIssueModelStatsResponse() - client.update_issue( + client.calculate_issue_model_stats( request, metadata=[ ("key", "val"), @@ -25340,8 +28247,9 @@ def test_update_issue_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_issue_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.UpdateIssueRequest +def test_calculate_issue_model_stats_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.CalculateIssueModelStatsRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25350,9 +28258,7 @@ def test_update_issue_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "issue": { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" } request = request_type(**request_init) @@ -25365,10 +28271,10 @@ def test_update_issue_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_issue(request) + client.calculate_issue_model_stats(request) -def test_update_issue_rest_flattened(): +def test_calculate_issue_model_stats_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25377,19 +28283,16 @@ def test_update_issue_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Issue() + return_value = contact_center_insights.CalculateIssueModelStatsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "issue": { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" } # get truthy value for each flattened field mock_args = dict( - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + issue_model="issue_model_value", ) mock_args.update(sample_request) @@ -25397,25 +28300,27 @@ def test_update_issue_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Issue.pb(return_value) + return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_issue(**mock_args) + client.calculate_issue_model_stats(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{issue.name=projects/*/locations/*/issueModels/*/issues/*}" + "%s/v1/{issue_model=projects/*/locations/*/issueModels/*}:calculateIssueModelStats" % client.transport._host, args[1], ) -def test_update_issue_rest_flattened_error(transport: str = "rest"): +def test_calculate_issue_model_stats_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25424,14 +28329,13 @@ def test_update_issue_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_issue( - contact_center_insights.UpdateIssueRequest(), - issue=resources.Issue(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.calculate_issue_model_stats( + contact_center_insights.CalculateIssueModelStatsRequest(), + issue_model="issue_model_value", ) -def test_update_issue_rest_error(): +def test_calculate_issue_model_stats_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25440,41 +28344,149 @@ def test_update_issue_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeleteIssueRequest, + contact_center_insights.CreatePhraseMatcherRequest, dict, ], ) -def test_delete_issue_rest(request_type): +def test_create_phrase_matcher_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + 
request_init["phrase_matcher"] = { + "name": "name_value", + "revision_id": "revision_id_value", + "version_tag": "version_tag_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "display_name": "display_name_value", + "type_": 1, + "active": True, + "phrase_match_rule_groups": [ + { + "type_": 1, + "phrase_match_rules": [ + { + "query": "query_value", + "negated": True, + "config": {"exact_match_config": {"case_sensitive": True}}, + } + ], + } + ], + "activation_update_time": {}, + "role_match": 1, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.CreatePhraseMatcherRequest.meta.fields[ + "phrase_matcher" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["phrase_matcher"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["phrase_matcher"][field])): + del request_init["phrase_matcher"][field][i][subfield] + else: + 
del request_init["phrase_matcher"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.PhraseMatcher.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_issue(request) + response = client.create_phrase_matcher(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_delete_issue_rest_use_cached_wrapped_rpc(): +def test_create_phrase_matcher_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25488,35 +28500,40 @@ def test_delete_issue_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_issue in client._transport._wrapped_methods + assert ( + client._transport.create_phrase_matcher + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_issue] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_phrase_matcher + ] = mock_rpc request = {} - client.delete_issue(request) + client.create_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_issue(request) + client.create_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_issue_rest_required_fields( - request_type=contact_center_insights.DeleteIssueRequest, +def test_create_phrase_matcher_rest_required_fields( + request_type=contact_center_insights.CreatePhraseMatcherRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25527,21 +28544,21 @@ def test_delete_issue_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_issue._get_unset_required_fields(jsonified_request) + ).create_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_issue._get_unset_required_fields(jsonified_request) + ).create_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25550,7 +28567,7 @@ def test_delete_issue_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.PhraseMatcher() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25562,36 +28579,48 @@ def test_delete_issue_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.PhraseMatcher.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_issue(request) + response = client.create_phrase_matcher(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_issue_rest_unset_required_fields(): +def test_create_phrase_matcher_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_issue._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_phrase_matcher._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "phraseMatcher", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_issue_rest_interceptors(null_interceptor): +def test_create_phrase_matcher_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25604,11 
+28633,14 @@ def test_delete_issue_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_issue" + transports.ContactCenterInsightsRestInterceptor, "post_create_phrase_matcher" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_create_phrase_matcher" ) as pre: pre.assert_not_called() - pb_message = contact_center_insights.DeleteIssueRequest.pb( - contact_center_insights.DeleteIssueRequest() + post.assert_not_called() + pb_message = contact_center_insights.CreatePhraseMatcherRequest.pb( + contact_center_insights.CreatePhraseMatcherRequest() ) transcode.return_value = { "method": "post", @@ -25620,15 +28652,19 @@ def test_delete_issue_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.PhraseMatcher.to_json( + resources.PhraseMatcher() + ) - request = contact_center_insights.DeleteIssueRequest() + request = contact_center_insights.CreatePhraseMatcherRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.PhraseMatcher() - client.delete_issue( + client.create_phrase_matcher( request, metadata=[ ("key", "val"), @@ -25637,10 +28673,12 @@ def test_delete_issue_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_issue_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.DeleteIssueRequest +def test_create_phrase_matcher_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.CreatePhraseMatcherRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25648,9 +28686,7 @@ def test_delete_issue_rest_bad_request( ) # send a request that 
will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25662,10 +28698,10 @@ def test_delete_issue_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_issue(request) + client.create_phrase_matcher(request) -def test_delete_issue_rest_flattened(): +def test_create_phrase_matcher_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25674,40 +28710,41 @@ def test_delete_issue_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = resources.PhraseMatcher() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/issueModels/sample3/issues/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.PhraseMatcher.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_issue(**mock_args) + client.create_phrase_matcher(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/issueModels/*/issues/*}" + "%s/v1/{parent=projects/*/locations/*}/phraseMatchers" % client.transport._host, args[1], ) -def test_delete_issue_rest_flattened_error(transport: str = "rest"): +def test_create_phrase_matcher_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25716,13 +28753,14 @@ def test_delete_issue_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_issue( - contact_center_insights.DeleteIssueRequest(), - name="name_value", + client.create_phrase_matcher( + contact_center_insights.CreatePhraseMatcherRequest(), + parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), ) -def test_delete_issue_rest_error(): +def test_create_phrase_matcher_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25731,47 +28769,56 @@ def test_delete_issue_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CalculateIssueModelStatsRequest, + contact_center_insights.GetPhraseMatcherRequest, dict, ], ) -def test_calculate_issue_model_stats_rest(request_type): +def test_get_phrase_matcher_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/phraseMatchers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.CalculateIssueModelStatsResponse() + return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.calculate_issue_model_stats(request) + response = client.get_phrase_matcher(request) # Establish that the response is the type that we expect. 
- assert isinstance( - response, contact_center_insights.CalculateIssueModelStatsResponse - ) + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_calculate_issue_model_stats_rest_use_cached_wrapped_rpc(): +def test_get_phrase_matcher_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25786,8 +28833,7 @@ def test_calculate_issue_model_stats_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.calculate_issue_model_stats - in client._transport._wrapped_methods + client._transport.get_phrase_matcher in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -25796,29 +28842,29 @@ def test_calculate_issue_model_stats_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.calculate_issue_model_stats + client._transport.get_phrase_matcher ] = mock_rpc request = {} - client.calculate_issue_model_stats(request) + client.get_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.calculate_issue_model_stats(request) + client.get_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_calculate_issue_model_stats_rest_required_fields( - request_type=contact_center_insights.CalculateIssueModelStatsRequest, +def test_get_phrase_matcher_rest_required_fields( + request_type=contact_center_insights.GetPhraseMatcherRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["issue_model"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25829,21 +28875,21 @@ def test_calculate_issue_model_stats_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).calculate_issue_model_stats._get_unset_required_fields(jsonified_request) + ).get_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["issueModel"] = "issue_model_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).calculate_issue_model_stats._get_unset_required_fields(jsonified_request) + ).get_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "issueModel" in jsonified_request - assert jsonified_request["issueModel"] == "issue_model_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25852,7 +28898,7 @@ def test_calculate_issue_model_stats_rest_required_fields( request = request_type(**request_init) # 
Designate an appropriate value for the returned response. - return_value = contact_center_insights.CalculateIssueModelStatsResponse() + return_value = resources.PhraseMatcher() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25873,32 +28919,30 @@ def test_calculate_issue_model_stats_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.calculate_issue_model_stats(request) + response = client.get_phrase_matcher(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_calculate_issue_model_stats_rest_unset_required_fields(): +def test_get_phrase_matcher_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.calculate_issue_model_stats._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("issueModel",))) + unset_fields = transport.get_phrase_matcher._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): +def test_get_phrase_matcher_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25911,16 +28955,14 @@ def 
test_calculate_issue_model_stats_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "post_calculate_issue_model_stats", + transports.ContactCenterInsightsRestInterceptor, "post_get_phrase_matcher" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, - "pre_calculate_issue_model_stats", + transports.ContactCenterInsightsRestInterceptor, "pre_get_phrase_matcher" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CalculateIssueModelStatsRequest.pb( - contact_center_insights.CalculateIssueModelStatsRequest() + pb_message = contact_center_insights.GetPhraseMatcherRequest.pb( + contact_center_insights.GetPhraseMatcherRequest() ) transcode.return_value = { "method": "post", @@ -25932,21 +28974,19 @@ def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.CalculateIssueModelStatsResponse.to_json( - contact_center_insights.CalculateIssueModelStatsResponse() - ) + req.return_value._content = resources.PhraseMatcher.to_json( + resources.PhraseMatcher() ) - request = contact_center_insights.CalculateIssueModelStatsRequest() + request = contact_center_insights.GetPhraseMatcherRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + post.return_value = resources.PhraseMatcher() - client.calculate_issue_model_stats( + client.get_phrase_matcher( request, metadata=[ ("key", "val"), @@ -25958,9 +28998,9 @@ def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): post.assert_called_once() -def test_calculate_issue_model_stats_rest_bad_request( +def 
test_get_phrase_matcher_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.CalculateIssueModelStatsRequest, + request_type=contact_center_insights.GetPhraseMatcherRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25968,9 +29008,7 @@ def test_calculate_issue_model_stats_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/phraseMatchers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25982,10 +29020,10 @@ def test_calculate_issue_model_stats_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.calculate_issue_model_stats(request) + client.get_phrase_matcher(request) -def test_calculate_issue_model_stats_rest_flattened(): +def test_get_phrase_matcher_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25994,16 +29032,16 @@ def test_calculate_issue_model_stats_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.CalculateIssueModelStatsResponse() + return_value = resources.PhraseMatcher() # get arguments that satisfy an http rule for this method sample_request = { - "issue_model": "projects/sample1/locations/sample2/issueModels/sample3" + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" } # get truthy value for each flattened field mock_args = dict( - issue_model="issue_model_value", + name="name_value", ) mock_args.update(sample_request) @@ -26011,27 +29049,25 @@ def test_calculate_issue_model_stats_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateIssueModelStatsResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.calculate_issue_model_stats(**mock_args) + client.get_phrase_matcher(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{issue_model=projects/*/locations/*/issueModels/*}:calculateIssueModelStats" + "%s/v1/{name=projects/*/locations/*/phraseMatchers/*}" % client.transport._host, args[1], ) -def test_calculate_issue_model_stats_rest_flattened_error(transport: str = "rest"): +def test_get_phrase_matcher_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26040,13 +29076,13 @@ def test_calculate_issue_model_stats_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.calculate_issue_model_stats( - contact_center_insights.CalculateIssueModelStatsRequest(), - issue_model="issue_model_value", + client.get_phrase_matcher( + contact_center_insights.GetPhraseMatcherRequest(), + name="name_value", ) -def test_calculate_issue_model_stats_rest_error(): +def test_get_phrase_matcher_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -26055,11 +29091,11 @@ def test_calculate_issue_model_stats_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.CreatePhraseMatcherRequest, + contact_center_insights.ListPhraseMatchersRequest, dict, ], ) -def test_create_phrase_matcher_rest(request_type): +def test_list_phrase_matchers_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26067,137 +29103,34 @@ def test_create_phrase_matcher_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["phrase_matcher"] = { - "name": "name_value", - "revision_id": "revision_id_value", - "version_tag": "version_tag_value", - "revision_create_time": {"seconds": 751, "nanos": 543}, - "display_name": "display_name_value", - "type_": 1, - "active": True, - "phrase_match_rule_groups": [ - { - "type_": 1, - "phrase_match_rules": [ - { - "query": "query_value", - "negated": True, - "config": {"exact_match_config": {"case_sensitive": True}}, - } - ], - } - ], - "activation_update_time": {}, - "role_match": 1, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.CreatePhraseMatcherRequest.meta.fields[ - "phrase_matcher" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["phrase_matcher"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the 
runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["phrase_matcher"][field])): - del request_init["phrase_matcher"][field][i][subfield] - else: - del request_init["phrase_matcher"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + return_value = contact_center_insights.ListPhraseMatchersResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = contact_center_insights.ListPhraseMatchersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_phrase_matcher(request) + response = client.list_phrase_matchers(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert isinstance(response, pagers.ListPhraseMatchersPager) + assert response.next_page_token == "next_page_token_value" -def test_create_phrase_matcher_rest_use_cached_wrapped_rpc(): +def test_list_phrase_matchers_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26212,8 +29145,7 @@ def test_create_phrase_matcher_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_phrase_matcher - in client._transport._wrapped_methods + client._transport.list_phrase_matchers in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -26222,24 +29154,24 @@ def test_create_phrase_matcher_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_phrase_matcher + client._transport.list_phrase_matchers ] = mock_rpc request = {} - client.create_phrase_matcher(request) + client.list_phrase_matchers(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_phrase_matcher(request) + client.list_phrase_matchers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_phrase_matcher_rest_required_fields( - request_type=contact_center_insights.CreatePhraseMatcherRequest, +def test_list_phrase_matchers_rest_required_fields( + request_type=contact_center_insights.ListPhraseMatchersRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -26255,7 +29187,7 @@ def test_create_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_phrase_matcher._get_unset_required_fields(jsonified_request) + ).list_phrase_matchers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -26264,7 +29196,15 @@ def test_create_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_phrase_matcher._get_unset_required_fields(jsonified_request) + ).list_phrase_matchers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -26278,7 +29218,7 @@ def test_create_phrase_matcher_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = contact_center_insights.ListPhraseMatchersResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -26290,48 +29230,50 @@ def test_create_phrase_matcher_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = contact_center_insights.ListPhraseMatchersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_phrase_matcher(request) + response = client.list_phrase_matchers(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_phrase_matcher_rest_unset_required_fields(): +def test_list_phrase_matchers_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_phrase_matcher._get_unset_required_fields({}) + unset_fields = transport.list_phrase_matchers._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "parent", - "phraseMatcher", + "filter", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_phrase_matcher_rest_interceptors(null_interceptor): +def test_list_phrase_matchers_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26344,14 +29286,14 @@ def 
test_create_phrase_matcher_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_create_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "post_list_phrase_matchers" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_create_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "pre_list_phrase_matchers" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CreatePhraseMatcherRequest.pb( - contact_center_insights.CreatePhraseMatcherRequest() + pb_message = contact_center_insights.ListPhraseMatchersRequest.pb( + contact_center_insights.ListPhraseMatchersRequest() ) transcode.return_value = { "method": "post", @@ -26363,19 +29305,21 @@ def test_create_phrase_matcher_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.PhraseMatcher.to_json( - resources.PhraseMatcher() + req.return_value._content = ( + contact_center_insights.ListPhraseMatchersResponse.to_json( + contact_center_insights.ListPhraseMatchersResponse() + ) ) - request = contact_center_insights.CreatePhraseMatcherRequest() + request = contact_center_insights.ListPhraseMatchersRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.PhraseMatcher() + post.return_value = contact_center_insights.ListPhraseMatchersResponse() - client.create_phrase_matcher( + client.list_phrase_matchers( request, metadata=[ ("key", "val"), @@ -26387,9 +29331,9 @@ def test_create_phrase_matcher_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_phrase_matcher_rest_bad_request( +def test_list_phrase_matchers_rest_bad_request( transport: str = "rest", - 
request_type=contact_center_insights.CreatePhraseMatcherRequest, + request_type=contact_center_insights.ListPhraseMatchersRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26409,10 +29353,10 @@ def test_create_phrase_matcher_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_phrase_matcher(request) + client.list_phrase_matchers(request) -def test_create_phrase_matcher_rest_flattened(): +def test_list_phrase_matchers_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26421,7 +29365,7 @@ def test_create_phrase_matcher_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = contact_center_insights.ListPhraseMatchersResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -26429,7 +29373,6 @@ def test_create_phrase_matcher_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), ) mock_args.update(sample_request) @@ -26437,12 +29380,14 @@ def test_create_phrase_matcher_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = contact_center_insights.ListPhraseMatchersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_phrase_matcher(**mock_args) + 
client.list_phrase_matchers(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -26455,7 +29400,7 @@ def test_create_phrase_matcher_rest_flattened(): ) -def test_create_phrase_matcher_rest_flattened_error(transport: str = "rest"): +def test_list_phrase_matchers_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26464,27 +29409,84 @@ def test_create_phrase_matcher_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_phrase_matcher( - contact_center_insights.CreatePhraseMatcherRequest(), + client.list_phrase_matchers( + contact_center_insights.ListPhraseMatchersRequest(), parent="parent_value", - phrase_matcher=resources.PhraseMatcher(name="name_value"), ) -def test_create_phrase_matcher_rest_error(): +def test_list_phrase_matchers_rest_pager(transport: str = "rest"): client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + next_page_token="abc", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[], + next_page_token="def", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + ], + next_page_token="ghi", + ), + contact_center_insights.ListPhraseMatchersResponse( + phrase_matchers=[ + resources.PhraseMatcher(), + resources.PhraseMatcher(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + contact_center_insights.ListPhraseMatchersResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_phrase_matchers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.PhraseMatcher) for i in results) + + pages = list(client.list_phrase_matchers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetPhraseMatcherRequest, + contact_center_insights.DeletePhraseMatcherRequest, dict, ], ) -def test_get_phrase_matcher_rest(request_type): +def test_delete_phrase_matcher_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26497,39 
+29499,22 @@ def test_get_phrase_matcher_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher( - name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_phrase_matcher(request) + response = client.delete_phrase_matcher(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert response is None -def test_get_phrase_matcher_rest_use_cached_wrapped_rpc(): +def test_delete_phrase_matcher_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26544,7 +29529,8 @@ def test_get_phrase_matcher_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_phrase_matcher in client._transport._wrapped_methods + client._transport.delete_phrase_matcher + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -26553,24 +29539,24 @@ def test_get_phrase_matcher_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_phrase_matcher + client._transport.delete_phrase_matcher ] = mock_rpc request = {} - client.get_phrase_matcher(request) + client.delete_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_phrase_matcher(request) + client.delete_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_phrase_matcher_rest_required_fields( - request_type=contact_center_insights.GetPhraseMatcherRequest, +def test_delete_phrase_matcher_rest_required_fields( + request_type=contact_center_insights.DeletePhraseMatcherRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -26586,7 +29572,7 @@ def test_get_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_phrase_matcher._get_unset_required_fields(jsonified_request) + ).delete_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -26595,7 +29581,7 @@ def test_get_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_phrase_matcher._get_unset_required_fields(jsonified_request) + ).delete_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -26609,7 +29595,7 @@ def test_get_phrase_matcher_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -26621,39 +29607,36 @@ def test_get_phrase_matcher_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_phrase_matcher(request) + response = client.delete_phrase_matcher(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_phrase_matcher_rest_unset_required_fields(): +def test_delete_phrase_matcher_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_phrase_matcher._get_unset_required_fields({}) + unset_fields = transport.delete_phrase_matcher._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_phrase_matcher_rest_interceptors(null_interceptor): +def test_delete_phrase_matcher_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26666,14 +29649,11 @@ def test_get_phrase_matcher_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_phrase_matcher" - 
) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "pre_delete_phrase_matcher" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = contact_center_insights.GetPhraseMatcherRequest.pb( - contact_center_insights.GetPhraseMatcherRequest() + pb_message = contact_center_insights.DeletePhraseMatcherRequest.pb( + contact_center_insights.DeletePhraseMatcherRequest() ) transcode.return_value = { "method": "post", @@ -26685,19 +29665,15 @@ def test_get_phrase_matcher_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.PhraseMatcher.to_json( - resources.PhraseMatcher() - ) - request = contact_center_insights.GetPhraseMatcherRequest() + request = contact_center_insights.DeletePhraseMatcherRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.PhraseMatcher() - client.get_phrase_matcher( + client.delete_phrase_matcher( request, metadata=[ ("key", "val"), @@ -26706,12 +29682,11 @@ def test_get_phrase_matcher_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_get_phrase_matcher_rest_bad_request( +def test_delete_phrase_matcher_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.GetPhraseMatcherRequest, + request_type=contact_center_insights.DeletePhraseMatcherRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26731,10 +29706,10 @@ def test_get_phrase_matcher_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_phrase_matcher(request) + client.delete_phrase_matcher(request) -def test_get_phrase_matcher_rest_flattened(): +def 
test_delete_phrase_matcher_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -26743,7 +29718,7 @@ def test_get_phrase_matcher_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { @@ -26759,13 +29734,11 @@ def test_get_phrase_matcher_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_phrase_matcher(**mock_args) + client.delete_phrase_matcher(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -26778,7 +29751,7 @@ def test_get_phrase_matcher_rest_flattened(): ) -def test_get_phrase_matcher_rest_flattened_error(transport: str = "rest"): +def test_delete_phrase_matcher_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26787,61 +29760,168 @@ def test_get_phrase_matcher_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_phrase_matcher( - contact_center_insights.GetPhraseMatcherRequest(), + client.delete_phrase_matcher( + contact_center_insights.DeletePhraseMatcherRequest(), name="name_value", ) -def test_get_phrase_matcher_rest_error(): +def test_delete_phrase_matcher_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.ListPhraseMatchersRequest, - dict, - ], -) -def test_list_phrase_matchers_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdatePhraseMatcherRequest, + dict, + ], +) +def test_update_phrase_matcher_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "phrase_matcher": { + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" + } + } + request_init["phrase_matcher"] = { + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3", + "revision_id": "revision_id_value", + "version_tag": "version_tag_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "display_name": "display_name_value", + "type_": 1, + "active": True, + "phrase_match_rule_groups": [ + { + "type_": 1, + "phrase_match_rules": [ + { + "query": "query_value", + "negated": True, + "config": {"exact_match_config": {"case_sensitive": True}}, + } + ], + } + ], + "activation_update_time": {}, + "role_match": 1, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdatePhraseMatcherRequest.meta.fields[ + "phrase_matcher" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["phrase_matcher"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + 
"is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["phrase_matcher"][field])): + del request_init["phrase_matcher"][field][i][subfield] + else: + del request_init["phrase_matcher"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListPhraseMatchersResponse( - next_page_token="next_page_token_value", + return_value = resources.PhraseMatcher( + name="name_value", + revision_id="revision_id_value", + version_tag="version_tag_value", + display_name="display_name_value", + type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, + active=True, + role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListPhraseMatchersResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_phrase_matchers(request) + response = client.update_phrase_matcher(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPhraseMatchersPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.PhraseMatcher) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.version_tag == "version_tag_value" + assert response.display_name == "display_name_value" + assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF + assert response.active is True + assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT -def test_list_phrase_matchers_rest_use_cached_wrapped_rpc(): +def test_update_phrase_matcher_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -26856,7 +29936,8 @@ def test_list_phrase_matchers_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_phrase_matchers in client._transport._wrapped_methods + client._transport.update_phrase_matcher + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -26865,29 +29946,28 @@ def test_list_phrase_matchers_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_phrase_matchers + client._transport.update_phrase_matcher ] = mock_rpc request = {} - client.list_phrase_matchers(request) + client.update_phrase_matcher(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_phrase_matchers(request) + client.update_phrase_matcher(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_phrase_matchers_rest_required_fields( - request_type=contact_center_insights.ListPhraseMatchersRequest, +def test_update_phrase_matcher_rest_required_fields( + request_type=contact_center_insights.UpdatePhraseMatcherRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -26898,29 +29978,19 @@ def test_list_phrase_matchers_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_phrase_matchers._get_unset_required_fields(jsonified_request) + ).update_phrase_matcher._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_phrase_matchers._get_unset_required_fields(jsonified_request) + ).update_phrase_matcher._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -26929,7 +29999,7 @@ def test_list_phrase_matchers_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListPhraseMatchersResponse() + return_value = resources.PhraseMatcher() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -26941,50 +30011,40 @@ def test_list_phrase_matchers_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListPhraseMatchersResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_phrase_matchers(request) + response = client.update_phrase_matcher(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_phrase_matchers_rest_unset_required_fields(): +def test_update_phrase_matcher_rest_unset_required_fields(): transport 
= transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_phrase_matchers._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.update_phrase_matcher._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("phraseMatcher",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_phrase_matchers_rest_interceptors(null_interceptor): +def test_update_phrase_matcher_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26997,14 +30057,14 @@ def test_list_phrase_matchers_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_list_phrase_matchers" + transports.ContactCenterInsightsRestInterceptor, "post_update_phrase_matcher" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_list_phrase_matchers" + transports.ContactCenterInsightsRestInterceptor, "pre_update_phrase_matcher" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.ListPhraseMatchersRequest.pb( - contact_center_insights.ListPhraseMatchersRequest() + pb_message = contact_center_insights.UpdatePhraseMatcherRequest.pb( + contact_center_insights.UpdatePhraseMatcherRequest() ) transcode.return_value = { "method": "post", @@ -27016,21 +30076,19 @@ def test_list_phrase_matchers_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.ListPhraseMatchersResponse.to_json( - 
contact_center_insights.ListPhraseMatchersResponse() - ) + req.return_value._content = resources.PhraseMatcher.to_json( + resources.PhraseMatcher() ) - request = contact_center_insights.ListPhraseMatchersRequest() + request = contact_center_insights.UpdatePhraseMatcherRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.ListPhraseMatchersResponse() + post.return_value = resources.PhraseMatcher() - client.list_phrase_matchers( + client.update_phrase_matcher( request, metadata=[ ("key", "val"), @@ -27042,9 +30100,9 @@ def test_list_phrase_matchers_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_phrase_matchers_rest_bad_request( +def test_update_phrase_matcher_rest_bad_request( transport: str = "rest", - request_type=contact_center_insights.ListPhraseMatchersRequest, + request_type=contact_center_insights.UpdatePhraseMatcherRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27052,7 +30110,11 @@ def test_list_phrase_matchers_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "phrase_matcher": { + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -27064,10 +30126,10 @@ def test_list_phrase_matchers_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_phrase_matchers(request) + client.update_phrase_matcher(request) -def test_list_phrase_matchers_rest_flattened(): +def test_update_phrase_matcher_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -27076,14 +30138,19 @@ def test_list_phrase_matchers_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = contact_center_insights.ListPhraseMatchersResponse() + return_value = resources.PhraseMatcher() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "phrase_matcher": { + "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -27091,141 +30158,89 @@ def test_list_phrase_matchers_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.ListPhraseMatchersResponse.pb( - return_value - ) + return_value = resources.PhraseMatcher.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_phrase_matchers(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/phraseMatchers" - % client.transport._host, - args[1], - ) - - -def test_list_phrase_matchers_rest_flattened_error(transport: str = "rest"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_phrase_matchers( - contact_center_insights.ListPhraseMatchersRequest(), - parent="parent_value", - ) - - -def test_list_phrase_matchers_rest_pager(transport: str = "rest"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - next_page_token="abc", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[], - next_page_token="def", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - ], - next_page_token="ghi", - ), - contact_center_insights.ListPhraseMatchersResponse( - phrase_matchers=[ - resources.PhraseMatcher(), - resources.PhraseMatcher(), - ], - ), - ) - # Two responses for two calls - response = response + response + req.return_value = response_value - # Wrap the values into proper Response objs - response = tuple( - contact_center_insights.ListPhraseMatchersResponse.to_json(x) - for x in response + client.update_phrase_matcher(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{phrase_matcher.name=projects/*/locations/*/phraseMatchers/*}" + % client.transport._host, + args[1], ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_phrase_matchers(request=sample_request) +def test_update_phrase_matcher_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.PhraseMatcher) for i in results) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_phrase_matcher( + contact_center_insights.UpdatePhraseMatcherRequest(), + phrase_matcher=resources.PhraseMatcher(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - pages = list(client.list_phrase_matchers(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + +def test_update_phrase_matcher_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - contact_center_insights.DeletePhraseMatcherRequest, + contact_center_insights.CalculateStatsRequest, dict, ], ) -def test_delete_phrase_matcher_rest(request_type): +def test_calculate_stats_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy 
transcoding - request_init = {"name": "projects/sample1/locations/sample2/phraseMatchers/sample3"} + request_init = {"location": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = contact_center_insights.CalculateStatsResponse( + average_turn_count=1931, + conversation_count=1955, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = contact_center_insights.CalculateStatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_phrase_matcher(request) + response = client.calculate_stats(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, contact_center_insights.CalculateStatsResponse) + assert response.average_turn_count == 1931 + assert response.conversation_count == 1955 -def test_delete_phrase_matcher_rest_use_cached_wrapped_rpc(): +def test_calculate_stats_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27239,40 +30254,35 @@ def test_delete_phrase_matcher_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_phrase_matcher - in client._transport._wrapped_methods - ) + assert client._transport.calculate_stats in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_phrase_matcher - ] = mock_rpc + client._transport._wrapped_methods[client._transport.calculate_stats] = mock_rpc request = {} - client.delete_phrase_matcher(request) + client.calculate_stats(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_phrase_matcher(request) + client.calculate_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_phrase_matcher_rest_required_fields( - request_type=contact_center_insights.DeletePhraseMatcherRequest, +def test_calculate_stats_rest_required_fields( + request_type=contact_center_insights.CalculateStatsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["name"] = "" + request_init["location"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -27283,21 +30293,23 @@ def test_delete_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_phrase_matcher._get_unset_required_fields(jsonified_request) + ).calculate_stats._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["location"] = "location_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_phrase_matcher._get_unset_required_fields(jsonified_request) + ).calculate_stats._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "location" in jsonified_request + assert jsonified_request["location"] == "location_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27306,7 +30318,7 @@ def test_delete_phrase_matcher_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = contact_center_insights.CalculateStatsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -27318,36 +30330,41 @@ def test_delete_phrase_matcher_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = contact_center_insights.CalculateStatsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_phrase_matcher(request) + response = client.calculate_stats(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_phrase_matcher_rest_unset_required_fields(): +def test_calculate_stats_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.delete_phrase_matcher._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.calculate_stats._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter",)) & set(("location",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_phrase_matcher_rest_interceptors(null_interceptor): +def test_calculate_stats_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27360,11 +30377,14 @@ def test_delete_phrase_matcher_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_delete_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "post_calculate_stats" + ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "pre_calculate_stats" ) as pre: pre.assert_not_called() - pb_message = contact_center_insights.DeletePhraseMatcherRequest.pb( - contact_center_insights.DeletePhraseMatcherRequest() + post.assert_not_called() + pb_message = contact_center_insights.CalculateStatsRequest.pb( + contact_center_insights.CalculateStatsRequest() ) transcode.return_value = { "method": "post", @@ -27376,15 +30396,21 @@ def test_delete_phrase_matcher_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = ( + contact_center_insights.CalculateStatsResponse.to_json( + contact_center_insights.CalculateStatsResponse() + ) + ) - request = contact_center_insights.DeletePhraseMatcherRequest() + request = contact_center_insights.CalculateStatsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = 
contact_center_insights.CalculateStatsResponse() - client.delete_phrase_matcher( + client.calculate_stats( request, metadata=[ ("key", "val"), @@ -27393,11 +30419,11 @@ def test_delete_phrase_matcher_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_phrase_matcher_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.DeletePhraseMatcherRequest, +def test_calculate_stats_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.CalculateStatsRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27405,7 +30431,7 @@ def test_delete_phrase_matcher_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/phraseMatchers/sample3"} + request_init = {"location": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27417,10 +30443,10 @@ def test_delete_phrase_matcher_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_phrase_matcher(request) + client.calculate_stats(request) -def test_delete_phrase_matcher_rest_flattened(): +def test_calculate_stats_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -27429,40 +30455,40 @@ def test_delete_phrase_matcher_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = contact_center_insights.CalculateStatsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" - } + sample_request = {"location": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + location="location_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = contact_center_insights.CalculateStatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_phrase_matcher(**mock_args) + client.calculate_stats(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/phraseMatchers/*}" + "%s/v1/{location=projects/*/locations/*}/conversations:calculateStats" % client.transport._host, args[1], ) -def test_delete_phrase_matcher_rest_flattened_error(transport: str = "rest"): +def test_calculate_stats_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -27471,168 +30497,61 @@ def test_delete_phrase_matcher_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_phrase_matcher( - contact_center_insights.DeletePhraseMatcherRequest(), - name="name_value", - ) - - -def test_delete_phrase_matcher_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.UpdatePhraseMatcherRequest, - dict, - ], -) -def test_update_phrase_matcher_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "phrase_matcher": { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" - } - } - request_init["phrase_matcher"] = { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3", - "revision_id": "revision_id_value", - "version_tag": "version_tag_value", - "revision_create_time": {"seconds": 751, "nanos": 543}, - "display_name": "display_name_value", - "type_": 1, - "active": True, - "phrase_match_rule_groups": [ - { - "type_": 1, - "phrase_match_rules": [ - { - "query": "query_value", - "negated": True, - "config": {"exact_match_config": {"case_sensitive": True}}, - } - ], - } - ], - "activation_update_time": {}, - "role_match": 1, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdatePhraseMatcherRequest.meta.fields[ - "phrase_matcher" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + client.calculate_stats( + contact_center_insights.CalculateStatsRequest(), + location="location_value", + ) - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["phrase_matcher"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_calculate_stats_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated 
= subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["phrase_matcher"][field])): - del request_init["phrase_matcher"][field][i][subfield] - else: - del request_init["phrase_matcher"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.GetSettingsRequest, + dict, + ], +) +def test_get_settings_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/settings"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.PhraseMatcher( + return_value = resources.Settings( name="name_value", - revision_id="revision_id_value", - version_tag="version_tag_value", - display_name="display_name_value", - type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF, - active=True, - role_match=resources.ConversationParticipant.Role.HUMAN_AGENT, + language_code="language_code_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_phrase_matcher(request) + response = client.get_settings(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PhraseMatcher) + assert isinstance(response, resources.Settings) assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.version_tag == "version_tag_value" - assert response.display_name == "display_name_value" - assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF - assert response.active is True - assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT + assert response.language_code == "language_code_value" -def test_update_phrase_matcher_rest_use_cached_wrapped_rpc(): +def test_get_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27646,39 +30565,35 @@ def test_update_phrase_matcher_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_phrase_matcher - in client._transport._wrapped_methods - ) + assert client._transport.get_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_phrase_matcher - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc request = {} - client.update_phrase_matcher(request) + client.get_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_phrase_matcher(request) + client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_phrase_matcher_rest_required_fields( - request_type=contact_center_insights.UpdatePhraseMatcherRequest, +def test_get_settings_rest_required_fields( + request_type=contact_center_insights.GetSettingsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -27689,19 +30604,21 @@ def test_update_phrase_matcher_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_phrase_matcher._get_unset_required_fields(jsonified_request) + ).get_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_phrase_matcher._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).get_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27710,7 +30627,7 @@ def test_update_phrase_matcher_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.PhraseMatcher() + return_value = resources.Settings() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -27722,40 +30639,39 @@ def test_update_phrase_matcher_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_phrase_matcher(request) + response = client.get_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_phrase_matcher_rest_unset_required_fields(): +def test_get_settings_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_phrase_matcher._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("phraseMatcher",))) + unset_fields = transport.get_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_phrase_matcher_rest_interceptors(null_interceptor): +def test_get_settings_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None @@ -27768,14 +30684,14 @@ def test_update_phrase_matcher_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "post_get_settings" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_phrase_matcher" + transports.ContactCenterInsightsRestInterceptor, "pre_get_settings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdatePhraseMatcherRequest.pb( - contact_center_insights.UpdatePhraseMatcherRequest() + pb_message = contact_center_insights.GetSettingsRequest.pb( + contact_center_insights.GetSettingsRequest() ) transcode.return_value = { "method": "post", @@ -27787,19 +30703,17 @@ def test_update_phrase_matcher_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.PhraseMatcher.to_json( - resources.PhraseMatcher() - ) + req.return_value._content = resources.Settings.to_json(resources.Settings()) - request = contact_center_insights.UpdatePhraseMatcherRequest() + request = contact_center_insights.GetSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.PhraseMatcher() + post.return_value = resources.Settings() - client.update_phrase_matcher( + client.get_settings( request, metadata=[ ("key", "val"), @@ -27811,9 +30725,8 @@ def test_update_phrase_matcher_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_phrase_matcher_rest_bad_request( - transport: str = "rest", - request_type=contact_center_insights.UpdatePhraseMatcherRequest, +def test_get_settings_rest_bad_request( + transport: str = "rest", 
request_type=contact_center_insights.GetSettingsRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -27821,11 +30734,7 @@ def test_update_phrase_matcher_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "phrase_matcher": { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" - } - } + request_init = {"name": "projects/sample1/locations/sample2/settings"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27837,10 +30746,10 @@ def test_update_phrase_matcher_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_phrase_matcher(request) + client.get_settings(request) -def test_update_phrase_matcher_rest_flattened(): +def test_get_settings_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -27849,19 +30758,14 @@ def test_update_phrase_matcher_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.PhraseMatcher() + return_value = resources.Settings() # get arguments that satisfy an http rule for this method - sample_request = { - "phrase_matcher": { - "name": "projects/sample1/locations/sample2/phraseMatchers/sample3" - } - } + sample_request = {"name": "projects/sample1/locations/sample2/settings"} # get truthy value for each flattened field mock_args = dict( - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -27869,89 +30773,187 @@ def test_update_phrase_matcher_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PhraseMatcher.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_phrase_matcher(**mock_args) + client.get_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{phrase_matcher.name=projects/*/locations/*/phraseMatchers/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/settings}" % client.transport._host, args[1], ) -def test_update_phrase_matcher_rest_flattened_error(transport: str = "rest"): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_get_settings_rest_flattened_error(transport: str = "rest"): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_settings( + contact_center_insights.GetSettingsRequest(), + name="name_value", + ) + + +def test_get_settings_rest_error(): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + contact_center_insights.UpdateSettingsRequest, + dict, + ], +) +def test_update_settings_rest(request_type): + client = ContactCenterInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"settings": {"name": "projects/sample1/locations/sample2/settings"}} + request_init["settings"] = { + "name": "projects/sample1/locations/sample2/settings", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "language_code": "language_code_value", + "conversation_ttl": {"seconds": 751, "nanos": 543}, + "pubsub_notification_settings": {}, + "analysis_config": { + "runtime_integration_analysis_percentage": 0.4167, + "upload_conversation_analysis_percentage": 0.41590000000000005, + "annotator_selector": { + "run_interruption_annotator": True, + "run_silence_annotator": True, + "run_phrase_matcher_annotator": True, + "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], + "run_sentiment_annotator": True, + "run_entity_annotator": True, + "run_intent_annotator": True, + "run_issue_model_annotator": True, + "issue_models": ["issue_models_value1", "issue_models_value2"], + "run_summarization_annotator": True, + "summarization_config": { + "conversation_profile": "conversation_profile_value", + "summarization_model": 1, + }, + }, + }, + "redaction_config": { + "deidentify_template": "deidentify_template_value", + "inspect_template": "inspect_template_value", + }, + "speech_config": {"speech_recognizer": "speech_recognizer_value"}, + } + # The version of a generated dependency at test runtime may differ from the 
version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = contact_center_insights.UpdateSettingsRequest.meta.fields["settings"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_phrase_matcher( - contact_center_insights.UpdatePhraseMatcherRequest(), - phrase_matcher=resources.PhraseMatcher(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_update_phrase_matcher_rest_error(): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["settings"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields 
+ if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - contact_center_insights.CalculateStatsRequest, - dict, - ], -) -def test_calculate_stats_rest(request_type): - client = ContactCenterInsightsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["settings"][field])): + del request_init["settings"][field][i][subfield] + else: + del request_init["settings"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.CalculateStatsResponse( - average_turn_count=1931, - conversation_count=1955, + return_value = resources.Settings( + name="name_value", + language_code="language_code_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateStatsResponse.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.calculate_stats(request) + response = client.update_settings(request) # Establish that the response is the type that we expect. - assert isinstance(response, contact_center_insights.CalculateStatsResponse) - assert response.average_turn_count == 1931 - assert response.conversation_count == 1955 + assert isinstance(response, resources.Settings) + assert response.name == "name_value" + assert response.language_code == "language_code_value" -def test_calculate_stats_rest_use_cached_wrapped_rpc(): +def test_update_settings_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -27965,35 +30967,34 @@ def test_calculate_stats_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.calculate_stats in client._transport._wrapped_methods + assert client._transport.update_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.calculate_stats] = mock_rpc + client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc request = {} - client.calculate_stats(request) + client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.calculate_stats(request) + client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_calculate_stats_rest_required_fields( - request_type=contact_center_insights.CalculateStatsRequest, +def test_update_settings_rest_required_fields( + request_type=contact_center_insights.UpdateSettingsRequest, ): transport_class = transports.ContactCenterInsightsRestTransport request_init = {} - request_init["location"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -28004,23 +31005,19 @@ def test_calculate_stats_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).calculate_stats._get_unset_required_fields(jsonified_request) + ).update_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["location"] = "location_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).calculate_stats._get_unset_required_fields(jsonified_request) + ).update_settings._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter",)) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "location" in jsonified_request - assert jsonified_request["location"] == "location_value" client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28029,7 +31026,7 @@ def test_calculate_stats_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = contact_center_insights.CalculateStatsResponse() + return_value = resources.Settings() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -28041,41 +31038,48 @@ def test_calculate_stats_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateStatsResponse.pb( - return_value - ) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.calculate_stats(request) + response = client.update_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_calculate_stats_rest_unset_required_fields(): +def test_update_settings_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.calculate_stats._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter",)) & set(("location",))) + unset_fields = transport.update_settings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "settings", + "updateMask", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_calculate_stats_rest_interceptors(null_interceptor): +def test_update_settings_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -28088,14 +31092,14 @@ def test_calculate_stats_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_calculate_stats" + transports.ContactCenterInsightsRestInterceptor, "post_update_settings" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_calculate_stats" + transports.ContactCenterInsightsRestInterceptor, "pre_update_settings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.CalculateStatsRequest.pb( - contact_center_insights.CalculateStatsRequest() + pb_message = contact_center_insights.UpdateSettingsRequest.pb( + contact_center_insights.UpdateSettingsRequest() ) transcode.return_value = { "method": "post", @@ -28107,21 +31111,17 @@ def test_calculate_stats_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - contact_center_insights.CalculateStatsResponse.to_json( - contact_center_insights.CalculateStatsResponse() - ) - ) + req.return_value._content = resources.Settings.to_json(resources.Settings()) - request = 
contact_center_insights.CalculateStatsRequest() + request = contact_center_insights.UpdateSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = contact_center_insights.CalculateStatsResponse() + post.return_value = resources.Settings() - client.calculate_stats( + client.update_settings( request, metadata=[ ("key", "val"), @@ -28133,8 +31133,8 @@ def test_calculate_stats_rest_interceptors(null_interceptor): post.assert_called_once() -def test_calculate_stats_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.CalculateStatsRequest +def test_update_settings_rest_bad_request( + transport: str = "rest", request_type=contact_center_insights.UpdateSettingsRequest ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28142,7 +31142,7 @@ def test_calculate_stats_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} + request_init = {"settings": {"name": "projects/sample1/locations/sample2/settings"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -28154,10 +31154,10 @@ def test_calculate_stats_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.calculate_stats(request) + client.update_settings(request) -def test_calculate_stats_rest_flattened(): +def test_update_settings_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -28166,14 +31166,17 @@ def test_calculate_stats_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = contact_center_insights.CalculateStatsResponse() + return_value = resources.Settings() # get arguments that satisfy an http rule for this method - sample_request = {"location": "projects/sample1/locations/sample2"} + sample_request = { + "settings": {"name": "projects/sample1/locations/sample2/settings"} + } # get truthy value for each flattened field mock_args = dict( - location="location_value", + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -28181,25 +31184,25 @@ def test_calculate_stats_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = contact_center_insights.CalculateStatsResponse.pb(return_value) + return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.calculate_stats(**mock_args) + client.update_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{location=projects/*/locations/*}/conversations:calculateStats" + "%s/v1/{settings.name=projects/*/locations/*/settings}" % client.transport._host, args[1], ) -def test_calculate_stats_rest_flattened_error(transport: str = "rest"): +def test_update_settings_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28208,13 +31211,14 @@ def test_calculate_stats_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.calculate_stats( - contact_center_insights.CalculateStatsRequest(), - location="location_value", + client.update_settings( + contact_center_insights.UpdateSettingsRequest(), + settings=resources.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_calculate_stats_rest_error(): +def test_update_settings_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -28223,46 +31227,46 @@ def test_calculate_stats_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.GetSettingsRequest, + contact_center_insights.GetEncryptionSpecRequest, dict, ], ) -def test_get_settings_rest(request_type): +def test_get_encryption_spec_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/settings"} + request_init = {"name": "projects/sample1/locations/sample2/encryptionSpec"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Settings( + return_value = resources.EncryptionSpec( name="name_value", - language_code="language_code_value", + kms_key="kms_key_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) + return_value = resources.EncryptionSpec.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_settings(request) + response = client.get_encryption_spec(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) + assert isinstance(response, resources.EncryptionSpec) assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert response.kms_key == "kms_key_value" -def test_get_settings_rest_use_cached_wrapped_rpc(): +def test_get_encryption_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28276,30 +31280,34 @@ def test_get_settings_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_settings in client._transport._wrapped_methods + assert ( + client._transport.get_encryption_spec in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_encryption_spec + ] = mock_rpc request = {} - client.get_settings(request) + client.get_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_settings(request) + client.get_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_settings_rest_required_fields( - request_type=contact_center_insights.GetSettingsRequest, +def test_get_encryption_spec_rest_required_fields( + request_type=contact_center_insights.GetEncryptionSpecRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -28315,7 +31323,7 @@ def test_get_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_settings._get_unset_required_fields(jsonified_request) + ).get_encryption_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -28324,7 +31332,7 @@ def test_get_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_settings._get_unset_required_fields(jsonified_request) + ).get_encryption_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -28338,7 +31346,7 @@ def test_get_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Settings() + return_value = resources.EncryptionSpec() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -28359,30 +31367,30 @@ def test_get_settings_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) + return_value = resources.EncryptionSpec.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_settings(request) + response = client.get_encryption_spec(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_settings_rest_unset_required_fields(): +def test_get_encryption_spec_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_settings._get_unset_required_fields({}) + unset_fields = transport.get_encryption_spec._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_settings_rest_interceptors(null_interceptor): +def test_get_encryption_spec_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -28395,14 +31403,14 @@ def test_get_settings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_get_settings" + transports.ContactCenterInsightsRestInterceptor, "post_get_encryption_spec" ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_get_settings" + transports.ContactCenterInsightsRestInterceptor, "pre_get_encryption_spec" ) 
as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.GetSettingsRequest.pb( - contact_center_insights.GetSettingsRequest() + pb_message = contact_center_insights.GetEncryptionSpecRequest.pb( + contact_center_insights.GetEncryptionSpecRequest() ) transcode.return_value = { "method": "post", @@ -28414,17 +31422,19 @@ def test_get_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Settings.to_json(resources.Settings()) + req.return_value._content = resources.EncryptionSpec.to_json( + resources.EncryptionSpec() + ) - request = contact_center_insights.GetSettingsRequest() + request = contact_center_insights.GetEncryptionSpecRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Settings() + post.return_value = resources.EncryptionSpec() - client.get_settings( + client.get_encryption_spec( request, metadata=[ ("key", "val"), @@ -28436,8 +31446,9 @@ def test_get_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_settings_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.GetSettingsRequest +def test_get_encryption_spec_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.GetEncryptionSpecRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28445,7 +31456,7 @@ def test_get_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/settings"} + request_init = {"name": "projects/sample1/locations/sample2/encryptionSpec"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -28457,10 +31468,10 @@ def test_get_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_settings(request) + client.get_encryption_spec(request) -def test_get_settings_rest_flattened(): +def test_get_encryption_spec_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -28469,10 +31480,10 @@ def test_get_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Settings() + return_value = resources.EncryptionSpec() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/settings"} + sample_request = {"name": "projects/sample1/locations/sample2/encryptionSpec"} # get truthy value for each flattened field mock_args = dict( @@ -28484,24 +31495,25 @@ def test_get_settings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) + return_value = resources.EncryptionSpec.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_settings(**mock_args) + client.get_encryption_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/settings}" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/encryptionSpec}" + % client.transport._host, args[1], ) -def test_get_settings_rest_flattened_error(transport: str = "rest"): +def test_get_encryption_spec_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28510,13 +31522,13 @@ def test_get_settings_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_settings( - contact_center_insights.GetSettingsRequest(), + client.get_encryption_spec( + contact_center_insights.GetEncryptionSpecRequest(), name="name_value", ) -def test_get_settings_rest_error(): +def test_get_encryption_spec_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -28525,146 +31537,41 @@ def test_get_settings_rest_error(): @pytest.mark.parametrize( "request_type", [ - contact_center_insights.UpdateSettingsRequest, + contact_center_insights.InitializeEncryptionSpecRequest, dict, ], ) -def test_update_settings_rest(request_type): +def test_initialize_encryption_spec_rest(request_type): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"settings": {"name": "projects/sample1/locations/sample2/settings"}} - request_init["settings"] = { - "name": "projects/sample1/locations/sample2/settings", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "language_code": "language_code_value", - "conversation_ttl": {"seconds": 751, "nanos": 543}, - "pubsub_notification_settings": {}, - "analysis_config": { - 
"runtime_integration_analysis_percentage": 0.4167, - "upload_conversation_analysis_percentage": 0.41590000000000005, - "annotator_selector": { - "run_interruption_annotator": True, - "run_silence_annotator": True, - "run_phrase_matcher_annotator": True, - "phrase_matchers": ["phrase_matchers_value1", "phrase_matchers_value2"], - "run_sentiment_annotator": True, - "run_entity_annotator": True, - "run_intent_annotator": True, - "run_issue_model_annotator": True, - "issue_models": ["issue_models_value1", "issue_models_value2"], - "run_summarization_annotator": True, - "summarization_config": { - "conversation_profile": "conversation_profile_value", - "summarization_model": 1, - }, - }, - }, - "redaction_config": { - "deidentify_template": "deidentify_template_value", - "inspect_template": "inspect_template_value", - }, - "speech_config": {"speech_recognizer": "speech_recognizer_value"}, + request_init = { + "encryption_spec": {"name": "projects/sample1/locations/sample2/encryptionSpec"} } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = contact_center_insights.UpdateSettingsRequest.meta.fields["settings"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["settings"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["settings"][field])): - del request_init["settings"][field][i][subfield] - else: - del 
request_init["settings"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Settings( - name="name_value", - language_code="language_code_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_settings(request) + response = client.initialize_encryption_spec(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Settings) - assert response.name == "name_value" - assert response.language_code == "language_code_value" + assert response.operation.name == "operations/spam" -def test_update_settings_rest_use_cached_wrapped_rpc(): +def test_initialize_encryption_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -28678,30 +31585,39 @@ def test_update_settings_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_settings in client._transport._wrapped_methods + assert ( + client._transport.initialize_encryption_spec + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc + client._transport._wrapped_methods[ + client._transport.initialize_encryption_spec + ] = mock_rpc request = {} - client.update_settings(request) + client.initialize_encryption_spec(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_settings(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.initialize_encryption_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_settings_rest_required_fields( - request_type=contact_center_insights.UpdateSettingsRequest, +def test_initialize_encryption_spec_rest_required_fields( + request_type=contact_center_insights.InitializeEncryptionSpecRequest, ): transport_class = transports.ContactCenterInsightsRestTransport @@ -28716,16 +31632,14 @@ def test_update_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_settings._get_unset_required_fields(jsonified_request) + ).initialize_encryption_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_settings._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).initialize_encryption_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -28737,7 +31651,7 @@ def test_update_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Settings() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -28749,7 +31663,7 @@ def test_update_settings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -28757,40 +31671,29 @@ def test_update_settings_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_settings(request) + response = client.initialize_encryption_spec(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_settings_rest_unset_required_fields(): +def test_initialize_encryption_spec_rest_unset_required_fields(): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_settings._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "settings", - "updateMask", - ) - ) - 
) + unset_fields = transport.initialize_encryption_spec._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("encryptionSpec",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_settings_rest_interceptors(null_interceptor): +def test_initialize_encryption_spec_rest_interceptors(null_interceptor): transport = transports.ContactCenterInsightsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -28803,14 +31706,18 @@ def test_update_settings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "post_update_settings" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_initialize_encryption_spec", ) as post, mock.patch.object( - transports.ContactCenterInsightsRestInterceptor, "pre_update_settings" + transports.ContactCenterInsightsRestInterceptor, + "pre_initialize_encryption_spec", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = contact_center_insights.UpdateSettingsRequest.pb( - contact_center_insights.UpdateSettingsRequest() + pb_message = contact_center_insights.InitializeEncryptionSpecRequest.pb( + contact_center_insights.InitializeEncryptionSpecRequest() ) transcode.return_value = { "method": "post", @@ -28822,17 +31729,19 @@ def test_update_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Settings.to_json(resources.Settings()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = contact_center_insights.UpdateSettingsRequest() + request = contact_center_insights.InitializeEncryptionSpecRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] 
pre.return_value = request, metadata - post.return_value = resources.Settings() + post.return_value = operations_pb2.Operation() - client.update_settings( + client.initialize_encryption_spec( request, metadata=[ ("key", "val"), @@ -28844,8 +31753,9 @@ def test_update_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_settings_rest_bad_request( - transport: str = "rest", request_type=contact_center_insights.UpdateSettingsRequest +def test_initialize_encryption_spec_rest_bad_request( + transport: str = "rest", + request_type=contact_center_insights.InitializeEncryptionSpecRequest, ): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28853,7 +31763,9 @@ def test_update_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"settings": {"name": "projects/sample1/locations/sample2/settings"}} + request_init = { + "encryption_spec": {"name": "projects/sample1/locations/sample2/encryptionSpec"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -28865,10 +31777,10 @@ def test_update_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_settings(request) + client.initialize_encryption_spec(request) -def test_update_settings_rest_flattened(): +def test_initialize_encryption_spec_rest_flattened(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -28877,43 +31789,42 @@ def test_update_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Settings() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "settings": {"name": "projects/sample1/locations/sample2/settings"} + "encryption_spec": { + "name": "projects/sample1/locations/sample2/encryptionSpec" + } } # get truthy value for each flattened field mock_args = dict( - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + encryption_spec=resources.EncryptionSpec(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Settings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_settings(**mock_args) + client.initialize_encryption_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{settings.name=projects/*/locations/*/settings}" + "%s/v1/{encryption_spec.name=projects/*/locations/*/encryptionSpec}:initialize" % client.transport._host, args[1], ) -def test_update_settings_rest_flattened_error(transport: str = "rest"): +def test_initialize_encryption_spec_rest_flattened_error(transport: str = "rest"): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -28922,14 +31833,13 @@ def test_update_settings_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_settings( - contact_center_insights.UpdateSettingsRequest(), - settings=resources.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.initialize_encryption_spec( + contact_center_insights.InitializeEncryptionSpecRequest(), + encryption_spec=resources.EncryptionSpec(name="name_value"), ) -def test_update_settings_rest_error(): +def test_initialize_encryption_spec_rest_error(): client = ContactCenterInsightsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -30830,6 +33740,8 @@ def test_contact_center_insights_base_transport(): "delete_issue_model", "deploy_issue_model", "undeploy_issue_model", + "export_issue_model", + "import_issue_model", "get_issue", "list_issues", "update_issue", @@ -30843,6 +33755,8 @@ def test_contact_center_insights_base_transport(): "calculate_stats", "get_settings", "update_settings", + "get_encryption_spec", + "initialize_encryption_spec", "create_view", "get_view", "list_views", @@ -31198,6 +34112,12 @@ def test_contact_center_insights_client_transport_session_collision(transport_na session1 = client1.transport.undeploy_issue_model._session session2 = client2.transport.undeploy_issue_model._session assert session1 != session2 + session1 = client1.transport.export_issue_model._session + session2 = client2.transport.export_issue_model._session + assert session1 != session2 + session1 = client1.transport.import_issue_model._session + session2 = client2.transport.import_issue_model._session + assert session1 != session2 session1 = client1.transport.get_issue._session session2 = client2.transport.get_issue._session assert session1 != session2 @@ -31237,6 +34157,12 @@ def test_contact_center_insights_client_transport_session_collision(transport_na session1 = client1.transport.update_settings._session session2 = client2.transport.update_settings._session assert session1 != session2 + session1 = 
client1.transport.get_encryption_spec._session + session2 = client2.transport.get_encryption_spec._session + assert session1 != session2 + session1 = client1.transport.initialize_encryption_spec._session + session2 = client2.transport.initialize_encryption_spec._session + assert session1 != session2 session1 = client1.transport.create_view._session session2 = client2.transport.create_view._session assert session1 != session2 @@ -31503,11 +34429,34 @@ def test_parse_conversation_profile_path(): assert expected == actual -def test_issue_path(): +def test_encryption_spec_path(): project = "winkle" location = "nautilus" - issue_model = "scallop" - issue = "abalone" + expected = "projects/{project}/locations/{location}/encryptionSpec".format( + project=project, + location=location, + ) + actual = ContactCenterInsightsClient.encryption_spec_path(project, location) + assert expected == actual + + +def test_parse_encryption_spec_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ContactCenterInsightsClient.encryption_spec_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ContactCenterInsightsClient.parse_encryption_spec_path(path) + assert expected == actual + + +def test_issue_path(): + project = "squid" + location = "clam" + issue_model = "whelk" + issue = "octopus" expected = "projects/{project}/locations/{location}/issueModels/{issue_model}/issues/{issue}".format( project=project, location=location, @@ -31522,10 +34471,10 @@ def test_issue_path(): def test_parse_issue_path(): expected = { - "project": "squid", - "location": "clam", - "issue_model": "whelk", - "issue": "octopus", + "project": "oyster", + "location": "nudibranch", + "issue_model": "cuttlefish", + "issue": "mussel", } path = ContactCenterInsightsClient.issue_path(**expected) @@ -31535,9 +34484,9 @@ def test_parse_issue_path(): def test_issue_model_path(): - project = "oyster" - location = "nudibranch" - issue_model = "cuttlefish" + project = "winkle" + location = "nautilus" + issue_model = "scallop" expected = ( "projects/{project}/locations/{location}/issueModels/{issue_model}".format( project=project, @@ -31553,9 +34502,9 @@ def test_issue_model_path(): def test_parse_issue_model_path(): expected = { - "project": "mussel", - "location": "winkle", - "issue_model": "nautilus", + "project": "abalone", + "location": "squid", + "issue_model": "clam", } path = ContactCenterInsightsClient.issue_model_path(**expected) @@ -31565,9 +34514,9 @@ def test_parse_issue_model_path(): def test_participant_path(): - project = "scallop" - conversation = "abalone" - participant = "squid" + project = "whelk" + conversation = "octopus" + participant = "oyster" expected = "projects/{project}/conversations/{conversation}/participants/{participant}".format( project=project, conversation=conversation, @@ -31581,9 +34530,9 @@ def test_participant_path(): def test_parse_participant_path(): expected = { - "project": "clam", - "conversation": "whelk", - "participant": "octopus", + "project": "nudibranch", + "conversation": "cuttlefish", + "participant": "mussel", } path = 
ContactCenterInsightsClient.participant_path(**expected) @@ -31593,9 +34542,9 @@ def test_parse_participant_path(): def test_phrase_matcher_path(): - project = "oyster" - location = "nudibranch" - phrase_matcher = "cuttlefish" + project = "winkle" + location = "nautilus" + phrase_matcher = "scallop" expected = "projects/{project}/locations/{location}/phraseMatchers/{phrase_matcher}".format( project=project, location=location, @@ -31609,9 +34558,9 @@ def test_phrase_matcher_path(): def test_parse_phrase_matcher_path(): expected = { - "project": "mussel", - "location": "winkle", - "phrase_matcher": "nautilus", + "project": "abalone", + "location": "squid", + "phrase_matcher": "clam", } path = ContactCenterInsightsClient.phrase_matcher_path(**expected) @@ -31621,9 +34570,9 @@ def test_parse_phrase_matcher_path(): def test_recognizer_path(): - project = "scallop" - location = "abalone" - recognizer = "squid" + project = "whelk" + location = "octopus" + recognizer = "oyster" expected = ( "projects/{project}/locations/{location}/recognizers/{recognizer}".format( project=project, @@ -31637,9 +34586,9 @@ def test_recognizer_path(): def test_parse_recognizer_path(): expected = { - "project": "clam", - "location": "whelk", - "recognizer": "octopus", + "project": "nudibranch", + "location": "cuttlefish", + "recognizer": "mussel", } path = ContactCenterInsightsClient.recognizer_path(**expected) @@ -31649,8 +34598,8 @@ def test_parse_recognizer_path(): def test_settings_path(): - project = "oyster" - location = "nudibranch" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}/settings".format( project=project, location=location, @@ -31661,8 +34610,8 @@ def test_settings_path(): def test_parse_settings_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "scallop", + "location": "abalone", } path = ContactCenterInsightsClient.settings_path(**expected) @@ -31672,9 +34621,9 @@ def 
test_parse_settings_path(): def test_view_path(): - project = "winkle" - location = "nautilus" - view = "scallop" + project = "squid" + location = "clam" + view = "whelk" expected = "projects/{project}/locations/{location}/views/{view}".format( project=project, location=location, @@ -31686,9 +34635,9 @@ def test_view_path(): def test_parse_view_path(): expected = { - "project": "abalone", - "location": "squid", - "view": "clam", + "project": "octopus", + "location": "oyster", + "view": "nudibranch", } path = ContactCenterInsightsClient.view_path(**expected) @@ -31698,7 +34647,7 @@ def test_parse_view_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -31708,7 +34657,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "mussel", } path = ContactCenterInsightsClient.common_billing_account_path(**expected) @@ -31718,7 +34667,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -31728,7 +34677,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "nautilus", } path = ContactCenterInsightsClient.common_folder_path(**expected) @@ -31738,7 +34687,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -31748,7 +34697,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "abalone", } path = ContactCenterInsightsClient.common_organization_path(**expected) @@ -31758,7 +34707,7 @@ def 
test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -31768,7 +34717,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "clam", } path = ContactCenterInsightsClient.common_project_path(**expected) @@ -31778,8 +34727,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -31790,8 +34739,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "oyster", + "location": "nudibranch", } path = ContactCenterInsightsClient.common_location_path(**expected) diff --git a/packages/google-cloud-container/CHANGELOG.md b/packages/google-cloud-container/CHANGELOG.md index d939cc42c90a..c39b8c48144d 100644 --- a/packages/google-cloud-container/CHANGELOG.md +++ b/packages/google-cloud-container/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-container/#history +## [2.51.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.50.0...google-cloud-container-v2.51.0) (2024-09-03) + + +### Features + +* add `EXTENDED` enum value for `ReleaseChannel.Channel` ([ea71725](https://github.com/googleapis/google-cloud-python/commit/ea71725d3fe3bde0afd775d20127bed958e8eb8e)) +* add ReleaseChannel EXTENDED value ([ea71725](https://github.com/googleapis/google-cloud-python/commit/ea71725d3fe3bde0afd775d20127bed958e8eb8e)) + ## [2.50.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.49.0...google-cloud-container-v2.50.0) (2024-07-30) diff --git a/packages/google-cloud-container/google/cloud/container/gapic_version.py 
b/packages/google-cloud-container/google/cloud/container/gapic_version.py index 39ada5514ff0..a08d6ef9b30b 100644 --- a/packages/google-cloud-container/google/cloud/container/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.50.0" # {x-release-please-version} +__version__ = "2.51.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py index 39ada5514ff0..a08d6ef9b30b 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.50.0" # {x-release-please-version} +__version__ = "2.51.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py index 9518929e7c09..2b7a0e1a642a 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ClusterManagerClient).get_transport_class, type(ClusterManagerClient) - ) + get_transport_class = ClusterManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py index 6367cf1b7183..3dea67a3b36f 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py @@ -657,7 +657,7 @@ def __init__( transport_init: Union[ Type[ClusterManagerTransport], Callable[..., ClusterManagerTransport] ] = ( - type(self).get_transport_class(transport) + ClusterManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ClusterManagerTransport], transport) ) diff --git a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py index e5d04d99d2eb..20d4fb52446a 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py +++ b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py @@ -8388,11 +8388,17 @@ class Channel(proto.Enum): Clusters subscribed to STABLE receive versions that are known to be stable and reliable in production. + EXTENDED (4): + Clusters subscribed to EXTENDED receive + extended support and availability for versions + which are known to be stable and reliable in + production. 
""" UNSPECIFIED = 0 RAPID = 1 REGULAR = 2 STABLE = 3 + EXTENDED = 4 channel: Channel = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py index 39ada5514ff0..a08d6ef9b30b 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.50.0" # {x-release-please-version} +__version__ = "2.51.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py index e30f0bf1ef76..699b9b86cdac 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ClusterManagerClient).get_transport_class, type(ClusterManagerClient) - ) + get_transport_class = ClusterManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py index cd7a5750ed0a..13487da71d47 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py @@ -655,7 +655,7 @@ def __init__( transport_init: Union[ Type[ClusterManagerTransport], Callable[..., ClusterManagerTransport] ] = ( - type(self).get_transport_class(transport) + ClusterManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ClusterManagerTransport], transport) ) diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py index 04e05220a931..f6187795c40e 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py @@ -10166,11 +10166,17 @@ class Channel(proto.Enum): Clusters subscribed to STABLE receive versions that are known to be stable and reliable in production. + EXTENDED (4): + Clusters subscribed to EXTENDED receive + extended support and availability for versions + which are known to be stable and reliable in + production. 
""" UNSPECIFIED = 0 RAPID = 1 REGULAR = 2 STABLE = 3 + EXTENDED = 4 channel: Channel = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json index e1284a608c4e..b317d472f3e2 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "2.50.0" + "version": "2.51.0" }, "snippets": [ { diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json index 17b37ede76ea..f6b08f446330 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "2.50.0" + "version": "2.51.0" }, "snippets": [ { diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py b/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py index ed4966a33353..3e87b9113ba1 100644 --- a/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py @@ -1290,22 +1290,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() 
client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1773,22 +1774,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2263,22 +2265,23 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2713,22 +2716,23 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3199,22 +3203,23 @@ async def test_update_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.update_node_pool(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3552,22 +3557,23 @@ async def test_set_node_pool_autoscaling_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_node_pool_autoscaling - ] = mock_object + ] = mock_rpc request = {} await client.set_node_pool_autoscaling(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_node_pool_autoscaling(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3911,22 +3917,23 @@ async def test_set_logging_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_logging_service - ] = mock_object + ] = mock_rpc request = {} await client.set_logging_service(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_logging_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4397,22 +4404,23 @@ async def test_set_monitoring_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_monitoring_service - ] = mock_object + ] = mock_rpc request = {} await client.set_monitoring_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_monitoring_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4878,22 +4886,23 @@ async def test_set_addons_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_addons_config - ] = mock_object + ] = mock_rpc request = {} await client.set_addons_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_addons_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5360,22 +5369,23 @@ async def test_set_locations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_locations - ] = mock_object + ] = mock_rpc request = {} await client.set_locations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_locations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5822,22 +5832,23 @@ async def test_update_master_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_master - ] = mock_object + ] = mock_rpc request = {} await client.update_master(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_master(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6282,22 +6293,23 @@ async def test_set_master_auth_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_master_auth - ] = mock_object + ] = mock_rpc request = {} await client.set_master_auth(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_master_auth(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6620,22 +6632,23 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7038,22 +7051,23 @@ async def test_list_operations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_operations - ] = mock_object + ] = mock_rpc request = {} await client.list_operations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_operations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7458,22 +7472,23 @@ async def test_get_operation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_operation - ] = mock_object + ] = mock_rpc request = {} await client.get_operation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7873,22 +7888,23 @@ async def test_cancel_operation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_operation - ] = mock_object + ] = mock_rpc request = {} await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8284,22 +8300,23 @@ async def test_get_server_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_server_config - ] = mock_object + ] = mock_rpc request = {} await client.get_server_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_server_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8691,22 +8708,23 @@ async def test_get_json_web_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_json_web_keys - ] = mock_object + ] = mock_rpc request = {} await client.get_json_web_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_json_web_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8977,22 +8995,23 @@ async def test_list_node_pools_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_node_pools - ] = mock_object + ] = mock_rpc request = {} await client.list_node_pools(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_node_pools(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9403,22 +9422,23 @@ async def test_get_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.get_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9863,22 +9883,23 @@ async def test_create_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.create_node_pool(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10327,22 +10348,23 @@ async def test_delete_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.delete_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10757,22 +10779,23 @@ async def test_complete_node_pool_upgrade_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_node_pool_upgrade - ] = mock_object + ] = mock_rpc request = {} await client.complete_node_pool_upgrade(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_node_pool_upgrade(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11090,22 +11113,23 @@ async def test_rollback_node_pool_upgrade_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_node_pool_upgrade - ] = mock_object + ] = mock_rpc request = {} await client.rollback_node_pool_upgrade(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback_node_pool_upgrade(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11576,22 +11600,23 @@ async def test_set_node_pool_management_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_node_pool_management - ] = mock_object + ] = mock_rpc request = {} await client.set_node_pool_management(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_node_pool_management(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11921,22 +11946,23 @@ async def test_set_labels_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_labels - ] = mock_object + ] = mock_rpc request = {} await client.set_labels(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_labels(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12259,22 +12285,23 @@ async def test_set_legacy_abac_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_legacy_abac - ] = mock_object + ] = mock_rpc request = {} await client.set_legacy_abac(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_legacy_abac(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12729,22 +12756,23 @@ async def test_start_ip_rotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_ip_rotation - ] = mock_object + ] = mock_rpc request = {} await client.start_ip_rotation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_ip_rotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13201,22 +13229,23 @@ async def test_complete_ip_rotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_ip_rotation - ] = mock_object + ] = mock_rpc request = {} await client.complete_ip_rotation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_ip_rotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13676,22 +13705,23 @@ async def test_set_node_pool_size_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_node_pool_size - ] = mock_object + ] = mock_rpc request = {} await client.set_node_pool_size(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_node_pool_size(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14032,22 +14062,23 @@ async def test_set_network_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_network_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_network_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_network_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14528,22 +14559,23 @@ async def test_set_maintenance_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_maintenance_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_maintenance_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_maintenance_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15016,22 +15048,23 @@ async def test_list_usable_subnetworks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_usable_subnetworks - ] = mock_object + ] = mock_rpc request = {} await client.list_usable_subnetworks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_usable_subnetworks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15520,22 +15553,23 @@ async def test_check_autopilot_compatibility_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_autopilot_compatibility - ] = mock_object + ] = mock_rpc request = {} await client.check_autopilot_compatibility(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_autopilot_compatibility(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py index dcdc6664a778..da6c520022d0 100644 --- a/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py @@ -1267,22 +1267,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method 
was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1746,22 +1747,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2230,22 +2232,23 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2670,22 +2673,23 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3146,22 +3150,23 @@ async def test_update_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.update_node_pool(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3499,22 +3504,23 @@ async def test_set_node_pool_autoscaling_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_node_pool_autoscaling - ] = mock_object + ] = mock_rpc request = {} await client.set_node_pool_autoscaling(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_node_pool_autoscaling(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3858,22 +3864,23 @@ async def test_set_logging_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_logging_service - ] = mock_object + ] = mock_rpc request = {} await client.set_logging_service(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_logging_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4334,22 +4341,23 @@ async def test_set_monitoring_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_monitoring_service - ] = mock_object + ] = mock_rpc request = {} await client.set_monitoring_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_monitoring_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4805,22 +4813,23 @@ async def test_set_addons_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_addons_config - ] = mock_object + ] = mock_rpc request = {} await client.set_addons_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_addons_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5277,22 +5286,23 @@ async def test_set_locations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_locations - ] = mock_object + ] = mock_rpc request = {} await client.set_locations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_locations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5729,22 +5739,23 @@ async def test_update_master_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_master - ] = mock_object + ] = mock_rpc request = {} await client.update_master(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_master(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6179,22 +6190,23 @@ async def test_set_master_auth_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_master_auth - ] = mock_object + ] = mock_rpc request = {} await client.set_master_auth(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_master_auth(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6517,22 +6529,23 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6925,22 +6938,23 @@ async def test_list_operations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_operations - ] = mock_object + ] = mock_rpc request = {} await client.list_operations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_operations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7335,22 +7349,23 @@ async def test_get_operation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_operation - ] = mock_object + ] = mock_rpc request = {} await client.get_operation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7740,22 +7755,23 @@ async def test_cancel_operation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_operation - ] = mock_object + ] = mock_rpc request = {} await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8141,22 +8157,23 @@ async def test_get_server_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_server_config - ] = mock_object + ] = mock_rpc request = {} await client.get_server_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_server_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8538,22 +8555,23 @@ async def test_get_json_web_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_json_web_keys - ] = mock_object + ] = mock_rpc request = {} await client.get_json_web_keys(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_json_web_keys(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8824,22 +8842,23 @@ async def test_list_node_pools_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_node_pools - ] = mock_object + ] = mock_rpc request = {} await client.list_node_pools(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_node_pools(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9240,22 +9259,23 @@ async def test_get_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.get_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9690,22 +9710,23 @@ async def test_create_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.create_node_pool(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10144,22 +10165,23 @@ async def test_delete_node_pool_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_node_pool - ] = mock_object + ] = mock_rpc request = {} await client.delete_node_pool(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_node_pool(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10564,22 +10586,23 @@ async def test_complete_node_pool_upgrade_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_node_pool_upgrade - ] = mock_object + ] = mock_rpc request = {} await client.complete_node_pool_upgrade(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_node_pool_upgrade(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10897,22 +10920,23 @@ async def test_rollback_node_pool_upgrade_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_node_pool_upgrade - ] = mock_object + ] = mock_rpc request = {} await client.rollback_node_pool_upgrade(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback_node_pool_upgrade(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11373,22 +11397,23 @@ async def test_set_node_pool_management_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_node_pool_management - ] = mock_object + ] = mock_rpc request = {} await client.set_node_pool_management(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_node_pool_management(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11844,22 +11869,23 @@ async def test_set_labels_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_labels - ] = mock_object + ] = mock_rpc request = {} await client.set_labels(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_labels(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12304,22 +12330,23 @@ async def test_set_legacy_abac_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_legacy_abac - ] = mock_object + ] = mock_rpc request = {} await client.set_legacy_abac(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_legacy_abac(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12764,22 +12791,23 @@ async def test_start_ip_rotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_ip_rotation - ] = mock_object + ] = mock_rpc request = {} await client.start_ip_rotation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_ip_rotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13226,22 +13254,23 @@ async def test_complete_ip_rotation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.complete_ip_rotation - ] = mock_object + ] = mock_rpc request = {} await client.complete_ip_rotation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.complete_ip_rotation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13691,22 +13720,23 @@ async def test_set_node_pool_size_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_node_pool_size - ] = mock_object + ] = mock_rpc request = {} await client.set_node_pool_size(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_node_pool_size(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14047,22 +14077,23 @@ async def test_set_network_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_network_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_network_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_network_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14533,22 +14564,23 @@ async def test_set_maintenance_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_maintenance_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_maintenance_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_maintenance_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15011,22 +15043,23 @@ async def test_list_usable_subnetworks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_usable_subnetworks - ] = mock_object + ] = mock_rpc request = {} await client.list_usable_subnetworks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_usable_subnetworks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15601,22 +15634,23 @@ async def test_check_autopilot_compatibility_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_autopilot_compatibility - ] = mock_object + ] = mock_rpc request = {} await client.check_autopilot_compatibility(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_autopilot_compatibility(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15891,22 +15925,23 @@ async def test_list_locations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_locations - ] = mock_object + ] = mock_rpc request = {} await client.list_locations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_locations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py index c99be268ce75..558c8aab67c5 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py index c99be268ce75..558c8aab67c5 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.14.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/async_client.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/async_client.py index 3afffcc31462..7ae843dbce3d 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/async_client.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,9 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ContainerAnalysisClient).get_transport_class, type(ContainerAnalysisClient) - ) + get_transport_class = ContainerAnalysisClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py index f547b55537f2..3b0dec1774db 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py @@ -662,7 +662,7 @@ def __init__( Type[ContainerAnalysisTransport], Callable[..., ContainerAnalysisTransport], ] = ( - type(self).get_transport_class(transport) + ContainerAnalysisClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., 
ContainerAnalysisTransport], transport) ) diff --git a/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json b/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json index 21bd0118a95d..dab3e1ef2940 100644 --- a/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json +++ b/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-containeranalysis", - "version": "2.14.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py b/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py index 08ab1ffe97ee..8852b7b1d541 100644 --- a/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py +++ b/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py @@ -1329,22 +1329,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1712,22 +1713,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2103,22 +2105,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2517,22 +2520,23 @@ async def test_get_vulnerability_occurrences_summary_async_use_cached_wrapped_rp ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_vulnerability_occurrences_summary - ] = mock_object + ] = mock_rpc request = {} await client.get_vulnerability_occurrences_summary(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_vulnerability_occurrences_summary(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py index 35a7786db3a7..558c8aab67c5 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.7.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py index 35a7786db3a7..558c8aab67c5 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.7.9" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py index f344385b9816..3c6831dc21f0 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,10 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentLinkServiceClient).get_transport_class, - type(DocumentLinkServiceClient), - ) + get_transport_class = DocumentLinkServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py index 75e842e51274..d3046a9d7f24 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py @@ -695,7 +695,7 @@ def __init__( Type[DocumentLinkServiceTransport], Callable[..., DocumentLinkServiceTransport], ] = ( - type(self).get_transport_class(transport) + DocumentLinkServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentLinkServiceTransport], transport) ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py index a6de5aa870ce..db68dbe4838c 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,10 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentSchemaServiceClient).get_transport_class, - type(DocumentSchemaServiceClient), - ) + get_transport_class = DocumentSchemaServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py index e09f1fefc82d..2ec3ff29b93c 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py @@ -691,7 +691,7 @@ def __init__( Type[DocumentSchemaServiceTransport], Callable[..., DocumentSchemaServiceTransport], ] = ( - type(self).get_transport_class(transport) + DocumentSchemaServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentSchemaServiceTransport], transport) ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/async_client.py index 471cbd4892a7..ff1753f3d097 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -204,9 +203,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentServiceClient).get_transport_class, type(DocumentServiceClient) - ) + get_transport_class = DocumentServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py index a67244cd0062..49814fe14345 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py @@ -712,7 +712,7 @@ def __init__( transport_init: Union[ Type[DocumentServiceTransport], Callable[..., DocumentServiceTransport] ] = ( - type(self).get_transport_class(transport) + DocumentServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentServiceTransport], transport) ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py index af737f1f1c39..22669269e196 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PipelineServiceClient).get_transport_class, type(PipelineServiceClient) - ) + get_transport_class = PipelineServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py index f4f365323074..ef5b9440ab85 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py @@ -682,7 +682,7 @@ def __init__( transport_init: Union[ Type[PipelineServiceTransport], Callable[..., PipelineServiceTransport] ] = ( - type(self).get_transport_class(transport) + PipelineServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PipelineServiceTransport], transport) ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py index cf1c549e42f6..7c13c495543a 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -193,9 +192,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(RuleSetServiceClient).get_transport_class, type(RuleSetServiceClient) - ) + get_transport_class = RuleSetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py index 45d311236f7a..df37e447f79d 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py @@ -701,7 +701,7 @@ def __init__( transport_init: Union[ Type[RuleSetServiceTransport], Callable[..., RuleSetServiceTransport] ] = ( - type(self).get_transport_class(transport) + RuleSetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., RuleSetServiceTransport], transport) ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py index d47996577184..666fd9a062ff 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SynonymSetServiceClient).get_transport_class, type(SynonymSetServiceClient) - ) + get_transport_class = SynonymSetServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py index 32812db3afc9..61e3bdc10a0c 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py @@ -687,7 +687,7 @@ def __init__( Type[SynonymSetServiceTransport], Callable[..., SynonymSetServiceTransport], ] = ( - type(self).get_transport_class(transport) + SynonymSetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SynonymSetServiceTransport], transport) ) diff --git a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json index 6f7185201f9c..4e798f93791e 100644 --- a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json +++ b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-contentwarehouse", - "version": "0.7.9" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py index c85e3d67f810..1f996a128646 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py @@ -1368,22 +1368,23 @@ async def test_list_linked_targets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_linked_targets - ] = mock_object + ] = mock_rpc request = {} await client.list_linked_targets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_linked_targets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1757,22 +1758,23 @@ async def test_list_linked_sources_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_linked_sources - ] = mock_object + ] = mock_rpc request = {} await client.list_linked_sources(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_linked_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2352,22 +2354,23 @@ async def test_create_document_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_document_link - ] = mock_object + ] = mock_rpc request = {} await client.create_document_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_document_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2746,22 +2749,23 @@ async def test_delete_document_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_document_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_document_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_document_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py index 6918e5a30c6b..80a7753d7452 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py @@ -1379,22 +1379,23 @@ async def test_create_document_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_document_schema - ] = mock_object + ] = mock_rpc request = {} await client.create_document_schema(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_document_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1792,22 +1793,23 @@ async def test_update_document_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_document_schema - ] = mock_object + ] = mock_rpc request = {} await client.update_document_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_document_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2204,22 +2206,23 @@ async def test_get_document_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_document_schema - ] = mock_object + ] = mock_rpc request = {} await client.get_document_schema(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_document_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2591,22 +2594,23 @@ async def test_delete_document_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_document_schema - ] = mock_object + ] = mock_rpc request = {} await client.delete_document_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_document_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2972,22 +2976,23 @@ async def test_list_document_schemas_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_document_schemas - ] = mock_object + ] = mock_rpc request = {} await client.list_document_schemas(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_document_schemas(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py index 87bf8ec6620f..5a92f71043cf 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py @@ -1326,22 +1326,23 @@ async def test_create_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_document - ] = mock_object + ] = mock_rpc request = {} await client.create_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1747,22 +1748,23 @@ async def test_get_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_document - ] = mock_object + ] = mock_rpc request = {} await client.get_document(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2140,22 +2142,23 @@ async def test_update_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_document - ] = mock_object + ] = mock_rpc request = {} await client.update_document(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2505,22 +2508,23 @@ async def test_delete_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_document - ] = mock_object + ] = mock_rpc request = {} await client.delete_document(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2873,22 +2877,23 @@ async def test_search_documents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_documents - ] = mock_object + ] = mock_rpc request = {} await client.search_documents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3493,22 +3498,23 @@ async def test_lock_document_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lock_document - ] = mock_object + ] = mock_rpc request = {} await client.lock_document(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lock_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3884,22 +3890,23 @@ async def test_fetch_acl_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_acl - ] = mock_object + ] = mock_rpc request = {} await client.fetch_acl(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_acl(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4239,22 +4246,23 @@ async def test_set_acl_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_acl - ] = mock_object + ] = mock_rpc request = {} await client.set_acl(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_acl(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py index 12669e2313b3..e87f4adba870 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py @@ -1260,8 +1260,9 @@ def test_run_pipeline_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_pipeline(request) @@ -1315,26 +1316,28 @@ async def test_run_pipeline_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_pipeline - ] = mock_object + ] = mock_rpc request = {} await client.run_pipeline(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py index 22aba996ea70..cc15a93ecd1d 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py @@ -1294,22 +1294,23 @@ async def test_create_rule_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_rule_set - ] = mock_object + ] = mock_rpc request = {} await client.create_rule_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_rule_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1675,22 +1676,23 @@ async def test_get_rule_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_rule_set - ] = mock_object + ] = mock_rpc request = {} await client.get_rule_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_rule_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2046,22 +2048,23 @@ async def test_update_rule_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_rule_set - ] = mock_object + ] = mock_rpc request = {} await client.update_rule_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_rule_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2414,22 +2417,23 @@ async def test_delete_rule_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_rule_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_rule_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_rule_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2772,22 +2776,23 @@ async def test_list_rule_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_rule_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_rule_sets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_rule_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py index 451c74e46bf7..06ca74439156 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py @@ -1338,22 +1338,23 @@ async def test_create_synonym_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_synonym_set - ] = mock_object + ] = mock_rpc request = {} await client.create_synonym_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_synonym_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1728,22 +1729,23 @@ async def test_get_synonym_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_synonym_set - ] = mock_object + ] = mock_rpc request = {} await client.get_synonym_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_synonym_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2110,22 +2112,23 @@ async def test_update_synonym_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_synonym_set - ] = mock_object + ] = mock_rpc request = {} await client.update_synonym_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_synonym_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2502,22 +2505,23 @@ async def test_delete_synonym_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_synonym_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_synonym_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_synonym_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2880,22 +2884,23 @@ async def test_list_synonym_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_synonym_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_synonym_sets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_synonym_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/async_client.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/async_client.py index 54b9be0842e3..752f062de3b3 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/async_client.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataFusionClient).get_transport_class, type(DataFusionClient) - ) + get_transport_class = DataFusionClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py index 1adb5e2f23cc..1fb70174685f 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py @@ -688,7 +688,7 @@ def __init__( transport_init: Union[ Type[DataFusionTransport], Callable[..., DataFusionTransport] ] = ( - type(self).get_transport_class(transport) + DataFusionClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataFusionTransport], transport) ) diff --git a/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json 
b/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json index 621909d85f63..1c5648e25de2 100644 --- a/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json +++ b/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-data-fusion", - "version": "1.10.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py b/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py index 46e3afdc643a..c3738bf000c3 100644 --- a/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py +++ b/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py @@ -1268,22 +1268,23 @@ async def test_list_available_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_available_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_available_versions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_available_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1854,22 +1855,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2391,22 +2393,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2657,8 +2660,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -2712,26 +2716,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3041,8 +3047,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -3096,26 +3103,28 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3401,8 +3410,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -3456,26 +3466,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3777,8 +3789,9 @@ def test_restart_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restart_instance(request) @@ -3832,26 +3845,28 @@ async def test_restart_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restart_instance - ] = mock_object + ] = mock_rpc request = {} await client.restart_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restart_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py b/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py index 52ac68d02611..558c8aab67c5 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.10.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py index 52ac68d02611..558c8aab67c5 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.10.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/async_client.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/async_client.py index b7c6f8cc25f8..76031483cd3d 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/async_client.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -257,10 +256,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AutoSuggestionServiceClient).get_transport_class, - type(AutoSuggestionServiceClient), - ) + get_transport_class = AutoSuggestionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py index a6da93e4f7b8..6de5b4c512ba 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py @@ -718,7 +718,7 @@ def __init__( Type[AutoSuggestionServiceTransport], Callable[..., AutoSuggestionServiceTransport], ] = ( - type(self).get_transport_class(transport) + AutoSuggestionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AutoSuggestionServiceTransport], transport) ) diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/async_client.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/async_client.py index be6bf229721a..6ea78adb9c12 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/async_client.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -215,9 +214,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(QuestionServiceClient).get_transport_class, type(QuestionServiceClient) - ) + get_transport_class = QuestionServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py index 34323c3d1076..a1793ae50ed9 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py @@ -711,7 +711,7 @@ def __init__( transport_init: Union[ Type[QuestionServiceTransport], Callable[..., QuestionServiceTransport] ] = ( - type(self).get_transport_class(transport) + QuestionServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., QuestionServiceTransport], transport) ) diff --git a/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json b/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json index d104234fa78b..2325b770fe09 100644 --- a/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json +++ b/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-data-qna", - "version": "0.10.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py 
b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py index e3de3b31049f..45c3f5ea6ca8 100644 --- a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py +++ b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py @@ -1347,22 +1347,23 @@ async def test_suggest_queries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.suggest_queries - ] = mock_object + ] = mock_rpc request = {} await client.suggest_queries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.suggest_queries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py index 8458160723db..6765274f125a 100644 --- a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py +++ b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py @@ -1325,22 +1325,23 @@ async def test_get_question_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_question - ] = mock_object + ] = mock_rpc request = {} await client.get_question(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_question(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1705,22 +1706,23 @@ async def test_create_question_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_question - ] = mock_object + ] = mock_rpc request = {} await client.create_question(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_question(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2101,22 +2103,23 @@ async def test_execute_question_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.execute_question - ] = mock_object + ] = mock_rpc request = {} await client.execute_question(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.execute_question(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2496,22 +2499,23 @@ async def test_get_user_feedback_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_user_feedback - ] = mock_object + ] = mock_rpc request = {} await client.get_user_feedback(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_user_feedback(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2889,22 +2893,23 @@ async def test_update_user_feedback_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_user_feedback - ] = mock_object + ] = mock_rpc request = {} await client.update_user_feedback(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_user_feedback(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py index cbc79b808afc..558c8aab67c5 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py index cbc79b808afc..558c8aab67c5 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.8" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/async_client.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/async_client.py index bc785e00f39f..128a73fb72df 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/async_client.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,9 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LineageClient).get_transport_class, type(LineageClient) - ) + get_transport_class = LineageClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py index 3bb5b735b73f..349c60ba95f4 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py @@ -718,7 +718,7 @@ def __init__( transport_init: Union[ Type[LineageTransport], Callable[..., LineageTransport] ] = ( - type(self).get_transport_class(transport) + LineageClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LineageTransport], transport) ) diff --git a/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json 
b/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json index 2156bebef95e..02d1172d6df8 100644 --- a/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json +++ b/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog-lineage", - "version": "0.3.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py b/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py index 31e8666c9ddd..3b3d72019372 100644 --- a/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py +++ b/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py @@ -1249,22 +1249,23 @@ async def test_process_open_lineage_run_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.process_open_lineage_run_event - ] = mock_object + ] = mock_rpc request = {} await client.process_open_lineage_run_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.process_open_lineage_run_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1679,22 +1680,23 @@ async def test_create_process_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_process - ] = mock_object + ] = mock_rpc request = {} await client.create_process(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_process(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2050,22 +2052,23 @@ async def test_update_process_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_process - ] = mock_object + ] = mock_rpc request = {} await client.update_process(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_process(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2425,22 +2428,23 @@ async def test_get_process_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_process - ] = mock_object + ] = mock_rpc request = {} await client.get_process(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_process(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2789,22 +2793,23 @@ async def test_list_processes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_processes - ] = mock_object + ] = mock_rpc request = {} await client.list_processes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_processes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3291,8 +3296,9 @@ def test_delete_process_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_process(request) @@ -3346,26 +3352,28 @@ async def test_delete_process_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_process - ] = mock_object + ] = mock_rpc request = {} await client.delete_process(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_process(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3717,22 +3725,23 @@ async def test_create_run_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_run - ] = mock_object + ] = mock_rpc request = {} await client.create_run(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4091,22 +4100,23 @@ async def test_update_run_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_run - ] = mock_object + ] = mock_rpc request = {} await client.update_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4469,22 +4479,23 @@ async def test_get_run_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_run - ] = mock_object + ] = mock_rpc request = {} await client.get_run(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4833,22 +4844,23 @@ async def test_list_runs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_runs - ] = mock_object + ] = mock_rpc request = {} await client.list_runs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5335,8 +5347,9 @@ def test_delete_run_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_run(request) @@ -5388,26 +5401,28 @@ async def test_delete_run_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_run - ] = mock_object + ] = mock_rpc request = {} await client.delete_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5767,22 +5782,23 @@ async def test_create_lineage_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_lineage_event - ] = mock_object + ] = mock_rpc request = {} await client.create_lineage_event(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_lineage_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6161,22 +6177,23 @@ async def test_get_lineage_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_lineage_event - ] = mock_object + ] = mock_rpc request = {} await client.get_lineage_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_lineage_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6549,22 +6566,23 @@ async def test_list_lineage_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_lineage_events - ] = mock_object + ] = mock_rpc request = {} await client.list_lineage_events(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_lineage_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7130,22 +7148,23 @@ async def test_delete_lineage_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_lineage_event - ] = mock_object + ] = mock_rpc request = {} await client.delete_lineage_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_lineage_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7497,22 +7516,23 @@ async def test_search_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_links - ] = mock_object + ] = mock_rpc request = {} await client.search_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7988,22 +8008,23 @@ async def test_batch_search_link_processes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_search_link_processes - ] = mock_object + ] = mock_rpc request = {} await client.batch_search_link_processes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_search_link_processes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog/CHANGELOG.md b/packages/google-cloud-datacatalog/CHANGELOG.md index 7abd3dd90890..885b8cebdcf8 100644 --- a/packages/google-cloud-datacatalog/CHANGELOG.md +++ b/packages/google-cloud-datacatalog/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datacatalog/#history +## [3.20.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.20.0...google-cloud-datacatalog-v3.20.1) (2024-07-31) + + +### Documentation + +* [google-cloud-datacatalog] mark DataplexTransferStatus.MIGRATED as deprecated ([#12968](https://github.com/googleapis/google-cloud-python/issues/12968)) ([6cebf3e](https://github.com/googleapis/google-cloud-python/commit/6cebf3e1f0d3014cea558e280e4ebf41b5d477ec)) + ## 
[3.20.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.19.1...google-cloud-datacatalog-v3.20.0) (2024-07-30) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py index c3ababdf3b1a..558c8aab67c5 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.20.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py index c3ababdf3b1a..558c8aab67c5 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.20.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py index 11e88eef1af1..8563a22d9452 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -219,9 +218,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataCatalogClient).get_transport_class, type(DataCatalogClient) - ) + get_transport_class = DataCatalogClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py index 82a91dfeb438..60d5ae521e3a 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py @@ -797,7 +797,7 @@ def __init__( transport_init: Union[ Type[DataCatalogTransport], Callable[..., DataCatalogTransport] ] = ( - type(self).get_transport_class(transport) + DataCatalogClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataCatalogTransport], transport) ) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py index d058c3c6634f..7d163597edd4 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PolicyTagManagerClient).get_transport_class, type(PolicyTagManagerClient) - ) + get_transport_class = PolicyTagManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py index b1f2e7a86ec0..61ccfadc83f7 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py @@ -695,7 +695,7 @@ def __init__( Type[PolicyTagManagerTransport], Callable[..., PolicyTagManagerTransport], ] = ( - type(self).get_transport_class(transport) + PolicyTagManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PolicyTagManagerTransport], transport) ) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py index a10cd8d0705a..749b175afc3b 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -209,10 +208,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PolicyTagManagerSerializationClient).get_transport_class, - type(PolicyTagManagerSerializationClient), - ) + get_transport_class = PolicyTagManagerSerializationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py index c604038771ed..800d04876397 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py @@ -687,7 +687,7 @@ def __init__( Type[PolicyTagManagerSerializationTransport], Callable[..., PolicyTagManagerSerializationTransport], ] = ( - type(self).get_transport_class(transport) + PolicyTagManagerSerializationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., PolicyTagManagerSerializationTransport], transport diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py index c3ababdf3b1a..558c8aab67c5 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.20.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py index 72b520b9d2e8..7597eda537a5 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -216,9 +215,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataCatalogClient).get_transport_class, type(DataCatalogClient) - ) + get_transport_class = DataCatalogClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py index f9f1045b6b8e..1760cad85812 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py @@ -794,7 +794,7 @@ def __init__( transport_init: Union[ Type[DataCatalogTransport], Callable[..., DataCatalogTransport] ] = ( - type(self).get_transport_class(transport) + DataCatalogClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataCatalogTransport], transport) ) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py 
b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py index f7bb96f65d78..f0edd007b8ca 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PolicyTagManagerClient).get_transport_class, type(PolicyTagManagerClient) - ) + get_transport_class = PolicyTagManagerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py index 485ecfc41a77..420376ff3822 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py @@ -691,7 +691,7 @@ def __init__( Type[PolicyTagManagerTransport], Callable[..., PolicyTagManagerTransport], ] = ( - type(self).get_transport_class(transport) + PolicyTagManagerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PolicyTagManagerTransport], transport) ) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py index c29575ad8158..ed73043cb897 100644 --- 
a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -206,10 +205,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PolicyTagManagerSerializationClient).get_transport_class, - type(PolicyTagManagerSerializationClient), - ) + get_transport_class = PolicyTagManagerSerializationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py index 3818bafca657..179bf24456d1 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py @@ -684,7 +684,7 @@ def __init__( Type[PolicyTagManagerSerializationTransport], Callable[..., PolicyTagManagerSerializationTransport], ] = ( - type(self).get_transport_class(transport) + PolicyTagManagerSerializationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., PolicyTagManagerSerializationTransport], transport diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/tags.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/tags.py index 9bedbcd23a5d..a45cde70e169 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/tags.py +++ 
b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/tags.py @@ -258,6 +258,9 @@ class DataplexTransferStatus(proto.Enum): TagTemplate and its tags are auto-copied to Dataplex service. Visible in both services. Editable in DataCatalog, read-only in Dataplex. + Deprecated: Individual TagTemplate migration is + deprecated in favor of organization or project + wide TagTemplate migration opt-in. """ DATAPLEX_TRANSFER_STATUS_UNSPECIFIED = 0 MIGRATED = 1 diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json index 6c4bce7a08a1..1659a652e62c 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.20.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json index 5c0fe0812a6d..14e0e75feb01 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.20.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/scripts/client-post-processing/doc-formatting.yaml b/packages/google-cloud-datacatalog/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 
000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-cloud-datacatalog/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ +../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index d6eda9c1da89..c6db81642361 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -1261,22 +1261,23 @@ async def test_search_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_catalog - ] = mock_object + ] = mock_rpc request = {} await client.search_catalog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1801,22 +1802,23 @@ async def test_create_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2205,22 +2207,23 @@ async def test_get_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2597,22 +2600,23 @@ async def test_update_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2990,22 +2994,23 @@ async def test_delete_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3367,22 +3372,23 @@ async def test_list_entry_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entry_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entry_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3959,22 +3965,23 @@ async def test_create_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry - ] = mock_object + ] = mock_rpc request = {} await client.create_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4357,22 +4364,23 @@ async def test_update_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry - ] = mock_object + ] = mock_rpc request = {} await client.update_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4728,22 +4736,23 @@ async def test_delete_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5095,22 +5104,23 @@ async def test_get_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5485,22 +5495,23 @@ async def test_lookup_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_entry - ] = mock_object + ] = mock_rpc request = {} await client.lookup_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5716,22 +5727,23 @@ async def test_list_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_entries(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6287,22 +6299,23 @@ async def test_modify_entry_overview_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.modify_entry_overview - ] = mock_object + ] = mock_rpc request = {} await client.modify_entry_overview(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.modify_entry_overview(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6583,22 +6596,23 @@ async def test_modify_entry_contacts_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.modify_entry_contacts - ] = mock_object + ] = mock_rpc request = {} await client.modify_entry_contacts(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.modify_entry_contacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6888,22 +6902,23 @@ async def test_create_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7290,22 +7305,23 @@ async def test_get_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7668,22 +7684,23 @@ async def test_update_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8057,22 +8074,23 @@ async def test_delete_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8459,22 +8477,23 @@ async def test_create_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8887,22 +8906,23 @@ async def test_update_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9317,22 +9337,23 @@ async def test_rename_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9737,22 +9758,23 @@ async def test_rename_tag_template_field_enum_value_async_use_cached_wrapped_rpc ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_tag_template_field_enum_value - ] = mock_object + ] = mock_rpc request = {} await client.rename_tag_template_field_enum_value(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_tag_template_field_enum_value(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10136,22 +10158,23 @@ async def test_delete_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10517,22 +10540,23 @@ async def test_create_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag - ] = mock_object + ] = mock_rpc request = {} await client.create_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10892,22 +10916,23 @@ async def test_update_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag - ] = mock_object + ] = mock_rpc request = {} await client.update_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11257,22 +11282,23 @@ async def test_delete_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11612,22 +11638,23 @@ async def test_list_tags_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_tags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12116,8 +12143,9 @@ def test_reconcile_tags_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reconcile_tags(request) @@ -12171,26 +12199,28 @@ async def test_reconcile_tags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reconcile_tags - ] = mock_object + ] = mock_rpc request = {} await client.reconcile_tags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reconcile_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12447,22 +12477,23 @@ async def test_star_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.star_entry - ] = mock_object + ] = mock_rpc request = {} await client.star_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.star_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12803,22 +12834,23 @@ async def test_unstar_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.unstar_entry - ] = mock_object + ] = mock_rpc request = {} await client.unstar_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.unstar_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13167,22 +13199,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13550,22 +13583,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13941,22 +13975,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14197,8 +14232,9 @@ def test_import_entries_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_entries(request) @@ -14252,26 +14288,28 @@ async def test_import_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_entries - ] = mock_object + ] = mock_rpc request = {} await client.import_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py index b9901349b229..df706a04c106 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py @@ -1298,22 +1298,23 @@ async def test_create_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.create_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1677,22 +1678,23 @@ async def test_delete_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.delete_taxonomy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2046,22 +2048,23 @@ async def test_update_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.update_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2426,22 +2429,23 @@ async def test_list_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.list_taxonomies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3002,22 +3006,23 @@ async def test_get_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.get_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3400,22 +3405,23 @@ async def test_create_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.create_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3796,22 +3802,23 @@ async def test_delete_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.delete_policy_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4180,22 +4187,23 @@ async def test_update_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.update_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4567,22 +4575,23 @@ async def test_list_policy_tags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_policy_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_policy_tags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_policy_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5137,22 +5146,23 @@ async def test_get_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.get_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5512,22 +5522,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5814,22 +5825,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6126,22 +6138,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py index 7394b316d30b..34366fb4e79e 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py @@ -1352,22 +1352,23 @@ async def test_replace_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.replace_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.replace_taxonomy(request) # Establish that the 
underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.replace_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1652,22 +1653,23 @@ async def test_import_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.import_taxonomies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.import_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1943,22 +1945,23 @@ async def test_export_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.export_taxonomies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.export_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py index 27cccbb831ad..92ffd3ce8379 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py @@ -1248,22 +1248,23 @@ async def test_search_catalog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_catalog - ] = mock_object + ] = mock_rpc request = {} await client.search_catalog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_catalog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1788,22 +1789,23 @@ async def test_create_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2200,22 +2202,23 @@ async def test_update_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2594,22 +2597,23 @@ async def test_get_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2977,22 +2981,23 @@ async def test_delete_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3354,22 +3359,23 @@ async def test_list_entry_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entry_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entry_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3943,22 +3949,23 @@ async def test_create_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry - ] = mock_object + ] = mock_rpc request = {} await client.create_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4336,22 +4343,23 @@ async def test_update_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry - ] = mock_object + ] = mock_rpc request = {} await client.update_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4705,22 +4713,23 @@ async def test_delete_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5069,22 +5078,23 @@ async def test_get_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5448,22 +5458,23 @@ async def test_lookup_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_entry - ] = mock_object + ] = mock_rpc request = {} await client.lookup_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5677,22 +5688,23 @@ async def test_list_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_entries(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6258,22 +6270,23 @@ async def test_create_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6666,22 +6679,23 @@ async def test_get_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7050,22 +7064,23 @@ async def test_update_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7442,22 +7457,23 @@ async def test_delete_tag_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag_template - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7844,22 +7860,23 @@ async def test_create_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8272,22 +8289,23 @@ async def test_update_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8702,22 +8720,23 @@ async def test_rename_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9122,22 +9141,23 @@ async def test_rename_tag_template_field_enum_value_async_use_cached_wrapped_rpc ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rename_tag_template_field_enum_value - ] = mock_object + ] = mock_rpc request = {} await client.rename_tag_template_field_enum_value(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rename_tag_template_field_enum_value(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9521,22 +9541,23 @@ async def test_delete_tag_template_field_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag_template_field - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag_template_field(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9902,22 +9923,23 @@ async def test_create_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_tag - ] = mock_object + ] = mock_rpc request = {} await client.create_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10277,22 +10299,23 @@ async def test_update_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_tag - ] = mock_object + ] = mock_rpc request = {} await client.update_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10642,22 +10665,23 @@ async def test_delete_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_tag - ] = mock_object + ] = mock_rpc request = {} await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10997,22 +11021,23 @@ async def test_list_tags_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_tags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11558,22 +11583,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11941,22 +11967,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12332,22 +12359,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py index c922c5b886c5..3a1e0fa02567 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py @@ -1298,22 +1298,23 @@ async def test_create_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.create_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1677,22 +1678,23 @@ async def test_delete_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.delete_taxonomy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2046,22 +2048,23 @@ async def test_update_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.update_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2426,22 +2429,23 @@ async def test_list_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.list_taxonomies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3002,22 +3006,23 @@ async def test_get_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.get_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3400,22 +3405,23 @@ async def test_create_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.create_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3796,22 +3802,23 @@ async def test_delete_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.delete_policy_tag(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4180,22 +4187,23 @@ async def test_update_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.update_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4567,22 +4575,23 @@ async def test_list_policy_tags_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_policy_tags - ] = mock_object + ] = mock_rpc request = {} await client.list_policy_tags(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_policy_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5137,22 +5146,23 @@ async def test_get_policy_tag_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_policy_tag - ] = mock_object + ] = mock_rpc request = {} await client.get_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_policy_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5512,22 +5522,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5814,22 +5825,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6126,22 +6138,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py index c4d7a3249103..3af441095898 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py @@ -1336,22 +1336,23 @@ async def test_import_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.import_taxonomies(request) # 
Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.import_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1627,22 +1628,23 @@ async def test_export_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.export_taxonomies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.export_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py index f94a6b1a4aec..558c8aab67c5 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.8.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py index f94a6b1a4aec..558c8aab67c5 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.8.12" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py index fa16fb3eef69..a24cc0eb8e9e 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,10 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(FlexTemplatesServiceClient).get_transport_class, - type(FlexTemplatesServiceClient), - ) + get_transport_class = FlexTemplatesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py index efdca64928f3..71e049ca39e5 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py @@ -644,7 +644,7 @@ def __init__( Type[FlexTemplatesServiceTransport], Callable[..., FlexTemplatesServiceTransport], ] = ( - type(self).get_transport_class(transport) + FlexTemplatesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FlexTemplatesServiceTransport], transport) ) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py index cbf7b23e83a6..f811d534e78e 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(JobsV1Beta3Client).get_transport_class, type(JobsV1Beta3Client) - ) + get_transport_class = JobsV1Beta3Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py index b0f307160d19..1f3019cd8a89 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py @@ -639,7 +639,7 @@ def __init__( transport_init: Union[ Type[JobsV1Beta3Transport], Callable[..., JobsV1Beta3Transport] ] = ( - type(self).get_transport_class(transport) + JobsV1Beta3Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., JobsV1Beta3Transport], transport) ) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py index 6ddf846a4aa5..0b9299d50f5c 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MessagesV1Beta3Client).get_transport_class, type(MessagesV1Beta3Client) - ) + get_transport_class = MessagesV1Beta3Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py index b54eb92d3793..c32452797c5a 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[MessagesV1Beta3Transport], Callable[..., MessagesV1Beta3Transport] ] = ( - type(self).get_transport_class(transport) + MessagesV1Beta3Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MessagesV1Beta3Transport], transport) ) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py index 57e3e5006763..a1686a129fb0 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MetricsV1Beta3Client).get_transport_class, type(MetricsV1Beta3Client) - ) + get_transport_class = MetricsV1Beta3Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py index d86b7b9fa5a7..9c6570dea285 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[MetricsV1Beta3Transport], Callable[..., MetricsV1Beta3Transport] ] = ( - type(self).get_transport_class(transport) + MetricsV1Beta3Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetricsV1Beta3Transport], transport) ) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py index 9fac47b40297..f46ceaa9029f 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SnapshotsV1Beta3Client).get_transport_class, type(SnapshotsV1Beta3Client) - ) + get_transport_class = SnapshotsV1Beta3Client.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py index 0e67ac28c812..568607068769 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py @@ -645,7 +645,7 @@ def __init__( Type[SnapshotsV1Beta3Transport], Callable[..., SnapshotsV1Beta3Transport], ] = ( - type(self).get_transport_class(transport) + SnapshotsV1Beta3Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SnapshotsV1Beta3Transport], transport) ) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py index 6ff37d5c83e1..bc4e47b5c0b8 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TemplatesServiceClient).get_transport_class, type(TemplatesServiceClient) - ) + get_transport_class = TemplatesServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py index bc7a34b75f59..53466a958396 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py @@ -645,7 +645,7 @@ def __init__( Type[TemplatesServiceTransport], Callable[..., TemplatesServiceTransport], ] = ( - type(self).get_transport_class(transport) + TemplatesServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TemplatesServiceTransport], transport) ) diff --git a/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json b/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json index 8c52d7b6d17e..07ae5fac5096 100644 --- a/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json +++ b/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataflow-client", - "version": "0.8.12" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py 
b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py index d454d148b542..aa5d43d912bc 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py @@ -1362,22 +1362,23 @@ async def test_launch_flex_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.launch_flex_template - ] = mock_object + ] = mock_rpc request = {} await client.launch_flex_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.launch_flex_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py index 72e99e596064..88582acc1b51 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py @@ -1294,22 +1294,23 @@ async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job - ] = mock_object + ] = mock_rpc request = {} await client.create_job(request) # 
Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1643,22 +1644,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1994,22 +1996,23 @@ async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_job - ] = mock_object + ] = mock_rpc request = {} await client.update_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2306,22 +2309,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2805,22 +2809,23 @@ async def test_aggregated_list_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.aggregated_list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.aggregated_list_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.aggregated_list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3305,22 +3310,23 @@ async def test_check_active_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.check_active_jobs - ] = mock_object + ] = mock_rpc request = {} await client.check_active_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.check_active_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3552,22 +3558,23 @@ async def test_snapshot_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.snapshot_job - ] = mock_object + ] = mock_rpc request = {} await client.snapshot_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.snapshot_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py index 2c7c68cad73c..7b5bfb96ec0b 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py @@ -1327,22 +1327,23 @@ async def test_list_job_messages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_job_messages - ] = mock_object + ] = mock_rpc request = {} await client.list_job_messages(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_job_messages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py index ccf43558e7b7..102d39f0cb8d 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py @@ -1288,22 +1288,23 @@ async def test_get_job_metrics_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job_metrics - ] = mock_object + ] = mock_rpc request = {} await client.get_job_metrics(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1586,22 +1587,23 @@ async def test_get_job_execution_details_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job_execution_details - ] = mock_object + ] = mock_rpc request = {} await client.get_job_execution_details(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job_execution_details(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2109,22 +2111,23 @@ async def test_get_stage_execution_details_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_stage_execution_details - ] = mock_object + ] = mock_rpc request = {} await client.get_stage_execution_details(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_stage_execution_details(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py index 2e41686a5f77..a0a319bac934 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py @@ -1339,22 +1339,23 @@ async def test_get_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1634,22 +1635,23 @@ async def test_delete_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1916,22 +1918,23 @@ async def test_list_snapshots_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_snapshots - ] = mock_object + ] = mock_rpc request = {} await client.list_snapshots(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_snapshots(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py index 9b8d2541eee7..071e04cdf105 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py @@ -1375,22 +1375,23 @@ async def test_create_job_from_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_job_from_template - ] = mock_object + ] = mock_rpc request = {} await client.create_job_from_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_job_from_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1688,22 +1689,23 @@ async def test_launch_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.launch_template - ] = mock_object + ] = mock_rpc request = {} await client.launch_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.launch_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1973,22 +1975,23 @@ async def test_get_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_template - ] = mock_object + ] = mock_rpc request = {} await client.get_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py b/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py index 0d21cc226e8f..558c8aab67c5 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py +++ b/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py index 0d21cc226e8f..558c8aab67c5 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py +++ b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.11" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/async_client.py b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/async_client.py index 1912daa165db..2e5979df8313 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/async_client.py +++ b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataformClient).get_transport_class, type(DataformClient) - ) + get_transport_class = DataformClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py index 161196eab69d..bcf592a2a77a 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py +++ b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py @@ -807,7 +807,7 @@ def __init__( transport_init: Union[ Type[DataformTransport], Callable[..., DataformTransport] ] = ( - type(self).get_transport_class(transport) + DataformClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataformTransport], transport) ) diff --git a/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json b/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json index 9a805193694a..d78e2036cf34 100644 --- a/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json +++ b/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataform", - "version": "0.5.11" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py b/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py index c79d3d689f3f..84f9bf85c48f 100644 --- 
a/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py +++ b/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py @@ -1253,22 +1253,23 @@ async def test_list_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_repositories - ] = mock_object + ] = mock_rpc request = {} await client.list_repositories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1846,22 +1847,23 @@ async def test_get_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_repository - ] = mock_object + ] = mock_rpc request = {} await client.get_repository(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2244,22 +2246,23 @@ async def test_create_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_repository - ] = mock_object + ] = mock_rpc request = {} await client.create_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2666,22 +2669,23 @@ async def test_update_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_repository - ] = mock_object + ] = mock_rpc request = {} await client.update_repository(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3060,22 +3064,23 @@ async def test_delete_repository_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_repository - ] = mock_object + ] = mock_rpc request = {} await client.delete_repository(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3433,22 +3438,23 @@ async def test_commit_repository_changes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.commit_repository_changes - ] = mock_object + ] = mock_rpc request = {} await client.commit_repository_changes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.commit_repository_changes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3731,22 +3737,23 @@ async def test_read_repository_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.read_repository_file - ] = mock_object + ] = mock_rpc request = {} await client.read_repository_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.read_repository_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4038,22 +4045,23 @@ async def test_query_repository_directory_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_repository_directory_contents - ] = mock_object + ] = mock_rpc request = {} await client.query_repository_directory_contents(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_repository_directory_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4546,22 +4554,23 @@ async def test_fetch_repository_history_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_repository_history - ] = mock_object + ] = mock_rpc request = {} await client.fetch_repository_history(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_repository_history(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5054,22 +5063,23 @@ async def test_compute_repository_access_token_status_async_use_cached_wrapped_r ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.compute_repository_access_token_status - ] = mock_object + ] = mock_rpc request = {} await client.compute_repository_access_token_status(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.compute_repository_access_token_status(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5359,22 +5369,23 @@ async def test_fetch_remote_branches_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_remote_branches - ] = mock_object + ] = mock_rpc request = {} await client.fetch_remote_branches(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_remote_branches(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5656,22 +5667,23 @@ async def test_list_workspaces_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workspaces - ] = mock_object + ] = mock_rpc request = {} await client.list_workspaces(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workspaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6216,22 +6228,23 @@ async def test_get_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workspace - ] = mock_object + ] = mock_rpc request = {} await client.get_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6580,22 +6593,23 @@ async def test_create_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workspace - ] = mock_object + ] = mock_rpc request = {} await client.create_workspace(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6955,22 +6969,23 @@ async def test_delete_workspace_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workspace - ] = mock_object + ] = mock_rpc request = {} await client.delete_workspace(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_workspace(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7317,22 +7332,23 @@ async def test_install_npm_packages_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.install_npm_packages - ] = mock_object + ] = mock_rpc request = {} await client.install_npm_packages(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.install_npm_packages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7599,22 +7615,23 @@ async def test_pull_git_commits_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pull_git_commits - ] = mock_object + ] = mock_rpc request = {} await client.pull_git_commits(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pull_git_commits(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7871,22 +7888,23 @@ async def test_push_git_commits_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.push_git_commits - ] = mock_object + ] = mock_rpc request = {} await client.push_git_commits(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.push_git_commits(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8154,22 +8172,23 @@ async def test_fetch_file_git_statuses_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_file_git_statuses - ] = mock_object + ] = mock_rpc request = {} await client.fetch_file_git_statuses(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_file_git_statuses(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8457,22 +8476,23 @@ async def test_fetch_git_ahead_behind_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_git_ahead_behind - ] = mock_object + ] = mock_rpc request = {} await client.fetch_git_ahead_behind(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_git_ahead_behind(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8755,22 +8775,23 @@ async def test_commit_workspace_changes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.commit_workspace_changes - ] = mock_object + ] = mock_rpc request = {} await client.commit_workspace_changes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.commit_workspace_changes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9042,22 +9063,23 @@ async def test_reset_workspace_changes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reset_workspace_changes - ] = mock_object + ] = mock_rpc request = {} await client.reset_workspace_changes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.reset_workspace_changes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9325,22 +9347,23 @@ async def test_fetch_file_diff_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_file_diff - ] = mock_object + ] = mock_rpc request = {} await client.fetch_file_diff(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_file_diff(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9624,22 +9647,23 @@ async def test_query_directory_contents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_directory_contents - ] = mock_object + ] = mock_rpc request = {} await client.query_directory_contents(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_directory_contents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10113,22 +10137,23 @@ async def test_make_directory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.make_directory - ] = mock_object + ] = mock_rpc request = {} await client.make_directory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.make_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10389,22 +10414,23 @@ async def test_remove_directory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_directory - ] = mock_object + ] = mock_rpc request = {} await client.remove_directory(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10663,22 +10689,23 @@ async def test_move_directory_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_directory - ] = mock_object + ] = mock_rpc request = {} await client.move_directory(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.move_directory(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10942,22 +10969,23 @@ async def test_read_file_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.read_file - ] = mock_object + ] = mock_rpc request = {} await client.read_file(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.read_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11219,22 +11247,23 @@ async def test_remove_file_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_file - ] = mock_object + ] = mock_rpc request = {} await client.remove_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11491,22 +11520,23 @@ async def test_move_file_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_file - ] = mock_object + ] = mock_rpc request = {} await client.move_file(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.move_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11765,22 +11795,23 @@ async def test_write_file_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.write_file - ] = mock_object + ] = mock_rpc request = {} await client.write_file(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.write_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12061,22 +12092,23 @@ async def test_list_release_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_release_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_release_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_release_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12663,22 +12695,23 @@ async def test_get_release_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_release_config - ] = mock_object + ] = mock_rpc request = {} await client.get_release_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_release_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13072,22 +13105,23 @@ async def test_create_release_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_release_config - ] = mock_object + ] = mock_rpc request = {} await client.create_release_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_release_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13495,22 +13529,23 @@ async def test_update_release_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_release_config - ] = mock_object + ] = mock_rpc request = {} await client.update_release_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_release_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13893,22 +13928,23 @@ async def test_delete_release_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_release_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_release_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_release_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14276,22 +14312,23 @@ async def test_list_compilation_results_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_compilation_results - ] = mock_object + ] = mock_rpc request = {} await client.list_compilation_results(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_compilation_results(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14876,22 +14913,23 @@ async def test_get_compilation_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_compilation_result - ] = mock_object + ] = mock_rpc request = {} await client.get_compilation_result(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_compilation_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15274,22 +15312,23 @@ async def test_create_compilation_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_compilation_result - ] = mock_object + ] = mock_rpc request = {} await client.create_compilation_result(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_compilation_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15680,22 +15719,23 @@ async def test_query_compilation_result_actions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_compilation_result_actions - ] = mock_object + ] = mock_rpc request = {} await client.query_compilation_result_actions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_compilation_result_actions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16191,22 +16231,23 @@ async def test_list_workflow_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workflow_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_workflow_configs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workflow_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -16790,22 +16831,23 @@ async def test_get_workflow_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workflow_config - ] = mock_object + ] = mock_rpc request = {} await client.get_workflow_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workflow_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17194,22 +17236,23 @@ async def test_create_workflow_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workflow_config - ] = mock_object + ] = mock_rpc request = {} await client.create_workflow_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_workflow_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -17612,22 +17655,23 @@ async def test_update_workflow_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workflow_config - ] = mock_object + ] = mock_rpc request = {} await client.update_workflow_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_workflow_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18008,22 +18052,23 @@ async def test_delete_workflow_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workflow_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_workflow_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_workflow_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18395,22 +18440,23 @@ async def test_list_workflow_invocations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workflow_invocations - ] = mock_object + ] = mock_rpc request = {} await client.list_workflow_invocations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workflow_invocations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -18993,22 +19039,23 @@ async def test_get_workflow_invocation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workflow_invocation - ] = mock_object + ] = mock_rpc request = {} await client.get_workflow_invocation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workflow_invocation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19386,22 +19433,23 @@ async def test_create_workflow_invocation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workflow_invocation - ] = mock_object + ] = mock_rpc request = {} await client.create_workflow_invocation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_workflow_invocation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -19779,22 +19827,23 @@ async def test_delete_workflow_invocation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workflow_invocation - ] = mock_object + ] = mock_rpc request = {} await client.delete_workflow_invocation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_workflow_invocation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20151,22 +20200,23 @@ async def test_cancel_workflow_invocation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_workflow_invocation - ] = mock_object + ] = mock_rpc request = {} await client.cancel_workflow_invocation(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_workflow_invocation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -20448,22 +20498,23 @@ async def test_query_workflow_invocation_actions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_workflow_invocation_actions - ] = mock_object + ] = mock_rpc request = {} await client.query_workflow_invocation_actions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_workflow_invocation_actions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py index 3ba9a6de4897..558c8aab67c5 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py index 977baa52a19d..9d2ea7440590 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -234,10 +233,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataLabelingServiceClient).get_transport_class, - type(DataLabelingServiceClient), - ) + get_transport_class = DataLabelingServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py index 781f4c2b9b00..1cd66859e9ed 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py @@ -825,7 +825,7 @@ def __init__( Type[DataLabelingServiceTransport], Callable[..., DataLabelingServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataLabelingServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataLabelingServiceTransport], transport) ) diff --git a/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json b/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json index d4bb1f70fc54..4a6f501c7672 100644 --- a/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json +++ b/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datalabeling", - "version": "1.10.5" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py b/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py index a1bebd8e8d76..c495e9e496d5 100644 --- a/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py +++ b/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py @@ -1340,22 +1340,23 @@ async def test_create_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_dataset - ] = mock_object + ] = mock_rpc request = {} await client.create_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1731,22 +1732,23 @@ async def test_get_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_dataset - ] = mock_object + ] = mock_rpc request = {} await client.get_dataset(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2104,22 +2106,23 @@ async def test_list_datasets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_datasets - ] = mock_object + ] = mock_rpc request = {} await client.list_datasets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2666,22 +2669,23 @@ async def test_delete_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_dataset - ] = mock_object + ] = mock_rpc request = {} await client.delete_dataset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2968,8 +2972,9 @@ def test_import_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_data(request) @@ -3023,26 +3028,28 @@ async def test_import_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_data - ] = mock_object + ] = mock_rpc request = {} await client.import_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3361,8 +3368,9 @@ def test_export_data_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_data(request) @@ -3416,26 +3424,28 @@ async def test_export_data_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_data - ] = mock_object + ] = mock_rpc request = {} await client.export_data(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_data(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3824,22 +3834,23 @@ async def test_get_data_item_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_item - ] = mock_object + ] = mock_rpc request = {} await client.get_data_item(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_item(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4189,22 +4200,23 @@ async def test_list_data_items_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_items - ] = mock_object + ] = mock_rpc request = {} await client.list_data_items(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_items(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4795,22 +4807,23 @@ async def test_get_annotated_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_annotated_dataset - ] = mock_object + ] = mock_rpc request = {} await client.get_annotated_dataset(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_annotated_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5204,22 +5217,23 @@ async def test_list_annotated_datasets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_annotated_datasets - ] = mock_object + ] = mock_rpc request = {} await client.list_annotated_datasets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_annotated_datasets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5797,22 +5811,23 @@ async def test_delete_annotated_dataset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_annotated_dataset - ] = mock_object + ] = mock_rpc request = {} await client.delete_annotated_dataset(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_annotated_dataset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6023,8 +6038,9 @@ def test_label_image_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.label_image(request) @@ -6078,26 +6094,28 @@ async def test_label_image_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.label_image - ] = mock_object + ] = mock_rpc request = {} await client.label_image(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.label_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6420,8 +6438,9 @@ def test_label_video_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.label_video(request) @@ -6475,26 +6494,28 @@ async def test_label_video_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.label_video - ] = mock_object + ] = mock_rpc request = {} await client.label_video(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.label_video(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6817,8 +6838,9 @@ def test_label_text_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.label_text(request) @@ -6870,26 +6892,28 @@ async def test_label_text_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.label_text - ] = mock_object + ] = mock_rpc request = {} await client.label_text(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.label_text(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7269,22 +7293,23 @@ async def test_get_example_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_example - ] = mock_object + ] = mock_rpc request = {} await client.get_example(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_example(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7644,22 +7669,23 @@ async def test_list_examples_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_examples - ] = mock_object + ] = mock_rpc request = {} await client.list_examples(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_examples(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8235,22 +8261,23 @@ async def test_create_annotation_spec_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_annotation_spec_set - ] = mock_object + ] = mock_rpc request = {} await client.create_annotation_spec_set(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_annotation_spec_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8656,22 +8683,23 @@ async def test_get_annotation_spec_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_annotation_spec_set - ] = mock_object + ] = mock_rpc request = {} await client.get_annotation_spec_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_annotation_spec_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9054,22 +9082,23 @@ async def test_list_annotation_spec_sets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_annotation_spec_sets - ] = mock_object + ] = mock_rpc request = {} await client.list_annotation_spec_sets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_annotation_spec_sets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9653,22 +9682,23 @@ async def test_delete_annotation_spec_set_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_annotation_spec_set - ] = mock_object + ] = mock_rpc request = {} await client.delete_annotation_spec_set(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_annotation_spec_set(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9973,8 +10003,9 @@ def test_create_instruction_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instruction(request) @@ -10030,26 +10061,28 @@ async def test_create_instruction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instruction - ] = mock_object + ] = mock_rpc request = {} await client.create_instruction(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instruction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10428,22 +10461,23 @@ async def test_get_instruction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instruction - ] = mock_object + ] = mock_rpc request = {} await client.get_instruction(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instruction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10815,22 +10849,23 @@ async def test_list_instructions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instructions - ] = mock_object + ] = mock_rpc request = {} await client.list_instructions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instructions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11407,22 +11442,23 @@ async def test_delete_instruction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instruction - ] = mock_object + ] = mock_rpc request = {} await client.delete_instruction(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_instruction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11782,22 +11818,23 @@ async def test_get_evaluation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_evaluation - ] = mock_object + ] = mock_rpc request = {} await client.get_evaluation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_evaluation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12170,22 +12207,23 @@ async def test_search_evaluations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_evaluations - ] = mock_object + ] = mock_rpc request = {} await client.search_evaluations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_evaluations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12772,22 +12810,23 @@ async def test_search_example_comparisons_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_example_comparisons - ] = mock_object + ] = mock_rpc request = {} await client.search_example_comparisons(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_example_comparisons(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13394,22 +13433,23 @@ async def test_create_evaluation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_evaluation_job - ] = mock_object + ] = mock_rpc request = {} await client.create_evaluation_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_evaluation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13818,22 +13858,23 @@ async def test_update_evaluation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_evaluation_job - ] = mock_object + ] = mock_rpc request = {} await client.update_evaluation_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_evaluation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14245,22 +14286,23 @@ async def test_get_evaluation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_evaluation_job - ] = mock_object + ] = mock_rpc request = {} await client.get_evaluation_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_evaluation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14637,22 +14679,23 @@ async def test_pause_evaluation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pause_evaluation_job - ] = mock_object + ] = mock_rpc request = {} await client.pause_evaluation_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pause_evaluation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15009,22 +15052,23 @@ async def test_resume_evaluation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resume_evaluation_job - ] = mock_object + ] = mock_rpc request = {} await client.resume_evaluation_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.resume_evaluation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15381,22 +15425,23 @@ async def test_delete_evaluation_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_evaluation_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_evaluation_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_evaluation_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15763,22 +15808,23 @@ async def test_list_evaluation_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_evaluation_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_evaluation_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_evaluation_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py index f7850c8d0049..558c8aab67c5 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.2.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py index f7850c8d0049..558c8aab67c5 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.2.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py index f1503d82d419..c7055186de2c 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -209,9 +208,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CatalogServiceClient).get_transport_class, type(CatalogServiceClient) - ) + get_transport_class = CatalogServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py index 84808233ad3d..a0ca684d5d8f 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py @@ -746,7 +746,7 @@ def __init__( transport_init: Union[ Type[CatalogServiceTransport], Callable[..., CatalogServiceTransport] ] = ( - type(self).get_transport_class(transport) + CatalogServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CatalogServiceTransport], transport) ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py index 606198cb6e02..3742d93956a9 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ContentServiceClient).get_transport_class, type(ContentServiceClient) - ) + get_transport_class = ContentServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py index a497d4df01bc..b8342f36ff08 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py @@ -691,7 +691,7 @@ def __init__( transport_init: Union[ Type[ContentServiceTransport], Callable[..., ContentServiceTransport] ] = ( - type(self).get_transport_class(transport) + ContentServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ContentServiceTransport], transport) ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py index fb96abb79333..9a85748cbb34 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -214,9 +213,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataScanServiceClient).get_transport_class, type(DataScanServiceClient) - ) + get_transport_class = DataScanServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py index 608a8156289f..492ffa36b496 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py @@ -731,7 +731,7 @@ def __init__( transport_init: Union[ Type[DataScanServiceTransport], Callable[..., DataScanServiceTransport] ] = ( - type(self).get_transport_class(transport) + DataScanServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataScanServiceTransport], transport) ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py index 5781986ed5ac..d50c36dde112 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -214,10 +213,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataTaxonomyServiceClient).get_transport_class, - type(DataTaxonomyServiceClient), - ) + get_transport_class = DataTaxonomyServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py index 028c44d5f445..a72997a74a85 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py @@ -724,7 +724,7 @@ def __init__( Type[DataTaxonomyServiceTransport], Callable[..., DataTaxonomyServiceTransport], ] = ( - type(self).get_transport_class(transport) + DataTaxonomyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataTaxonomyServiceTransport], transport) ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py index b66fbee8fb81..4424693fa6a9 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,9 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataplexServiceClient).get_transport_class, type(DataplexServiceClient) - ) + get_transport_class = DataplexServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py index 8b07ba1d8f33..ef826d0cd2ec 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py @@ -854,7 +854,7 @@ def __init__( transport_init: Union[ Type[DataplexServiceTransport], Callable[..., DataplexServiceTransport] ] = ( - type(self).get_transport_class(transport) + DataplexServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataplexServiceTransport], transport) ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py index 331688748e05..4c1e8f2f6aee 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -199,9 +198,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MetadataServiceClient).get_transport_class, type(MetadataServiceClient) - ) + get_transport_class = MetadataServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py index 09ce53a07792..9598810211da 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py @@ -726,7 +726,7 @@ def __init__( transport_init: Union[ Type[MetadataServiceTransport], Callable[..., MetadataServiceTransport] ] = ( - type(self).get_transport_class(transport) + MetadataServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetadataServiceTransport], transport) ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py index fe234646514f..d4535bf4114d 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py @@ -1664,8 +1664,8 @@ class ListEntriesRequest(proto.Message): to be provided. 
Example filter expressions: "entry_source.display_name=AnExampleDisplayName" "entry_type=projects/example-project/locations/global/entryTypes/example-entry_type" - `entry_type=projects/example-project/locations/us/entryTypes/a* - OR entry_type=projects/another-project/locations/*` "NOT + "entry_type=projects/example-project/locations/us/entryTypes/a* + OR entry_type=projects/another-project/locations/*" "NOT entry_source.display_name=AnotherExampleDisplayName". """ diff --git a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json index 257527a1bb61..61adee5a1be5 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json +++ b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataplex", - "version": "2.2.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py index 38df78f1764a..78869ffa4363 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py @@ -1230,8 +1230,9 @@ def test_create_entry_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_entry_type(request) @@ -1287,26 +1288,28 @@ async def test_create_entry_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry_type - ] = mock_object + ] = mock_rpc request = {} await client.create_entry_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1630,8 +1633,9 @@ def test_update_entry_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_entry_type(request) @@ -1687,26 +1691,28 @@ async def test_update_entry_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry_type - ] = mock_object + ] = mock_rpc request = {} await client.update_entry_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2026,8 +2032,9 @@ def test_delete_entry_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_entry_type(request) @@ -2083,26 +2090,28 @@ async def test_delete_entry_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2469,22 +2478,23 @@ async def test_list_entry_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entry_types - ] = mock_object + ] = mock_rpc request = {} await client.list_entry_types(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entry_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3050,22 +3060,23 @@ async def test_get_entry_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry_type - ] = mock_object + ] = mock_rpc request = {} await client.get_entry_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3380,8 +3391,9 @@ def test_create_aspect_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_aspect_type(request) @@ -3437,26 +3449,28 @@ async def test_create_aspect_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_aspect_type - ] = mock_object + ] = mock_rpc request = {} await client.create_aspect_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3782,8 +3796,9 @@ def test_update_aspect_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_aspect_type(request) @@ -3839,26 +3854,28 @@ async def test_update_aspect_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_aspect_type - ] = mock_object + ] = mock_rpc request = {} await client.update_aspect_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4180,8 +4197,9 @@ def test_delete_aspect_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_aspect_type(request) @@ -4237,26 +4255,28 @@ async def test_delete_aspect_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_aspect_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_aspect_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4631,22 +4651,23 @@ async def test_list_aspect_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_aspect_types - ] = mock_object + ] = mock_rpc request = {} await client.list_aspect_types(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_aspect_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5224,22 +5245,23 @@ async def test_get_aspect_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_aspect_type - ] = mock_object + ] = mock_rpc request = {} await client.get_aspect_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5550,8 +5572,9 @@ def test_create_entry_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_entry_group(request) @@ -5607,26 +5630,28 @@ async def test_create_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5952,8 +5977,9 @@ def test_update_entry_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_entry_group(request) @@ -6009,26 +6035,28 @@ async def test_update_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6350,8 +6378,9 @@ def test_delete_entry_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_entry_group(request) @@ -6407,26 +6436,28 @@ async def test_delete_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6801,22 +6832,23 @@ async def test_list_entry_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entry_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entry_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7394,22 +7426,23 @@ async def test_get_entry_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry_group - ] = mock_object + ] = mock_rpc request = {} await client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7775,22 +7808,23 @@ async def test_create_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entry - ] = mock_object + ] = mock_rpc request = {} await client.create_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8166,22 +8200,23 @@ async def test_update_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entry - ] = mock_object + ] = mock_rpc request = {} await client.update_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8551,22 +8586,23 @@ async def test_delete_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entry - ] = mock_object + ] = mock_rpc request = {} await client.delete_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8921,22 +8957,23 @@ async def test_list_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9486,22 +9523,23 @@ async def test_get_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entry - ] = mock_object + ] = mock_rpc request = {} await client.get_entry(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9863,22 +9901,23 @@ async def test_lookup_entry_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_entry - ] = mock_object + ] = mock_rpc request = {} await client.lookup_entry(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10163,22 +10202,23 @@ async def test_search_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.search_entries - ] = mock_object + ] = mock_rpc request = {} await client.search_entries(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py index 35438a7efa20..c385cbb88bea 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py @@ -1278,22 +1278,23 @@ async def test_create_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_content - ] = mock_object + ] = mock_rpc request = {} await client.create_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1660,22 +1661,23 @@ async def test_update_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_content - ] = mock_object + ] = mock_rpc request = {} await client.update_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2029,22 +2031,23 @@ async def test_delete_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_content - ] = mock_object + ] = mock_rpc request = {} await client.delete_content(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2394,22 +2397,23 @@ async def test_get_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_content - ] = mock_object + ] = mock_rpc request = {} await client.get_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2763,22 +2767,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3145,22 +3150,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3457,22 +3463,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3769,22 +3776,23 @@ async def test_list_content_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_content - ] = mock_object + ] = mock_rpc request = {} await client.list_content(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_content(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py index c4c810e873dc..0aa24fc84248 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -1244,8 +1244,9 @@ def test_create_data_scan_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_data_scan(request) @@ -1299,26 +1300,28 @@ async def test_create_data_scan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_scan - ] = mock_object + ] = mock_rpc request = {} await client.create_data_scan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_data_scan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1626,8 +1629,9 @@ def test_update_data_scan_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_data_scan(request) @@ -1681,26 +1685,28 @@ async def test_update_data_scan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_scan - ] = mock_object + ] = mock_rpc request = {} await client.update_data_scan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_data_scan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2002,8 +2008,9 @@ def test_delete_data_scan_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_data_scan(request) @@ -2057,26 +2064,28 @@ async def test_delete_data_scan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_scan - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_scan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_data_scan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2437,22 +2446,23 @@ async def test_get_data_scan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_scan - ] = mock_object + ] = mock_rpc request = {} await client.get_data_scan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_scan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2816,22 +2826,23 @@ async def test_list_data_scans_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_scans - ] = mock_object + ] = mock_rpc request = {} await client.list_data_scans(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_scans(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3371,22 +3382,23 @@ async def test_run_data_scan_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_data_scan - ] = mock_object + ] = mock_rpc request = {} await client.run_data_scan(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_data_scan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3754,22 +3766,23 @@ async def test_get_data_scan_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_scan_job - ] = mock_object + ] = mock_rpc request = {} await client.get_data_scan_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_scan_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4152,22 +4165,23 @@ async def test_list_data_scan_jobs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_scan_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_data_scan_jobs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_scan_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4736,22 +4750,23 @@ async def test_generate_data_quality_rules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.generate_data_quality_rules - ] = mock_object + ] = mock_rpc request = {} await client.generate_data_quality_rules(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_data_quality_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py index f98d4cbcd6c3..25e617f52308 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py @@ -1274,8 +1274,9 @@ def test_create_data_taxonomy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_data_taxonomy(request) @@ -1331,26 +1332,28 @@ async def test_create_data_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.create_data_taxonomy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_data_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1677,8 +1680,9 @@ def test_update_data_taxonomy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_data_taxonomy(request) @@ -1734,26 +1738,28 @@ async def test_update_data_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.update_data_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_data_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2076,8 +2082,9 @@ def test_delete_data_taxonomy_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_data_taxonomy(request) @@ -2133,26 +2140,28 @@ async def test_delete_data_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_data_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2530,22 +2539,23 @@ async def test_list_data_taxonomies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_taxonomies - ] = mock_object + ] = mock_rpc request = {} await client.list_data_taxonomies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_taxonomies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3137,22 +3147,23 @@ async def test_get_data_taxonomy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_taxonomy - ] = mock_object + ] = mock_rpc request = {} await client.get_data_taxonomy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_taxonomy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3480,8 +3491,9 @@ def test_create_data_attribute_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_data_attribute_binding(request) @@ -3537,26 +3549,28 @@ async def test_create_data_attribute_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_attribute_binding - ] = mock_object + ] = mock_rpc request = {} await client.create_data_attribute_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_data_attribute_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3892,8 +3906,9 @@ def test_update_data_attribute_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_data_attribute_binding(request) @@ -3949,26 +3964,28 @@ async def test_update_data_attribute_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_attribute_binding - ] = mock_object + ] = mock_rpc request = {} await client.update_data_attribute_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_data_attribute_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4300,8 +4317,9 @@ def test_delete_data_attribute_binding_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_data_attribute_binding(request) @@ -4357,26 +4375,28 @@ async def test_delete_data_attribute_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_attribute_binding - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_attribute_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_data_attribute_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4755,22 +4775,23 @@ async def test_list_data_attribute_bindings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_attribute_bindings - ] = mock_object + ] = mock_rpc request = {} await client.list_data_attribute_bindings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_attribute_bindings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5365,22 +5386,23 @@ async def test_get_data_attribute_binding_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_attribute_binding - ] = mock_object + ] = mock_rpc request = {} await client.get_data_attribute_binding(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_attribute_binding(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5707,8 +5729,9 @@ def test_create_data_attribute_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_data_attribute(request) @@ -5764,26 +5787,28 @@ async def test_create_data_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_data_attribute - ] = mock_object + ] = mock_rpc request = {} await client.create_data_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_data_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6111,8 +6136,9 @@ def test_update_data_attribute_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_data_attribute(request) @@ -6168,26 +6194,28 @@ async def test_update_data_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_data_attribute - ] = mock_object + ] = mock_rpc request = {} await client.update_data_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_data_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6511,8 +6539,9 @@ def test_delete_data_attribute_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_data_attribute(request) @@ -6568,26 +6597,28 @@ async def test_delete_data_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_data_attribute - ] = mock_object + ] = mock_rpc request = {} await client.delete_data_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_data_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6965,22 +6996,23 @@ async def test_list_data_attributes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_data_attributes - ] = mock_object + ] = mock_rpc request = {} await client.list_data_attributes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_data_attributes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7574,22 +7606,23 @@ async def test_get_data_attribute_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_data_attribute - ] = mock_object + ] = mock_rpc request = {} await client.get_data_attribute(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_data_attribute(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py index e3183fd2bdec..e632e1dac75b 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py @@ -1236,8 +1236,9 @@ def test_create_lake_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_lake(request) @@ -1291,26 +1292,28 @@ async def test_create_lake_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_lake - ] = mock_object + ] = mock_rpc request = {} await client.create_lake(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_lake(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1616,8 +1619,9 @@ def test_update_lake_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_lake(request) @@ -1671,26 +1675,28 @@ async def test_update_lake_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_lake - ] = mock_object + ] = mock_rpc request = {} await client.update_lake(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_lake(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1990,8 +1996,9 @@ def test_delete_lake_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_lake(request) @@ -2045,26 +2052,28 @@ async def test_delete_lake_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_lake - ] = mock_object + ] = mock_rpc request = {} await client.delete_lake(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_lake(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2417,22 +2426,23 @@ async def test_list_lakes_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_lakes - ] = mock_object + ] = mock_rpc request = {} await client.list_lakes(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_lakes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2990,22 +3000,23 @@ async def test_get_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_lake - ] = mock_object + ] = mock_rpc request = {} await client.get_lake(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_lake(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3372,22 +3383,23 @@ async def test_list_lake_actions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_lake_actions - ] = mock_object + ] = mock_rpc request = {} await client.list_lake_actions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_lake_actions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3894,8 +3906,9 @@ def test_create_zone_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_zone(request) @@ -3949,26 +3962,28 @@ async def test_create_zone_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_zone - ] = mock_object + ] = mock_rpc request = {} await client.create_zone(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4274,8 +4289,9 @@ def test_update_zone_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_zone(request) @@ -4329,26 +4345,28 @@ async def test_update_zone_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_zone - ] = mock_object + ] = mock_rpc request = {} await client.update_zone(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4648,8 +4666,9 @@ def test_delete_zone_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_zone(request) @@ -4703,26 +4722,28 @@ async def test_delete_zone_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_zone - ] = mock_object + ] = mock_rpc request = {} await client.delete_zone(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5072,22 +5093,23 @@ async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_zones - ] = mock_object + ] = mock_rpc request = {} await client.list_zones(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_zones(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5643,22 +5665,23 @@ async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_zone - ] = mock_object + ] = mock_rpc request = {} await client.get_zone(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_zone(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6025,22 +6048,23 @@ async def test_list_zone_actions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_zone_actions - ] = mock_object + ] = mock_rpc request = {} await client.list_zone_actions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_zone_actions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6547,8 +6571,9 @@ def test_create_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_asset(request) @@ -6602,26 +6627,28 @@ async def test_create_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_asset - ] = mock_object + ] = mock_rpc request = {} await client.create_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6927,8 +6954,9 @@ def test_update_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_asset(request) @@ -6982,26 +7010,28 @@ async def test_update_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_asset - ] = mock_object + ] = mock_rpc request = {} await client.update_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7301,8 +7331,9 @@ def test_delete_asset_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_asset(request) @@ -7356,26 +7387,28 @@ async def test_delete_asset_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_asset - ] = mock_object + ] = mock_rpc request = {} await client.delete_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7727,22 +7760,23 @@ async def test_list_assets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_assets - ] = mock_object + ] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8295,22 +8329,23 @@ async def test_get_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_asset - ] = mock_object + ] = mock_rpc request = {} await client.get_asset(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_asset(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8677,22 +8712,23 @@ async def test_list_asset_actions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_asset_actions - ] = mock_object + ] = mock_rpc request = {} await client.list_asset_actions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_asset_actions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9199,8 +9235,9 @@ def test_create_task_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_task(request) @@ -9254,26 +9291,28 @@ async def test_create_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_task - ] = mock_object + ] = mock_rpc request = {} await client.create_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9579,8 +9618,9 @@ def test_update_task_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_task(request) @@ -9634,26 +9674,28 @@ async def test_update_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_task - ] = mock_object + ] = mock_rpc request = {} await client.update_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9953,8 +9995,9 @@ def test_delete_task_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_task(request) @@ -10008,26 +10051,28 @@ async def test_delete_task_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_task - ] = mock_object + ] = mock_rpc request = {} await client.delete_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10380,22 +10425,23 @@ async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_tasks - ] = mock_object + ] = mock_rpc request = {} await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10950,22 +10996,23 @@ async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_task - ] = mock_object + ] = mock_rpc request = {} await client.get_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11318,22 +11365,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11869,22 +11917,23 @@ async def test_run_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_task - ] = mock_object + ] = mock_rpc request = {} await client.run_task(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12249,22 +12298,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12614,22 +12664,23 @@ async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_job - ] = mock_object + ] = mock_rpc request = {} await client.cancel_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12925,8 +12976,9 @@ def test_create_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_environment(request) @@ -12982,26 +13034,28 @@ async def test_create_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_environment - ] = mock_object + ] = mock_rpc request = {} await client.create_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13327,8 +13381,9 @@ def test_update_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_environment(request) @@ -13384,26 +13439,28 @@ async def test_update_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_environment - ] = mock_object + ] = mock_rpc request = {} await client.update_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -13723,8 +13780,9 @@ def test_delete_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_environment(request) @@ -13780,26 +13838,28 @@ async def test_delete_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_environment - ] = mock_object + ] = mock_rpc request = {} await client.delete_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14171,22 +14231,23 @@ async def test_list_environments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_environments - ] = mock_object + ] = mock_rpc request = {} await client.list_environments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_environments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -14759,22 +14820,23 @@ async def test_get_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_environment - ] = mock_object + ] = mock_rpc request = {} await client.get_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15131,22 +15193,23 @@ async def test_list_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sessions - ] = mock_object + ] = mock_rpc request = {} await client.list_sessions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py index 51d511261536..106f11876d3d 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py @@ -1311,22 +1311,23 @@ async def test_create_entity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entity - ] = mock_object + ] = mock_rpc request = {} await client.create_entity(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1732,22 +1733,23 @@ async def test_update_entity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entity - ] = mock_object + ] = mock_rpc request = {} await client.update_entity(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2029,22 +2031,23 @@ async def test_delete_entity_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entity - ] = mock_object + ] = mock_rpc request = {} await client.delete_entity(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2415,22 +2418,23 @@ async def test_get_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entity - ] = mock_object + ] = mock_rpc request = {} await client.get_entity(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entity(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2801,22 +2805,23 @@ async def test_list_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entities - ] = mock_object + ] = mock_rpc request = {} await client.list_entities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3370,22 +3375,23 @@ async def test_create_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_partition - ] = mock_object + ] = mock_rpc request = {} await client.create_partition(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_partition(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3743,22 +3749,23 @@ async def test_delete_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_partition - ] = mock_object + ] = mock_rpc request = {} await client.delete_partition(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_partition(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4107,22 +4114,23 @@ async def test_get_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_partition - ] = mock_object + ] = mock_rpc request = {} await client.get_partition(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_partition(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4477,22 +4485,23 @@ async def test_list_partitions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_partitions - ] = mock_object + ] = mock_rpc request = {} await client.list_partitions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_partitions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py index e018cef961ff..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py index e018cef961ff..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py index ae7678fa7bd3..a12bacb0b074 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -229,9 +228,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataprocMetastoreClient).get_transport_class, type(DataprocMetastoreClient) - ) + get_transport_class = DataprocMetastoreClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py index eb6ce944ff32..7eb04c02c9e4 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py @@ -786,7 +786,7 @@ def __init__( Type[DataprocMetastoreTransport], Callable[..., DataprocMetastoreTransport], ] = ( - type(self).get_transport_class(transport) + DataprocMetastoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataprocMetastoreTransport], transport) ) diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/async_client.py index 340282ce0d72..79b1bdd20da3 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -222,10 +221,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataprocMetastoreFederationClient).get_transport_class, - type(DataprocMetastoreFederationClient), - ) + get_transport_class = DataprocMetastoreFederationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py index 02d6cb0e3c2a..b151707de647 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py @@ -700,7 +700,7 @@ def __init__( Type[DataprocMetastoreFederationTransport], Callable[..., DataprocMetastoreFederationTransport], ] = ( - type(self).get_transport_class(transport) + DataprocMetastoreFederationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., DataprocMetastoreFederationTransport], transport diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py index e018cef961ff..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py index b53984a832c0..5356db6dd979 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -231,9 +230,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataprocMetastoreClient).get_transport_class, type(DataprocMetastoreClient) - ) + get_transport_class = DataprocMetastoreClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py index f01596090d88..41e907b0f7fc 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py @@ -808,7 +808,7 @@ def __init__( Type[DataprocMetastoreTransport], Callable[..., DataprocMetastoreTransport], ] = ( - type(self).get_transport_class(transport) + DataprocMetastoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataprocMetastoreTransport], transport) ) diff --git 
a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/async_client.py index 58c17e4443f2..f85aaaf1c945 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -222,10 +221,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataprocMetastoreFederationClient).get_transport_class, - type(DataprocMetastoreFederationClient), - ) + get_transport_class = DataprocMetastoreFederationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py index d5578b690d4c..d0480bac5498 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py @@ -700,7 +700,7 @@ def __init__( Type[DataprocMetastoreFederationTransport], Callable[..., DataprocMetastoreFederationTransport], ] = ( - type(self).get_transport_class(transport) + DataprocMetastoreFederationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., DataprocMetastoreFederationTransport], transport diff --git 
a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py index e018cef961ff..558c8aab67c5 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py index dd545bdcd7c9..ced7f1c9e60c 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -231,9 +230,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataprocMetastoreClient).get_transport_class, type(DataprocMetastoreClient) - ) + get_transport_class = DataprocMetastoreClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py index fc54fbed5b30..6d2fa8cc4c22 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py @@ -808,7 +808,7 @@ def __init__( Type[DataprocMetastoreTransport], Callable[..., DataprocMetastoreTransport], ] = ( - type(self).get_transport_class(transport) + DataprocMetastoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DataprocMetastoreTransport], transport) ) diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/async_client.py index 4414c1826a03..0f134074ee41 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -222,10 +221,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DataprocMetastoreFederationClient).get_transport_class, - type(DataprocMetastoreFederationClient), - ) + get_transport_class = DataprocMetastoreFederationClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py index 658525e3d174..9e7055835472 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py @@ -700,7 +700,7 @@ def __init__( Type[DataprocMetastoreFederationTransport], Callable[..., DataprocMetastoreFederationTransport], ] = ( - type(self).get_transport_class(transport) + DataprocMetastoreFederationClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast( Callable[..., DataprocMetastoreFederationTransport], transport diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json index ba41e49e516c..61145e9be3f7 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.15.5" + 
"version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json index 4422b864e380..96abac734738 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.15.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json index 26abda4ab6df..4a03aa222378 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.15.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py index f3cd66bfca65..13e3222324cc 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py @@ -1347,22 +1347,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function 
with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1937,22 +1938,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2265,8 +2267,9 @@ def test_create_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service(request) @@ -2320,26 +2323,28 @@ async def test_create_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service - ] = mock_object + ] = mock_rpc request = {} await client.create_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2669,8 +2674,9 @@ def test_update_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_service(request) @@ -2724,26 +2730,28 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3065,8 +3073,9 @@ def test_delete_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service(request) @@ -3120,26 +3129,28 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3507,22 +3518,23 @@ async def test_list_metadata_imports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_metadata_imports - ] = mock_object + ] = mock_rpc request = {} await client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_metadata_imports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4103,22 +4115,23 @@ async def test_get_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4440,8 +4453,9 @@ def test_create_metadata_import_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_metadata_import(request) @@ -4497,26 +4511,28 @@ async def test_create_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4871,8 +4887,9 @@ def test_update_metadata_import_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_metadata_import(request) @@ -4928,26 +4945,28 @@ async def test_update_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5285,8 +5304,9 @@ def test_export_metadata_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_metadata(request) @@ -5340,26 +5360,28 @@ async def test_export_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_metadata - ] = mock_object + ] = mock_rpc request = {} await client.export_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5571,8 +5593,9 @@ def test_restore_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_service(request) @@ -5626,26 +5649,28 @@ async def test_restore_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_service - ] = mock_object + ] = mock_rpc request = {} await client.restore_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6010,22 +6035,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6577,22 +6603,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6891,8 +6918,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -6946,26 +6974,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7277,8 +7307,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -7332,26 +7363,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7643,8 +7676,9 @@ def test_query_metadata_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.query_metadata(request) @@ -7698,26 +7732,28 @@ async def test_query_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_metadata - ] = mock_object + ] = mock_rpc request = {} await client.query_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.query_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7942,8 +7978,9 @@ def test_move_table_to_database_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.move_table_to_database(request) @@ -7999,26 +8036,28 @@ async def test_move_table_to_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_table_to_database - ] = mock_object + ] = mock_rpc request = {} await client.move_table_to_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.move_table_to_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8247,8 +8286,9 @@ def test_alter_metadata_resource_location_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.alter_metadata_resource_location(request) @@ -8304,26 +8344,28 @@ async def test_alter_metadata_resource_location_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.alter_metadata_resource_location - ] = mock_object + ] = mock_rpc request = {} await client.alter_metadata_resource_location(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.alter_metadata_resource_location(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py index e145c749331e..e8df9b41018d 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py @@ -1397,22 +1397,23 @@ async def test_list_federations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() 
client._client._transport._wrapped_methods[ client._client._transport.list_federations - ] = mock_object + ] = mock_rpc request = {} await client.list_federations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_federations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1973,22 +1974,23 @@ async def test_get_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_federation - ] = mock_object + ] = mock_rpc request = {} await client.get_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2304,8 +2306,9 @@ def test_create_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_federation(request) @@ -2361,26 +2364,28 @@ async def test_create_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_federation - ] = mock_object + ] = mock_rpc request = {} await client.create_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2709,8 +2714,9 @@ def test_update_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_federation(request) @@ -2766,26 +2772,28 @@ async def test_update_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_federation - ] = mock_object + ] = mock_rpc request = {} await client.update_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3106,8 +3114,9 @@ def test_delete_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_federation(request) @@ -3163,26 +3172,28 @@ async def test_delete_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_federation - ] = mock_object + ] = mock_rpc request = {} await client.delete_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py index be043d9b6a68..0acc7b881125 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py @@ -1347,22 +1347,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object 
+ ] = mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1937,22 +1938,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2265,8 +2267,9 @@ def test_create_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service(request) @@ -2320,26 +2323,28 @@ async def test_create_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service - ] = mock_object + ] = mock_rpc request = {} await client.create_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2669,8 +2674,9 @@ def test_update_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_service(request) @@ -2724,26 +2730,28 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3065,8 +3073,9 @@ def test_delete_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service(request) @@ -3120,26 +3129,28 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3507,22 +3518,23 @@ async def test_list_metadata_imports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_metadata_imports - ] = mock_object + ] = mock_rpc request = {} await client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_metadata_imports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4103,22 +4115,23 @@ async def test_get_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4440,8 +4453,9 @@ def test_create_metadata_import_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_metadata_import(request) @@ -4497,26 +4511,28 @@ async def test_create_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4871,8 +4887,9 @@ def test_update_metadata_import_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_metadata_import(request) @@ -4928,26 +4945,28 @@ async def test_update_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5285,8 +5304,9 @@ def test_export_metadata_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_metadata(request) @@ -5340,26 +5360,28 @@ async def test_export_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_metadata - ] = mock_object + ] = mock_rpc request = {} await client.export_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5571,8 +5593,9 @@ def test_restore_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_service(request) @@ -5626,26 +5649,28 @@ async def test_restore_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_service - ] = mock_object + ] = mock_rpc request = {} await client.restore_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6010,22 +6035,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6577,22 +6603,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6891,8 +6918,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -6946,26 +6974,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7277,8 +7307,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -7332,26 +7363,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7707,22 +7740,23 @@ async def test_remove_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.remove_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7941,8 +7975,9 @@ def test_query_metadata_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.query_metadata(request) @@ -7996,26 +8031,28 @@ async def test_query_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_metadata - ] = mock_object + ] = mock_rpc request = {} await client.query_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.query_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8240,8 +8277,9 @@ def test_move_table_to_database_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.move_table_to_database(request) @@ -8297,26 +8335,28 @@ async def test_move_table_to_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_table_to_database - ] = mock_object + ] = mock_rpc request = {} await client.move_table_to_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.move_table_to_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8545,8 +8585,9 @@ def test_alter_metadata_resource_location_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.alter_metadata_resource_location(request) @@ -8602,26 +8643,28 @@ async def test_alter_metadata_resource_location_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.alter_metadata_resource_location - ] = mock_object + ] = mock_rpc request = {} await client.alter_metadata_resource_location(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.alter_metadata_resource_location(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py index 3730337bda5a..b2d58f70becc 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py @@ -1397,22 +1397,23 @@ async def test_list_federations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_federations - ] = mock_object + ] = mock_rpc request = {} await client.list_federations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_federations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1973,22 +1974,23 @@ async def test_get_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_federation - ] = mock_object + ] = mock_rpc request = {} await client.get_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2304,8 +2306,9 @@ def test_create_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_federation(request) @@ -2361,26 +2364,28 @@ async def test_create_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_federation - ] = mock_object + ] = mock_rpc request = {} await client.create_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2709,8 +2714,9 @@ def test_update_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_federation(request) @@ -2766,26 +2772,28 @@ async def test_update_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_federation - ] = mock_object + ] = mock_rpc request = {} await client.update_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3106,8 +3114,9 @@ def test_delete_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_federation(request) @@ -3163,26 +3172,28 @@ async def test_delete_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_federation - ] = mock_object + ] = mock_rpc request = {} await client.delete_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py index 8255f4fa435b..beeb99a2bded 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py @@ -1347,22 +1347,23 @@ async def test_list_services_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_services - ] = mock_object + ] 
= mock_rpc request = {} await client.list_services(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_services(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1937,22 +1938,23 @@ async def test_get_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_service - ] = mock_object + ] = mock_rpc request = {} await client.get_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2265,8 +2267,9 @@ def test_create_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_service(request) @@ -2320,26 +2323,28 @@ async def test_create_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_service - ] = mock_object + ] = mock_rpc request = {} await client.create_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2669,8 +2674,9 @@ def test_update_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_service(request) @@ -2724,26 +2730,28 @@ async def test_update_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_service - ] = mock_object + ] = mock_rpc request = {} await client.update_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3065,8 +3073,9 @@ def test_delete_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_service(request) @@ -3120,26 +3129,28 @@ async def test_delete_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_service - ] = mock_object + ] = mock_rpc request = {} await client.delete_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3507,22 +3518,23 @@ async def test_list_metadata_imports_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_metadata_imports - ] = mock_object + ] = mock_rpc request = {} await client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_metadata_imports(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4103,22 +4115,23 @@ async def test_get_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4440,8 +4453,9 @@ def test_create_metadata_import_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_metadata_import(request) @@ -4497,26 +4511,28 @@ async def test_create_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4871,8 +4887,9 @@ def test_update_metadata_import_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_metadata_import(request) @@ -4928,26 +4945,28 @@ async def test_update_metadata_import_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_metadata_import - ] = mock_object + ] = mock_rpc request = {} await client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_metadata_import(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5285,8 +5304,9 @@ def test_export_metadata_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_metadata(request) @@ -5340,26 +5360,28 @@ async def test_export_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_metadata - ] = mock_object + ] = mock_rpc request = {} await client.export_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5571,8 +5593,9 @@ def test_restore_service_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_service(request) @@ -5626,26 +5649,28 @@ async def test_restore_service_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_service - ] = mock_object + ] = mock_rpc request = {} await client.restore_service(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6010,22 +6035,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6577,22 +6603,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6891,8 +6918,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -6946,26 +6974,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7277,8 +7307,9 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_backup(request) @@ -7332,26 +7363,28 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7707,22 +7740,23 @@ async def test_remove_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.remove_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.remove_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.remove_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7941,8 +7975,9 @@ def test_query_metadata_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.query_metadata(request) @@ -7996,26 +8031,28 @@ async def test_query_metadata_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.query_metadata - ] = mock_object + ] = mock_rpc request = {} await client.query_metadata(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.query_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8240,8 +8277,9 @@ def test_move_table_to_database_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.move_table_to_database(request) @@ -8297,26 +8335,28 @@ async def test_move_table_to_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.move_table_to_database - ] = mock_object + ] = mock_rpc request = {} await client.move_table_to_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.move_table_to_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8545,8 +8585,9 @@ def test_alter_metadata_resource_location_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.alter_metadata_resource_location(request) @@ -8602,26 +8643,28 @@ async def test_alter_metadata_resource_location_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.alter_metadata_resource_location - ] = mock_object + ] = mock_rpc request = {} await client.alter_metadata_resource_location(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.alter_metadata_resource_location(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py index 610fce266a7c..cf6f75708fe7 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py @@ -1397,22 +1397,23 @@ async def test_list_federations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_federations - ] = mock_object + ] = mock_rpc request = {} await client.list_federations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_federations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1973,22 +1974,23 @@ async def test_get_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_federation - ] = mock_object + ] = mock_rpc request = {} await client.get_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2304,8 +2306,9 @@ def test_create_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_federation(request) @@ -2361,26 +2364,28 @@ async def test_create_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_federation - ] = mock_object + ] = mock_rpc request = {} await client.create_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2709,8 +2714,9 @@ def test_update_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_federation(request) @@ -2766,26 +2772,28 @@ async def test_update_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_federation - ] = mock_object + ] = mock_rpc request = {} await client.update_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3106,8 +3114,9 @@ def test_delete_federation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_federation(request) @@ -3163,26 +3172,28 @@ async def test_delete_federation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_federation - ] = mock_object + ] = mock_rpc request = {} await client.delete_federation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_federation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc/CHANGELOG.md b/packages/google-cloud-dataproc/CHANGELOG.md index d912ceecb20e..2601f62c33b4 100644 --- a/packages/google-cloud-dataproc/CHANGELOG.md +++ b/packages/google-cloud-dataproc/CHANGELOG.md @@ -4,6 +4,39 @@ [1]: https://pypi.org/project/google-cloud-dataproc/#history +## [5.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.12.0...google-cloud-dataproc-v5.13.0) (2024-09-30) + + +### Features + +* add support for Spark Connect sessions in Dataproc Serverless for Spark ([0d35003](https://github.com/googleapis/google-cloud-python/commit/0d350038411bbdcf10eb7fb6820084abcb362c5a)) + + +### Documentation + +* update docs for `filter` field in `ListSessionsRequest` 
([0d35003](https://github.com/googleapis/google-cloud-python/commit/0d350038411bbdcf10eb7fb6820084abcb362c5a)) + +## [5.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.11.0...google-cloud-dataproc-v5.12.0) (2024-09-16) + + +### Features + +* [google-cloud-dataproc] Add FLINK metric source for Dataproc Metric Source ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Add kms key input for create cluster API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] add resource reference for KMS keys and fix comments ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Add unreachable output field for LIST batch templates API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Add unreachable output field for LIST jobs API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Add unreachable output field for LIST workflow template API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Allow flink and trino job support for workflow templates API ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) +* [google-cloud-dataproc] Allow flink job support for jobs ([2402404](https://github.com/googleapis/google-cloud-python/commit/2402404a5ac48c8289a2dbc24fcc85a1eebe4224)) + +## [5.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.10.2...google-cloud-dataproc-v5.11.0) (2024-09-03) + + +### Features + +* add optional parameters 
(tarball-access) in DiagnoseClusterRequest ([127e5c0](https://github.com/googleapis/google-cloud-python/commit/127e5c097b08042989c124ac4cdfb5147181855d)) + ## [5.10.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.10.1...google-cloud-dataproc-v5.10.2) (2024-07-30) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py index 8c0be718b5bc..5df4195f42c4 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py @@ -135,6 +135,7 @@ CancelJobRequest, DeleteJobRequest, DriverSchedulingConfig, + FlinkJob, GetJobRequest, HadoopJob, HiveJob, @@ -188,6 +189,7 @@ ListSessionsRequest, ListSessionsResponse, Session, + SparkConnectConfig, TerminateSessionRequest, ) from google.cloud.dataproc_v1.types.shared import ( @@ -315,6 +317,7 @@ "CancelJobRequest", "DeleteJobRequest", "DriverSchedulingConfig", + "FlinkJob", "GetJobRequest", "HadoopJob", "HiveJob", @@ -360,6 +363,7 @@ "ListSessionsRequest", "ListSessionsResponse", "Session", + "SparkConnectConfig", "TerminateSessionRequest", "AutotuningConfig", "EnvironmentConfig", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index a4bac342a507..3f1bf5ee8722 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "5.10.2" # {x-release-please-version} +__version__ = "5.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py index 49bcea5780a2..1a6bbd78319e 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py @@ -115,6 +115,7 @@ CancelJobRequest, DeleteJobRequest, DriverSchedulingConfig, + FlinkJob, GetJobRequest, HadoopJob, HiveJob, @@ -168,6 +169,7 @@ ListSessionsRequest, ListSessionsResponse, Session, + SparkConnectConfig, TerminateSessionRequest, ) from .types.shared import ( @@ -271,6 +273,7 @@ "EnvironmentConfig", "ExecutionConfig", "FailureAction", + "FlinkJob", "GceClusterConfig", "GetAutoscalingPolicyRequest", "GetBatchRequest", @@ -351,6 +354,7 @@ "ShieldedInstanceConfig", "SoftwareConfig", "SparkBatch", + "SparkConnectConfig", "SparkHistoryServerConfig", "SparkJob", "SparkRBatch", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index a4bac342a507..3f1bf5ee8722 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "5.10.2" # {x-release-please-version} +__version__ = "5.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py index bb5d8a6db4f6..ce06987dec0c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,10 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AutoscalingPolicyServiceClient).get_transport_class, - type(AutoscalingPolicyServiceClient), - ) + get_transport_class = AutoscalingPolicyServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py index a92bd0cb9c27..6c4f2a92231d 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py @@ -675,7 +675,7 @@ def __init__( Type[AutoscalingPolicyServiceTransport], Callable[..., AutoscalingPolicyServiceTransport], ] = ( - type(self).get_transport_class(transport) + AutoscalingPolicyServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AutoscalingPolicyServiceTransport], transport) ) diff --git 
a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py index 0f99aab34c62..8f1648476acb 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(BatchControllerClient).get_transport_class, type(BatchControllerClient) - ) + get_transport_class = BatchControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py index 00817e503eb4..f9fab06463aa 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py @@ -693,7 +693,7 @@ def __init__( transport_init: Union[ Type[BatchControllerTransport], Callable[..., BatchControllerTransport] ] = ( - type(self).get_transport_class(transport) + BatchControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BatchControllerTransport], transport) ) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py index 38f1c3605bc6..72ad480491a2 100644 --- 
a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -75,6 +74,8 @@ class ClusterControllerAsyncClient: cluster_path = staticmethod(ClusterControllerClient.cluster_path) parse_cluster_path = staticmethod(ClusterControllerClient.parse_cluster_path) + crypto_key_path = staticmethod(ClusterControllerClient.crypto_key_path) + parse_crypto_key_path = staticmethod(ClusterControllerClient.parse_crypto_key_path) node_group_path = staticmethod(ClusterControllerClient.node_group_path) parse_node_group_path = staticmethod(ClusterControllerClient.parse_node_group_path) service_path = staticmethod(ClusterControllerClient.service_path) @@ -201,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ClusterControllerClient).get_transport_class, type(ClusterControllerClient) - ) + get_transport_class = ClusterControllerClient.get_transport_class def __init__( self, @@ -1239,10 +1238,11 @@ async def sample_list_clusters(): label key. **value** can be ``*`` to match all values. ``status.state`` can be one of the following: ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, - ``ERROR``, ``DELETING``, or ``UPDATING``. ``ACTIVE`` - contains the ``CREATING``, ``UPDATING``, and ``RUNNING`` - states. ``INACTIVE`` contains the ``DELETING`` and - ``ERROR`` states. ``clusterName`` is the name of the + ``ERROR``, ``DELETING``, ``UPDATING``, ``STOPPING``, or + ``STOPPED``. ``ACTIVE`` contains the ``CREATING``, + ``UPDATING``, and ``RUNNING`` states. ``INACTIVE`` + contains the ``DELETING``, ``ERROR``, ``STOPPING``, and + ``STOPPED`` states. ``clusterName`` is the name of the cluster provided at creation time. 
Only the logical ``AND`` operator is supported; space-separated items are treated as having an implicit ``AND`` operator. diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py index 64cab2519f3d..d0662bc0348c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py @@ -215,6 +215,30 @@ def parse_cluster_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def node_group_path( project: str, @@ -721,7 +745,7 @@ def __init__( Type[ClusterControllerTransport], Callable[..., ClusterControllerTransport], ] = ( - type(self).get_transport_class(transport) + ClusterControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ClusterControllerTransport], transport) ) @@ -1686,10 +1710,11 @@ def sample_list_clusters(): label key. **value** can be ``*`` to match all values. ``status.state`` can be one of the following: ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, - ``ERROR``, ``DELETING``, or ``UPDATING``. 
``ACTIVE`` - contains the ``CREATING``, ``UPDATING``, and ``RUNNING`` - states. ``INACTIVE`` contains the ``DELETING`` and - ``ERROR`` states. ``clusterName`` is the name of the + ``ERROR``, ``DELETING``, ``UPDATING``, ``STOPPING``, or + ``STOPPED``. ``ACTIVE`` contains the ``CREATING``, + ``UPDATING``, and ``RUNNING`` states. ``INACTIVE`` + contains the ``DELETING``, ``ERROR``, ``STOPPING``, and + ``STOPPED`` states. ``clusterName`` is the name of the cluster provided at creation time. Only the logical ``AND`` operator is supported; space-separated items are treated as having an implicit ``AND`` operator. diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py index 92ced819a375..c028b8119da6 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(JobControllerClient).get_transport_class, type(JobControllerClient) - ) + get_transport_class = JobControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py index 7b09c15712e3..2c846b3d7a6f 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py @@ -642,7 +642,7 @@ def __init__( transport_init: Union[ Type[JobControllerTransport], Callable[..., JobControllerTransport] ] = ( - type(self).get_transport_class(transport) + JobControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., JobControllerTransport], transport) ) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/async_client.py index 32f019859704..9827ac5afdc0 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -196,10 +195,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(NodeGroupControllerClient).get_transport_class, - type(NodeGroupControllerClient), - ) + get_transport_class = NodeGroupControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py index 60acc99b6ea5..74b161e83af4 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py @@ -674,7 +674,7 @@ def __init__( Type[NodeGroupControllerTransport], Callable[..., NodeGroupControllerTransport], ] = ( - type(self).get_transport_class(transport) + NodeGroupControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., NodeGroupControllerTransport], transport) ) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/async_client.py index 3b2db0851dbd..005fd68b3c5e 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -202,9 +201,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SessionControllerClient).get_transport_class, type(SessionControllerClient) - ) + get_transport_class = SessionControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py index 4ac9945fc27b..2738c51bb43f 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py @@ -718,7 +718,7 @@ def __init__( Type[SessionControllerTransport], Callable[..., SessionControllerTransport], ] = ( - type(self).get_transport_class(transport) + SessionControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SessionControllerTransport], transport) ) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/async_client.py index 01298bd7c2e8..4419ecc2b8de 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -210,10 +209,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SessionTemplateControllerClient).get_transport_class, - type(SessionTemplateControllerClient), - ) + get_transport_class = SessionTemplateControllerClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py index e22e2173c4b2..442118b1fef8 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py @@ -698,7 +698,7 @@ def __init__( Type[SessionTemplateControllerTransport], Callable[..., SessionTemplateControllerTransport], ] = ( - type(self).get_transport_class(transport) + SessionTemplateControllerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SessionTemplateControllerTransport], transport) ) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py index 1a59261f4b97..90bd5f500cbc 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -76,6 +75,10 @@ class WorkflowTemplateServiceAsyncClient: ) _DEFAULT_UNIVERSE = WorkflowTemplateServiceClient._DEFAULT_UNIVERSE + crypto_key_path = staticmethod(WorkflowTemplateServiceClient.crypto_key_path) + parse_crypto_key_path = staticmethod( + WorkflowTemplateServiceClient.parse_crypto_key_path + ) node_group_path = staticmethod(WorkflowTemplateServiceClient.node_group_path) parse_node_group_path = staticmethod( WorkflowTemplateServiceClient.parse_node_group_path @@ -214,10 +217,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WorkflowTemplateServiceClient).get_transport_class, - type(WorkflowTemplateServiceClient), - ) + get_transport_class = WorkflowTemplateServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py index f99de568891a..175bc494f421 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py @@ -194,6 +194,30 @@ def transport(self) -> WorkflowTemplateServiceTransport: """ return self._transport + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = 
re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def node_group_path( project: str, @@ -726,7 +750,7 @@ def __init__( Type[WorkflowTemplateServiceTransport], Callable[..., WorkflowTemplateServiceTransport], ] = ( - type(self).get_transport_class(transport) + WorkflowTemplateServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WorkflowTemplateServiceTransport], transport) ) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py index 535fc0e4fc92..62dce7408efa 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py @@ -84,6 +84,7 @@ CancelJobRequest, DeleteJobRequest, DriverSchedulingConfig, + FlinkJob, GetJobRequest, HadoopJob, HiveJob, @@ -137,6 +138,7 @@ ListSessionsRequest, ListSessionsResponse, Session, + SparkConnectConfig, TerminateSessionRequest, ) from .shared import ( @@ -248,6 +250,7 @@ "CancelJobRequest", "DeleteJobRequest", "DriverSchedulingConfig", + "FlinkJob", "GetJobRequest", "HadoopJob", "HiveJob", @@ -293,6 +296,7 @@ "ListSessionsRequest", "ListSessionsResponse", "Session", + "SparkConnectConfig", "TerminateSessionRequest", "AutotuningConfig", "EnvironmentConfig", diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py index bff597bc91b6..2459180957df 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py @@ -183,6 +183,11 @@ class ListBatchesResponse(proto.Message): A token, which can be sent as ``page_token`` to retrieve the next page. 
If this field is omitted, there are no subsequent pages. + unreachable (MutableSequence[str]): + Output only. List of Batches that could not + be included in the response. Attempting to get + one of these resources may indicate why it was + not included in the list response. """ @property @@ -198,6 +203,10 @@ def raw_page(self): proto.STRING, number=2, ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class DeleteBatchRequest(proto.Message): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py index ec33ae97f36c..169d350f046b 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py @@ -464,15 +464,50 @@ class EncryptionConfig(proto.Message): Attributes: gce_pd_kms_key_name (str): - Optional. The Cloud KMS key name to use for - PD disk encryption for all instances in the - cluster. + Optional. The Cloud KMS key resource name to use for + persistent disk encryption for all instances in the cluster. + See [Use CMEK with cluster data] + (https://cloud.google.com//dataproc/docs/concepts/configuring-clusters/customer-managed-encryption#use_cmek_with_cluster_data) + for more information. + kms_key (str): + Optional. The Cloud KMS key resource name to use for cluster + persistent disk and job argument encryption. See [Use CMEK + with cluster data] + (https://cloud.google.com//dataproc/docs/concepts/configuring-clusters/customer-managed-encryption#use_cmek_with_cluster_data) + for more information. 
+ + When this key resource name is provided, the following job + arguments of the following job types submitted to the + cluster are encrypted using CMEK: + + - `FlinkJob + args `__ + - `HadoopJob + args `__ + - `SparkJob + args `__ + - `SparkRJob + args `__ + - `PySparkJob + args `__ + - `SparkSqlJob `__ + scriptVariables and queryList.queries + - `HiveJob `__ + scriptVariables and queryList.queries + - `PigJob `__ + scriptVariables and queryList.queries + - `PrestoJob `__ + scriptVariables and queryList.queries """ gce_pd_kms_key_name: str = proto.Field( proto.STRING, number=1, ) + kms_key: str = proto.Field( + proto.STRING, + number=2, + ) class GceClusterConfig(proto.Message): @@ -519,14 +554,25 @@ class GceClusterConfig(proto.Message): - ``projects/[project_id]/regions/[region]/subnetworks/sub0`` - ``sub0`` internal_ip_only (bool): - Optional. If true, all instances in the cluster will only - have internal IP addresses. By default, clusters are not - restricted to internal IP addresses, and will have ephemeral - external IP addresses assigned to each instance. This - ``internal_ip_only`` restriction can only be enabled for - subnetwork enabled networks, and all off-cluster - dependencies must be configured to be accessible without - external IP addresses. + Optional. This setting applies to subnetwork-enabled + networks. It is set to ``true`` by default in clusters + created with image versions 2.2.x. + + When set to ``true``: + + - All cluster VMs have internal IP addresses. + - [Google Private Access] + (https://cloud.google.com/vpc/docs/private-google-access) + must be enabled to access Dataproc and other Google Cloud + APIs. + - Off-cluster dependencies must be configured to be + accessible without external IP addresses. + + When set to ``false``: + + - Cluster VMs are not restricted to internal IP addresses. + - Ephemeral external IP addresses are assigned to each + cluster VM. This field is a member of `oneof`_ ``_internal_ip_only``. 
private_ipv6_google_access (google.cloud.dataproc_v1.types.GceClusterConfig.PrivateIpv6GoogleAccess): @@ -560,9 +606,9 @@ class GceClusterConfig(proto.Message): - https://www.googleapis.com/auth/bigtable.data - https://www.googleapis.com/auth/devstorage.full_control tags (MutableSequence[str]): - The Compute Engine tags to add to all instances (see + The Compute Engine network tags to add to all instances (see `Tagging - instances `__). + instances `__). metadata (MutableMapping[str, str]): Optional. The Compute Engine metadata entries to add to all instances (see `Project and instance @@ -1156,15 +1202,15 @@ class AcceleratorConfig(proto.Message): Examples: - - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-k80`` - - ``projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-k80`` - - ``nvidia-tesla-k80`` + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-t4`` + - ``projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-t4`` + - ``nvidia-tesla-t4`` **Auto Zone Exception**: If you are using the Dataproc `Auto Zone Placement `__ feature, you must use the short name of the accelerator type - resource, for example, ``nvidia-tesla-k80``. + resource, for example, ``nvidia-tesla-t4``. accelerator_count (int): The number of the accelerator cards of this type exposed to this instance. @@ -1501,8 +1547,8 @@ class KerberosConfig(proto.Message): encrypted file containing the root principal password. kms_key_uri (str): - Optional. The uri of the KMS key used to - encrypt various sensitive files. + Optional. The URI of the KMS key used to + encrypt sensitive files. keystore_uri (str): Optional. The Cloud Storage URI of the keystore file used for SSL encryption. If not @@ -1649,7 +1695,7 @@ class SoftwareConfig(proto.Message): image_version (str): Optional. The version of software inside the cluster. 
It must be one of the supported `Dataproc - Versions `__, + Versions `__, such as "1.2" (including a subminor version, such as "1.2.29"), or the `"preview" version `__. @@ -1834,6 +1880,8 @@ class MetricSource(proto.Enum): Hiveserver2 metric source. HIVEMETASTORE (7): hivemetastore metric source + FLINK (8): + flink metric source """ METRIC_SOURCE_UNSPECIFIED = 0 MONITORING_AGENT_DEFAULTS = 1 @@ -1843,6 +1891,7 @@ class MetricSource(proto.Enum): SPARK_HISTORY_SERVER = 5 HIVESERVER2 = 6 HIVEMETASTORE = 7 + FLINK = 8 class Metric(proto.Message): r"""A Dataproc custom metric. @@ -2312,11 +2361,12 @@ class ListClustersRequest(proto.Message): or ``labels.[KEY]``, and ``[KEY]`` is a label key. **value** can be ``*`` to match all values. ``status.state`` can be one of the following: ``ACTIVE``, ``INACTIVE``, - ``CREATING``, ``RUNNING``, ``ERROR``, ``DELETING``, or - ``UPDATING``. ``ACTIVE`` contains the ``CREATING``, - ``UPDATING``, and ``RUNNING`` states. ``INACTIVE`` contains - the ``DELETING`` and ``ERROR`` states. ``clusterName`` is - the name of the cluster provided at creation time. Only the + ``CREATING``, ``RUNNING``, ``ERROR``, ``DELETING``, + ``UPDATING``, ``STOPPING``, or ``STOPPED``. ``ACTIVE`` + contains the ``CREATING``, ``UPDATING``, and ``RUNNING`` + states. ``INACTIVE`` contains the ``DELETING``, ``ERROR``, + ``STOPPING``, and ``STOPPED`` states. ``clusterName`` is the + name of the cluster provided at creation time. Only the logical ``AND`` operator is supported; space-separated items are treated as having an implicit ``AND`` operator. @@ -2393,10 +2443,14 @@ class DiagnoseClusterRequest(proto.Message): cluster_name (str): Required. The cluster name. tarball_gcs_dir (str): - Optional. The output Cloud Storage directory - for the diagnostic tarball. If not specified, a - task-specific directory in the cluster's staging - bucket will be used. + Optional. (Optional) The output Cloud Storage + directory for the diagnostic tarball. 
If not + specified, a task-specific directory in the + cluster's staging bucket will be used. + tarball_access (google.cloud.dataproc_v1.types.DiagnoseClusterRequest.TarballAccess): + Optional. (Optional) The access type to the + diagnostic tarball. If not specified, falls back + to default access of the bucket diagnosis_interval (google.type.interval_pb2.Interval): Optional. Time interval in which diagnosis should be carried out on the cluster. @@ -2410,6 +2464,25 @@ class DiagnoseClusterRequest(proto.Message): performed. """ + class TarballAccess(proto.Enum): + r"""Defines who has access to the diagnostic tarball + + Values: + TARBALL_ACCESS_UNSPECIFIED (0): + Tarball Access unspecified. Falls back to + default access of the bucket + GOOGLE_CLOUD_SUPPORT (1): + Google Cloud Support group has read access to + the diagnostic tarball + GOOGLE_DATAPROC_DIAGNOSE (2): + Google Cloud Dataproc Diagnose service + account has read access to the diagnostic + tarball + """ + TARBALL_ACCESS_UNSPECIFIED = 0 + GOOGLE_CLOUD_SUPPORT = 1 + GOOGLE_DATAPROC_DIAGNOSE = 2 + project_id: str = proto.Field( proto.STRING, number=1, @@ -2426,6 +2499,11 @@ class DiagnoseClusterRequest(proto.Message): proto.STRING, number=4, ) + tarball_access: TarballAccess = proto.Field( + proto.ENUM, + number=5, + enum=TarballAccess, + ) diagnosis_interval: interval_pb2.Interval = proto.Field( proto.MESSAGE, number=6, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py index b0e094f18985..2f9bcc9dba29 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py @@ -35,6 +35,7 @@ "SparkRJob", "PrestoJob", "TrinoJob", + "FlinkJob", "JobPlacement", "JobStatus", "JobReference", @@ -60,7 +61,7 @@ class LoggingConfig(proto.Message): Attributes: driver_log_levels (MutableMapping[str, 
google.cloud.dataproc_v1.types.LoggingConfig.Level]): The per-package log levels for the driver. - This may include "root" package name to + This can include "root" package name to configure rootLogger. Examples: - 'com.google = FATAL' @@ -144,7 +145,7 @@ class HadoopJob(proto.Message): args (MutableSequence[str]): Optional. The arguments to pass to the driver. Do not include arguments, such as ``-libjars`` or ``-Dfoo=bar``, - that can be set as job properties, since a collision may + that can be set as job properties, since a collision might occur that causes an incorrect job submission. jar_file_uris (MutableSequence[str]): Optional. Jar file URIs to add to the @@ -163,7 +164,7 @@ class HadoopJob(proto.Message): properties (MutableMapping[str, str]): Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set - by the Dataproc API may be overwritten. Can include + by the Dataproc API might be overwritten. Can include properties set in ``/etc/hadoop/conf/*-site`` and classes in user code. logging_config (google.cloud.dataproc_v1.types.LoggingConfig): @@ -229,7 +230,7 @@ class SparkJob(proto.Message): main_class (str): The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or - specified in ``jar_file_uris``. + specified in SparkJob.jar_file_uris. This field is a member of `oneof`_ ``driver``. args (MutableSequence[str]): @@ -253,8 +254,9 @@ class SparkJob(proto.Message): properties (MutableMapping[str, str]): Optional. A mapping of property names to values, used to configure Spark. Properties that - conflict with values set by the Dataproc API may - be overwritten. Can include properties set in + conflict with values set by the Dataproc API + might be overwritten. Can include properties set + in /etc/spark/conf/spark-defaults.conf and classes in user code. 
logging_config (google.cloud.dataproc_v1.types.LoggingConfig): @@ -335,7 +337,7 @@ class PySparkJob(proto.Message): Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc - API may be overwritten. Can include properties + API might be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. @@ -441,8 +443,8 @@ class HiveJob(proto.Message): properties (MutableMapping[str, str]): Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by - the Dataproc API may be overwritten. Can include properties - set in ``/etc/hadoop/conf/*-site.xml``, + the Dataproc API might be overwritten. Can include + properties set in ``/etc/hadoop/conf/*-site.xml``, /etc/hive/conf/hive-site.xml, and classes in user code. jar_file_uris (MutableSequence[str]): Optional. HCFS URIs of jar files to add to @@ -511,7 +513,7 @@ class SparkSqlJob(proto.Message): Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the - Dataproc API may be overwritten. + Dataproc API might be overwritten. jar_file_uris (MutableSequence[str]): Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. @@ -583,8 +585,8 @@ class PigJob(proto.Message): properties (MutableMapping[str, str]): Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by - the Dataproc API may be overwritten. Can include properties - set in ``/etc/hadoop/conf/*-site.xml``, + the Dataproc API might be overwritten. Can include + properties set in ``/etc/hadoop/conf/*-site.xml``, /etc/pig/conf/pig.properties, and classes in user code. jar_file_uris (MutableSequence[str]): Optional. HCFS URIs of jar files to add to @@ -659,7 +661,7 @@ class SparkRJob(proto.Message): Optional. 
A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc - API may be overwritten. Can include properties + API might be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. @@ -856,6 +858,86 @@ class TrinoJob(proto.Message): ) +class FlinkJob(proto.Message): + r"""A Dataproc job for running Apache Flink applications on YARN. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + main_jar_file_uri (str): + The HCFS URI of the jar file that contains + the main class. + + This field is a member of `oneof`_ ``driver``. + main_class (str): + The name of the driver's main class. The jar file that + contains the class must be in the default CLASSPATH or + specified in + [jarFileUris][google.cloud.dataproc.v1.FlinkJob.jar_file_uris]. + + This field is a member of `oneof`_ ``driver``. + args (MutableSequence[str]): + Optional. The arguments to pass to the driver. Do not + include arguments, such as ``--conf``, that can be set as + job properties, since a collision might occur that causes an + incorrect job submission. + jar_file_uris (MutableSequence[str]): + Optional. HCFS URIs of jar files to add to + the CLASSPATHs of the Flink driver and tasks. + savepoint_uri (str): + Optional. HCFS URI of the savepoint, which + contains the last saved progress for starting + the current job. + properties (MutableMapping[str, str]): + Optional. A mapping of property names to values, used to + configure Flink. Properties that conflict with values set by + the Dataproc API might be overwritten. 
Can include + properties set in ``/etc/flink/conf/flink-defaults.conf`` + and classes in user code. + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): + Optional. The runtime log config for job + execution. + """ + + main_jar_file_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="driver", + ) + main_class: str = proto.Field( + proto.STRING, + number=2, + oneof="driver", + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + jar_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + savepoint_uri: str = proto.Field( + proto.STRING, + number=9, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="LoggingConfig", + ) + + class JobPlacement(proto.Message): r"""Dataproc job config. @@ -894,9 +976,8 @@ class JobStatus(proto.Message): Output only. A state message specifying the overall job state. details (str): - Optional. Output only. Job state details, - such as an error description if the state is - ERROR. + Optional. Output only. Job state details, such as an error + description if the state is ``ERROR``. state_start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when this state was entered. @@ -963,14 +1044,14 @@ class Substate(proto.Enum): Applies to RUNNING state. QUEUED (2): The Job has been received and is awaiting - execution (it may be waiting for a condition to - be met). See the "details" field for the reason - for the delay. + execution (it might be waiting for a condition + to be met). See the "details" field for the + reason for the delay. Applies to RUNNING state. STALE_STATUS (3): The agent-reported status is out of date, - which may be caused by a loss of communication + which can be caused by a loss of communication between the agent and Dataproc. 
If the agent does not send a timely update, the job will fail. @@ -1165,10 +1246,14 @@ class Job(proto.Message): trino_job (google.cloud.dataproc_v1.types.TrinoJob): Optional. Job is a Trino job. + This field is a member of `oneof`_ ``type_job``. + flink_job (google.cloud.dataproc_v1.types.FlinkJob): + Optional. Job is a Flink job. + This field is a member of `oneof`_ ``type_job``. status (google.cloud.dataproc_v1.types.JobStatus): Output only. The job status. Additional application-specific - status information may be contained in the type_job and + status information might be contained in the type_job and yarn_applications fields. status_history (MutableSequence[google.cloud.dataproc_v1.types.JobStatus]): Output only. The previous job status. @@ -1177,20 +1262,20 @@ class Job(proto.Message): this job. **Beta** Feature: This report is available for testing - purposes only. It may be changed before final release. + purposes only. It might be changed before final release. driver_output_resource_uri (str): Output only. A URI pointing to the location of the stdout of the job's driver program. driver_control_files_uri (str): Output only. If present, the location of miscellaneous - control files which may be used as part of job setup and - handling. If not present, control files may be placed in the - same location as ``driver_output_uri``. + control files which can be used as part of job setup and + handling. If not present, control files might be placed in + the same location as ``driver_output_uri``. labels (MutableMapping[str, str]): Optional. The labels to associate with this job. Label **keys** must contain 1 to 63 characters, and must conform to `RFC 1035 `__. - Label **values** may be empty, but, if present, must contain + Label **values** can be empty, but, if present, must contain 1 to 63 characters, and must conform to `RFC 1035 `__. No more than 32 labels can be associated with a job. 
@@ -1199,7 +1284,8 @@ class Job(proto.Message): job_uuid (str): Output only. A UUID that uniquely identifies a job within the project over time. This is in contrast to a - user-settable reference.job_id that may be reused over time. + user-settable reference.job_id that might be reused over + time. done (bool): Output only. Indicates whether the job is completed. If the value is ``false``, the job is still in progress. If @@ -1273,6 +1359,12 @@ class Job(proto.Message): oneof="type_job", message="TrinoJob", ) + flink_job: "FlinkJob" = proto.Field( + proto.MESSAGE, + number=29, + oneof="type_job", + message="FlinkJob", + ) status: "JobStatus" = proto.Field( proto.MESSAGE, number=8, @@ -1348,12 +1440,12 @@ class JobScheduling(proto.Message): Attributes: max_failures_per_hour (int): - Optional. Maximum number of times per hour a driver may be + Optional. Maximum number of times per hour a driver can be restarted as a result of driver exiting with non-zero code before job is reported failed. - A job may be reported as thrashing if the driver exits with - a non-zero code four times within a 10-minute window. + A job might be reported as thrashing if the driver exits + with a non-zero code four times within a 10-minute window. Maximum value is 10. @@ -1361,7 +1453,7 @@ class JobScheduling(proto.Message): Dataproc [workflow templates] (https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#adding_jobs_to_a_template). max_failures_total (int): - Optional. Maximum total number of times a driver may be + Optional. Maximum total number of times a driver can be restarted as a result of the driver exiting with a non-zero code. After the maximum number is reached, the job will be reported as failed. @@ -1644,6 +1736,12 @@ class ListJobsResponse(proto.Message): are more results to fetch. To fetch additional results, provide this value as the ``page_token`` in a subsequent ListJobsRequest. + unreachable (MutableSequence[str]): + Output only. 
List of jobs with + [kms_key][google.cloud.dataproc.v1.EncryptionConfig.kms_key]-encrypted + parameters that could not be decrypted. A response to a + ``jobs.get`` request may indicate the reason for the + decryption failure for a specific job. """ @property @@ -1659,6 +1757,10 @@ def raw_page(self): proto.STRING, number=2, ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class CancelJobRequest(proto.Message): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/session_templates.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/session_templates.py index 60c792b58b5b..66125b2cb461 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/session_templates.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/session_templates.py @@ -175,6 +175,11 @@ class DeleteSessionTemplateRequest(proto.Message): class SessionTemplate(proto.Message): r"""A representation of a session template. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -189,6 +194,10 @@ class SessionTemplate(proto.Message): jupyter_session (google.cloud.dataproc_v1.types.JupyterConfig): Optional. Jupyter session config. + This field is a member of `oneof`_ ``session_config``. + spark_connect_session (google.cloud.dataproc_v1.types.SparkConnectConfig): + Optional. Spark Connect session config. + This field is a member of `oneof`_ ``session_config``. creator (str): Output only. 
The email address of the user @@ -236,6 +245,12 @@ class SessionTemplate(proto.Message): oneof="session_config", message=sessions.JupyterConfig, ) + spark_connect_session: sessions.SparkConnectConfig = proto.Field( + proto.MESSAGE, + number=11, + oneof="session_config", + message=sessions.SparkConnectConfig, + ) creator: str = proto.Field( proto.STRING, number=5, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/sessions.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/sessions.py index 2a5b2cc5d248..1ab37c9ec32c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/sessions.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/sessions.py @@ -33,6 +33,7 @@ "DeleteSessionRequest", "Session", "JupyterConfig", + "SparkConnectConfig", }, ) @@ -125,13 +126,16 @@ class ListSessionsRequest(proto.Message): various fields in each session resource. Filters are case sensitive, and may contain multiple clauses combined with logical operators (AND, OR). Supported fields are - ``session_id``, ``session_uuid``, ``state``, and - ``create_time``. + ``session_id``, ``session_uuid``, ``state``, + ``create_time``, and ``labels``. Example: ``state = ACTIVE and create_time < "2023-01-01T00:00:00Z"`` is a filter for sessions in an ACTIVE state that were created before 2023-01-01. + ``state = ACTIVE and labels.environment=production`` is a + filter for sessions in an ACTIVE state that have a + production environment label. See https://google.aip.dev/assets/misc/ebnf-filtering.txt for a detailed description of the filter syntax and a list @@ -249,6 +253,11 @@ class DeleteSessionRequest(proto.Message): class Session(proto.Message): r"""A representation of a session. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -264,6 +273,10 @@ class Session(proto.Message): jupyter_session (google.cloud.dataproc_v1.types.JupyterConfig): Optional. Jupyter session config. + This field is a member of `oneof`_ ``session_config``. + spark_connect_session (google.cloud.dataproc_v1.types.SparkConnectConfig): + Optional. Spark Connect session config. + This field is a member of `oneof`_ ``session_config``. runtime_info (google.cloud.dataproc_v1.types.RuntimeInfo): Output only. Runtime information about @@ -388,6 +401,12 @@ class SessionStateHistory(proto.Message): oneof="session_config", message="JupyterConfig", ) + spark_connect_session: "SparkConnectConfig" = proto.Field( + proto.MESSAGE, + number=17, + oneof="session_config", + message="SparkConnectConfig", + ) runtime_info: shared.RuntimeInfo = proto.Field( proto.MESSAGE, number=6, @@ -478,4 +497,8 @@ class Kernel(proto.Enum): ) +class SparkConnectConfig(proto.Message): + r"""Spark Connect configuration for an interactive session.""" + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py index ed37c1b8a565..656453c5fe33 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py @@ -54,10 +54,11 @@ class Component(proto.Enum): Unspecified component. Specifying this will cause Cluster creation to fail. ANACONDA (5): - The Anaconda python distribution. The Anaconda component is - not supported in the Dataproc [2.0 image] - (/https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-release-2.0). - The 2.0 image is pre-installed with Miniconda. 
+ The Anaconda component is no longer supported or applicable + to [supported Dataproc on Compute Engine image versions] + (https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-version-clusters#supported-dataproc-image-versions). + It cannot be activated on clusters created with supported + Dataproc on Compute Engine image versions. DOCKER (13): Docker DRUID (9): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py index 3526627e14b0..5022f3e353a7 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py @@ -124,8 +124,51 @@ class WorkflowTemplate(proto.Message): `managed cluster `__, the cluster is deleted. + encryption_config (google.cloud.dataproc_v1.types.WorkflowTemplate.EncryptionConfig): + Optional. Encryption settings for encrypting + workflow template job arguments. """ + class EncryptionConfig(proto.Message): + r"""Encryption settings for encrypting workflow template job + arguments. + + Attributes: + kms_key (str): + Optional. The Cloud KMS key name to use for encrypting + workflow template job arguments. 
+ + When this this key is provided, the following workflow + template [job arguments] + (https://cloud.google.com/dataproc/docs/concepts/workflows/use-workflows#adding_jobs_to_a_template), + if present, are `CMEK + encrypted `__: + + - `FlinkJob + args `__ + - `HadoopJob + args `__ + - `SparkJob + args `__ + - `SparkRJob + args `__ + - `PySparkJob + args `__ + - `SparkSqlJob `__ + scriptVariables and queryList.queries + - `HiveJob `__ + scriptVariables and queryList.queries + - `PigJob `__ + scriptVariables and queryList.queries + - `PrestoJob `__ + scriptVariables and queryList.queries + """ + + kms_key: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( proto.STRING, number=2, @@ -173,6 +216,11 @@ class WorkflowTemplate(proto.Message): number=10, message=duration_pb2.Duration, ) + encryption_config: EncryptionConfig = proto.Field( + proto.MESSAGE, + number=11, + message=EncryptionConfig, + ) class WorkflowTemplatePlacement(proto.Message): @@ -346,6 +394,14 @@ class OrderedJob(proto.Message): presto_job (google.cloud.dataproc_v1.types.PrestoJob): Optional. Job is a Presto job. + This field is a member of `oneof`_ ``job_type``. + trino_job (google.cloud.dataproc_v1.types.TrinoJob): + Optional. Job is a Trino job. + + This field is a member of `oneof`_ ``job_type``. + flink_job (google.cloud.dataproc_v1.types.FlinkJob): + Optional. Job is a Flink job. + This field is a member of `oneof`_ ``job_type``. labels (MutableMapping[str, str]): Optional. The labels to associate with this job. 
@@ -419,6 +475,18 @@ class OrderedJob(proto.Message): oneof="job_type", message=gcd_jobs.PrestoJob, ) + trino_job: gcd_jobs.TrinoJob = proto.Field( + proto.MESSAGE, + number=13, + oneof="job_type", + message=gcd_jobs.TrinoJob, + ) + flink_job: gcd_jobs.FlinkJob = proto.Field( + proto.MESSAGE, + number=14, + oneof="job_type", + message=gcd_jobs.FlinkJob, + ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, @@ -1095,6 +1163,12 @@ class ListWorkflowTemplatesResponse(proto.Message): are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent ListWorkflowTemplatesRequest. + unreachable (MutableSequence[str]): + Output only. List of workflow templates that + could not be included in the response. + Attempting to get one of these resources may + indicate why it was not included in the list + response. """ @property @@ -1110,6 +1184,10 @@ def raw_page(self): proto.STRING, number=2, ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class DeleteWorkflowTemplateRequest(proto.Message): diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index f461de26e252..f516b09c86b6 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "5.10.2" + "version": "5.13.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py b/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py index 0a6aa2e35d07..565665908c4f 100644 --- a/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py 
+++ b/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py @@ -54,7 +54,7 @@ class dataprocCallTransformer(cst.CSTTransformer): 'delete_session': ('name', 'request_id', ), 'delete_session_template': ('name', ), 'delete_workflow_template': ('name', 'version', ), - 'diagnose_cluster': ('project_id', 'region', 'cluster_name', 'tarball_gcs_dir', 'diagnosis_interval', 'jobs', 'yarn_application_ids', ), + 'diagnose_cluster': ('project_id', 'region', 'cluster_name', 'tarball_gcs_dir', 'tarball_access', 'diagnosis_interval', 'jobs', 'yarn_application_ids', ), 'get_autoscaling_policy': ('name', ), 'get_batch': ('name', ), 'get_cluster': ('project_id', 'region', 'cluster_name', ), diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py index dcc74698fce3..9507d196b9a5 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py @@ -1380,22 +1380,23 @@ async def test_create_autoscaling_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_autoscaling_policy - ] = mock_object + ] = mock_rpc request = {} await client.create_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_autoscaling_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1779,22 +1780,23 @@ async def test_update_autoscaling_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_autoscaling_policy - ] = mock_object + ] = mock_rpc request = {} await client.update_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_autoscaling_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2172,22 +2174,23 @@ async def test_get_autoscaling_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_autoscaling_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_autoscaling_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2564,22 +2567,23 @@ async def test_list_autoscaling_policies_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_autoscaling_policies - ] = mock_object + ] = mock_rpc request = {} await client.list_autoscaling_policies(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_autoscaling_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3153,22 +3157,23 @@ async def test_delete_autoscaling_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_autoscaling_policy - ] = mock_object + ] = mock_rpc request = {} await client.delete_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_autoscaling_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py index e8c71d15897a..b1b1cf5bcb8b 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py @@ -1268,8 +1268,9 @@ def test_create_batch_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_batch(request) @@ -1323,26 +1324,28 @@ async def test_create_batch_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_batch - ] = mock_object + ] = mock_rpc request = {} await client.create_batch(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_batch(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1721,22 +1724,23 @@ async def test_get_batch_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_batch - ] = mock_object + ] = mock_rpc request = {} await client.get_batch(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_batch(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1949,6 +1953,7 @@ def test_list_batches(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = batches.ListBatchesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_batches(request) @@ -1961,6 +1966,7 @@ def test_list_batches(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListBatchesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_batches_empty_call(): @@ -2066,6 +2072,7 @@ async def test_list_batches_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( batches.ListBatchesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_batches() @@ -2097,22 +2104,23 @@ async def test_list_batches_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_batches - ] = mock_object + ] = mock_rpc request = {} await client.list_batches(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_batches(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2134,6 +2142,7 @@ async def test_list_batches_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( batches.ListBatchesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_batches(request) @@ -2147,6 +2156,7 @@ async def test_list_batches_async( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListBatchesAsyncPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @@ -2648,22 +2658,23 @@ async def test_delete_batch_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_batch - ] = mock_object + ] = mock_rpc request = {} await client.delete_batch(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_batch(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3642,6 +3653,7 @@ def test_list_batches_rest(request_type): # Designate an appropriate value for the returned response. return_value = batches.ListBatchesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -3658,6 +3670,7 @@ def test_list_batches_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListBatchesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_batches_rest_use_cached_wrapped_rpc(): diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py index 238b6364103c..6910a6fc2fa4 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -1285,8 +1285,9 @@ def test_create_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_cluster(request) @@ -1340,26 +1341,28 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster - ] = mock_object + ] = mock_rpc request = {} await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1677,8 +1680,9 @@ def test_update_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_cluster(request) @@ -1732,26 +1736,28 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster - ] = mock_object + ] = mock_rpc request = {} await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2093,8 +2099,9 @@ def test_stop_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.stop_cluster(request) @@ -2148,26 +2155,28 @@ async def test_stop_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_cluster - ] = mock_object + ] = mock_rpc request = {} await client.stop_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.stop_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2387,8 +2396,9 @@ def test_start_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.start_cluster(request) @@ -2442,26 +2452,28 @@ async def test_start_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_cluster - ] = mock_object + ] = mock_rpc request = {} await client.start_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.start_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2681,8 +2693,9 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_cluster(request) @@ -2736,26 +2749,28 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster - ] = mock_object + ] = mock_rpc request = {} await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3135,22 +3150,23 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster - ] = mock_object + ] = mock_rpc request = {} await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3529,22 +3545,23 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters - ] = mock_object + ] = mock_rpc request = {} await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_clusters(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4066,8 +4083,9 @@ def test_diagnose_cluster_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.diagnose_cluster(request) @@ -4121,26 +4139,28 @@ async def test_diagnose_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.diagnose_cluster - ] = mock_object + ] = mock_rpc request = {} await client.diagnose_cluster(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.diagnose_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4456,7 +4476,10 @@ def test_create_cluster_rest(request_type): "execution_timeout": {"seconds": 751, "nanos": 543}, } ], - "encryption_config": {"gce_pd_kms_key_name": "gce_pd_kms_key_name_value"}, + "encryption_config": { + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", + }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { "kerberos_config": { @@ -5062,7 +5085,10 @@ def test_update_cluster_rest(request_type): "execution_timeout": {"seconds": 751, "nanos": 543}, } ], - "encryption_config": {"gce_pd_kms_key_name": "gce_pd_kms_key_name_value"}, + "encryption_config": { + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", + }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { 
"kerberos_config": { @@ -8147,11 +8173,42 @@ def test_parse_cluster_path(): assert expected == actual -def test_node_group_path(): +def test_crypto_key_path(): project = "cuttlefish" - region = "mussel" - cluster = "winkle" - node_group = "nautilus" + location = "mussel" + key_ring = "winkle" + crypto_key = "nautilus" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = ClusterControllerClient.crypto_key_path( + project, location, key_ring, crypto_key + ) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "scallop", + "location": "abalone", + "key_ring": "squid", + "crypto_key": "clam", + } + path = ClusterControllerClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterControllerClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_node_group_path(): + project = "whelk" + region = "octopus" + cluster = "oyster" + node_group = "nudibranch" expected = "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format( project=project, region=region, @@ -8166,10 +8223,10 @@ def test_node_group_path(): def test_parse_node_group_path(): expected = { - "project": "scallop", - "region": "abalone", - "cluster": "squid", - "node_group": "clam", + "project": "cuttlefish", + "region": "mussel", + "cluster": "winkle", + "node_group": "nautilus", } path = ClusterControllerClient.node_group_path(**expected) @@ -8179,9 +8236,9 @@ def test_parse_node_group_path(): def test_service_path(): - project = "whelk" - location = "octopus" - service = "oyster" + project = "scallop" + location = "abalone" + service = "squid" expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -8193,9 +8250,9 @@ def test_service_path(): def 
test_parse_service_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "service": "mussel", + "project": "clam", + "location": "whelk", + "service": "octopus", } path = ClusterControllerClient.service_path(**expected) @@ -8205,7 +8262,7 @@ def test_parse_service_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -8215,7 +8272,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "nudibranch", } path = ClusterControllerClient.common_billing_account_path(**expected) @@ -8225,7 +8282,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -8235,7 +8292,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "mussel", } path = ClusterControllerClient.common_folder_path(**expected) @@ -8245,7 +8302,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -8255,7 +8312,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "nautilus", } path = ClusterControllerClient.common_organization_path(**expected) @@ -8265,7 +8322,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -8275,7 +8332,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "abalone", } path = 
ClusterControllerClient.common_project_path(**expected) @@ -8285,8 +8342,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -8297,8 +8354,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "whelk", + "location": "octopus", } path = ClusterControllerClient.common_location_path(**expected) diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py index e59a1ed6a1ab..8b8450a8006f 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py @@ -1303,22 +1303,23 @@ async def test_submit_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.submit_job - ] = mock_object + ] = mock_rpc request = {} await client.submit_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.submit_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1650,8 +1651,9 @@ def test_submit_job_as_operation_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.submit_job_as_operation(request) @@ -1707,26 +1709,28 @@ async def test_submit_job_as_operation_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.submit_job_as_operation - ] = mock_object + ] = mock_rpc request = {} await client.submit_job_as_operation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.submit_job_as_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2115,22 +2119,23 @@ async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_job - ] = mock_object + ] = mock_rpc request = {} await client.get_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2363,6 +2368,7 @@ def test_list_jobs(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = jobs.ListJobsResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_jobs(request) @@ -2375,6 +2381,7 @@ def test_list_jobs(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListJobsPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_jobs_empty_call(): @@ -2482,6 +2489,7 @@ async def test_list_jobs_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( jobs.ListJobsResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_jobs() @@ -2511,22 +2519,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_jobs - ] = mock_object + ] = mock_rpc request = {} await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2548,6 +2557,7 @@ async def test_list_jobs_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( jobs.ListJobsResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_jobs(request) @@ -2561,6 +2571,7 @@ async def test_list_jobs_async( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListJobsAsyncPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @@ -3107,22 +3118,23 @@ async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_job - ] = mock_object + ] = mock_rpc request = {} await client.update_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3408,22 +3420,23 @@ async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.cancel_job - ] = mock_object + ] = mock_rpc request = {} await client.cancel_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.cancel_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3793,22 +3806,23 @@ async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_job - ] = mock_object + ] = mock_rpc request = {} await client.delete_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4999,6 +5013,7 @@ def test_list_jobs_rest(request_type): # Designate an appropriate value for the returned response. 
return_value = jobs.ListJobsResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -5015,6 +5030,7 @@ def test_list_jobs_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListJobsPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_jobs_rest_use_cached_wrapped_rpc(): @@ -5468,6 +5484,15 @@ def test_update_job_rest(request_type): "properties": {}, "logging_config": {}, }, + "flink_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "savepoint_uri": "savepoint_uri_value", + "properties": {}, + "logging_config": {}, + }, "status": { "state": 1, "details": "details_value", diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py index d873909055a0..1273aa1bde2b 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py @@ -1318,8 +1318,9 @@ def test_create_node_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_node_group(request) @@ -1375,26 +1376,28 @@ async def test_create_node_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_node_group - ] = mock_object + ] = mock_rpc request = {} await client.create_node_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_node_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1724,8 +1727,9 @@ def test_resize_node_group_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.resize_node_group(request) @@ -1781,26 +1785,28 @@ async def test_resize_node_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.resize_node_group - ] = mock_object + ] = mock_rpc request = {} await client.resize_node_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.resize_node_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2169,22 +2175,23 @@ async def test_get_node_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_node_group - ] = mock_object + ] = mock_rpc request = {} await client.get_node_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_node_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py index e9ff1c5a0ec5..7cc868611e6a 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py @@ -1282,8 +1282,9 @@ def test_create_session_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_session(request) @@ -1337,26 +1338,28 @@ async def test_create_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_session - ] = mock_object + ] = mock_rpc request = {} await client.create_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1740,22 +1743,23 @@ async def test_get_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_session - ] = mock_object + ] = mock_rpc request = {} await client.get_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2116,22 +2120,23 @@ async def test_list_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sessions - ] = mock_object + ] = mock_rpc request = {} await client.list_sessions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2628,8 +2633,9 @@ def test_terminate_session_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.terminate_session(request) @@ -2685,26 +2691,28 @@ async def test_terminate_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.terminate_session - ] = mock_object + ] = mock_rpc request = {} await client.terminate_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.terminate_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3006,8 +3014,9 @@ def test_delete_session_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_session(request) @@ -3061,26 +3070,28 @@ async def test_delete_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_session - ] = mock_object + ] = mock_rpc request = {} await client.delete_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3282,6 +3293,7 @@ def test_create_session_rest(request_type): "uuid": "uuid_value", "create_time": {"seconds": 751, "nanos": 543}, "jupyter_session": {"kernel": 1, "display_name": "display_name_value"}, + "spark_connect_session": {}, "runtime_info": { "endpoints": {}, "output_uri": "output_uri_value", diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py index 3855f4b6b8be..0da9f81f5ffe 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py @@ -1393,22 +1393,23 @@ async def 
test_create_session_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_session_template - ] = mock_object + ] = mock_rpc request = {} await client.create_session_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_session_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1802,22 +1803,23 @@ async def test_update_session_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_session_template - ] = mock_object + ] = mock_rpc request = {} await client.update_session_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_session_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2204,22 +2206,23 @@ async def test_get_session_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_session_template - ] = mock_object + ] = mock_rpc request = {} await client.get_session_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_session_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2602,22 +2605,23 @@ async def test_list_session_templates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_session_templates - ] = mock_object + ] = mock_rpc request = {} await client.list_session_templates(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_session_templates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3185,22 +3189,23 @@ async def test_delete_session_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_session_template - ] = mock_object + ] = mock_rpc request = {} await client.delete_session_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_session_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3407,6 +3412,7 @@ def test_create_session_template_rest(request_type): "description": "description_value", "create_time": {"seconds": 751, "nanos": 543}, "jupyter_session": {"kernel": 1, "display_name": "display_name_value"}, + "spark_connect_session": {}, "creator": "creator_value", "labels": {}, "runtime_config": { @@ -3845,6 +3851,7 @@ def test_update_session_template_rest(request_type): "description": "description_value", "create_time": {"seconds": 751, "nanos": 543}, "jupyter_session": {"kernel": 1, "display_name": "display_name_value"}, + "spark_connect_session": {}, "creator": "creator_value", "labels": {}, "runtime_config": { diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py index 952acdc9e41e..c996bc73de7b 100644 --- 
a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py @@ -1388,22 +1388,23 @@ async def test_create_workflow_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_workflow_template - ] = mock_object + ] = mock_rpc request = {} await client.create_workflow_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_workflow_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1796,22 +1797,23 @@ async def test_get_workflow_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_workflow_template - ] = mock_object + ] = mock_rpc request = {} await client.get_workflow_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_workflow_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2132,8 +2134,9 @@ def test_instantiate_workflow_template_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.instantiate_workflow_template(request) @@ -2189,26 +2192,28 @@ async def test_instantiate_workflow_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.instantiate_workflow_template - ] = mock_object + ] = mock_rpc request = {} await client.instantiate_workflow_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.instantiate_workflow_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2532,8 +2537,9 @@ def test_instantiate_inline_workflow_template_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.instantiate_inline_workflow_template(request) @@ -2589,26 +2595,28 @@ async def test_instantiate_inline_workflow_template_async_use_cached_wrapped_rpc ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.instantiate_inline_workflow_template - ] = mock_object + ] = mock_rpc request = {} await client.instantiate_inline_workflow_template(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.instantiate_inline_workflow_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2990,22 +2998,23 @@ async def test_update_workflow_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_workflow_template - ] = mock_object + ] = mock_rpc request = {} await client.update_workflow_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_workflow_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3229,6 +3238,7 @@ def test_list_workflow_templates(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_workflow_templates(request) @@ -3241,6 +3251,7 @@ def test_list_workflow_templates(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListWorkflowTemplatesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_workflow_templates_empty_call(): @@ -3353,6 +3364,7 @@ async def test_list_workflow_templates_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_workflow_templates() @@ -3384,22 +3396,23 @@ async def test_list_workflow_templates_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_workflow_templates - ] = mock_object + ] = mock_rpc request = {} await client.list_workflow_templates(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_workflow_templates(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3424,6 +3437,7 @@ async def test_list_workflow_templates_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) response = await client.list_workflow_templates(request) @@ -3437,6 +3451,7 @@ async def test_list_workflow_templates_async( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListWorkflowTemplatesAsyncPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @@ -3969,22 +3984,23 @@ async def test_delete_workflow_template_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_workflow_template - ] = mock_object + ] = mock_rpc request = {} await client.delete_workflow_template(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_workflow_template(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4296,7 +4312,8 @@ def test_create_workflow_template_rest(request_type): } ], "encryption_config": { - "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { @@ -4437,6 +4454,24 @@ def test_create_workflow_template_rest(request_type): "properties": {}, "logging_config": {}, }, + "trino_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "flink_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "savepoint_uri": "savepoint_uri_value", + "properties": {}, + "logging_config": {}, + }, "labels": {}, "scheduling": { "max_failures_per_hour": 2243, @@ -4460,6 +4495,7 @@ def test_create_workflow_template_rest(request_type): } ], "dag_timeout": {}, + "encryption_config": {"kms_key": "kms_key_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -5604,7 +5640,8 @@ def test_instantiate_inline_workflow_template_rest(request_type): } ], "encryption_config": { - "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { @@ -5745,6 +5782,24 @@ def test_instantiate_inline_workflow_template_rest(request_type): "properties": {}, "logging_config": {}, }, + "trino_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "flink_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "savepoint_uri": "savepoint_uri_value", + "properties": {}, + "logging_config": {}, + }, "labels": {}, "scheduling": { "max_failures_per_hour": 2243, @@ -5768,6 +5823,7 @@ def test_instantiate_inline_workflow_template_rest(request_type): } ], "dag_timeout": {}, + "encryption_config": {"kms_key": "kms_key_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -6278,7 +6334,8 @@ def test_update_workflow_template_rest(request_type): } ], "encryption_config": { - "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value", + "kms_key": "kms_key_value", }, "autoscaling_config": {"policy_uri": "policy_uri_value"}, "security_config": { @@ -6419,6 +6476,24 @@ def test_update_workflow_template_rest(request_type): "properties": {}, "logging_config": {}, }, + "trino_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "flink_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "savepoint_uri": "savepoint_uri_value", + "properties": {}, + "logging_config": {}, + }, "labels": {}, "scheduling": { "max_failures_per_hour": 2243, @@ -6442,6 +6517,7 @@ def test_update_workflow_template_rest(request_type): } ], "dag_timeout": {}, + "encryption_config": {"kms_key": "kms_key_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -6838,6 +6914,7 @@ def test_list_workflow_templates_rest(request_type): # Designate an appropriate value for the returned response. return_value = workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -6854,6 +6931,7 @@ def test_list_workflow_templates_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListWorkflowTemplatesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_workflow_templates_rest_use_cached_wrapped_rpc(): @@ -8123,11 +8201,42 @@ def test_workflow_template_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_node_group_path(): +def test_crypto_key_path(): project = "squid" - region = "clam" - cluster = "whelk" - node_group = "octopus" + location = "clam" + key_ring = "whelk" + crypto_key = "octopus" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = WorkflowTemplateServiceClient.crypto_key_path( + project, location, key_ring, crypto_key + ) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "key_ring": "cuttlefish", + "crypto_key": "mussel", + } + path = WorkflowTemplateServiceClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = WorkflowTemplateServiceClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_node_group_path(): + project = "winkle" + region = "nautilus" + cluster = "scallop" + node_group = "abalone" expected = "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format( project=project, region=region, @@ -8142,10 +8251,10 @@ def test_node_group_path(): def test_parse_node_group_path(): expected = { - "project": "oyster", - "region": "nudibranch", - "cluster": "cuttlefish", - "node_group": "mussel", + "project": "squid", + "region": "clam", + "cluster": "whelk", + "node_group": "octopus", } path = WorkflowTemplateServiceClient.node_group_path(**expected) @@ -8155,9 +8264,9 @@ def test_parse_node_group_path(): def test_service_path(): - project = "winkle" - location = "nautilus" - service = "scallop" + project = "oyster" + location = "nudibranch" + service = "cuttlefish" expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -8169,9 +8278,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "abalone", - "location": "squid", - "service": "clam", + "project": "mussel", + "location": "winkle", + "service": "nautilus", } path = WorkflowTemplateServiceClient.service_path(**expected) @@ -8181,9 +8290,9 @@ def test_parse_service_path(): def test_workflow_template_path(): - project = "whelk" - region = "octopus" - workflow_template = "oyster" + project = "scallop" + region = "abalone" + workflow_template = "squid" expected = "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}".format( project=project, region=region, @@ -8197,9 +8306,9 @@ def test_workflow_template_path(): def test_parse_workflow_template_path(): expected = { - "project": "nudibranch", - "region": "cuttlefish", - "workflow_template": "mussel", + "project": "clam", + "region": "whelk", + "workflow_template": "octopus", } path = 
WorkflowTemplateServiceClient.workflow_template_path(**expected) @@ -8209,7 +8318,7 @@ def test_parse_workflow_template_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -8219,7 +8328,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "nudibranch", } path = WorkflowTemplateServiceClient.common_billing_account_path(**expected) @@ -8229,7 +8338,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -8239,7 +8348,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "mussel", } path = WorkflowTemplateServiceClient.common_folder_path(**expected) @@ -8249,7 +8358,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -8259,7 +8368,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "nautilus", } path = WorkflowTemplateServiceClient.common_organization_path(**expected) @@ -8269,7 +8378,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -8279,7 +8388,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "abalone", } path = WorkflowTemplateServiceClient.common_project_path(**expected) @@ -8289,8 +8398,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project 
= "oyster" - location = "nudibranch" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -8301,8 +8410,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "whelk", + "location": "octopus", } path = WorkflowTemplateServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/async_client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/async_client.py index 07458d75789d..91b665e49d21 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/async_client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DatastreamClient).get_transport_class, type(DatastreamClient) - ) + get_transport_class = DatastreamClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py index a4bc48a00274..d4f79a05af7f 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py @@ -776,7 +776,7 @@ def __init__( transport_init: Union[ Type[DatastreamTransport], Callable[..., DatastreamTransport] ] = ( - type(self).get_transport_class(transport) + DatastreamClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DatastreamTransport], transport) ) diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py index 64ddc0e431e3..558c8aab67c5 100644 --- 
a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.5" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/async_client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/async_client.py index b45cf101cf25..c71f11f0c58c 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/async_client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,9 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DatastreamClient).get_transport_class, type(DatastreamClient) - ) + get_transport_class = DatastreamClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py index 361e95536a5e..ce57716e9865 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py @@ -729,7 +729,7 @@ def __init__( transport_init: Union[ Type[DatastreamTransport], Callable[..., DatastreamTransport] ] = ( - type(self).get_transport_class(transport) + DatastreamClient.get_transport_class(transport) if 
isinstance(transport, str) or transport is None else cast(Callable[..., DatastreamTransport], transport) ) diff --git a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json index 8cde4a7487bd..8a69b9040e43 100644 --- a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json +++ b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datastream", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json index 00ca209534e3..09dea00ec932 100644 --- a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json +++ b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datastream", - "version": "1.9.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py index 80899677eda3..88fccb36bcff 100644 --- a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py +++ b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py @@ -1280,22 +1280,23 @@ async def test_list_connection_profiles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = 
mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connection_profiles - ] = mock_object + ] = mock_rpc request = {} await client.list_connection_profiles(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connection_profiles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1881,22 +1882,23 @@ async def test_get_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.get_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2216,8 +2218,9 @@ def test_create_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_connection_profile(request) @@ -2273,26 +2276,28 @@ async def test_create_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.create_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2632,8 +2637,9 @@ def test_update_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_connection_profile(request) @@ -2689,26 +2695,28 @@ async def test_update_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.update_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3040,8 +3048,9 @@ def test_delete_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_connection_profile(request) @@ -3097,26 +3106,28 @@ async def test_delete_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.delete_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3483,22 +3494,23 @@ async def test_discover_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.discover_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.discover_connection_profile(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.discover_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3778,22 +3790,23 @@ async def test_list_streams_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_streams - ] = mock_object + ] = mock_rpc request = {} await client.list_streams(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_streams(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4348,22 +4361,23 @@ async def test_get_stream_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_stream - ] = mock_object + ] = mock_rpc request = {} await client.get_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4669,8 +4683,9 @@ def test_create_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_stream(request) @@ -4724,26 +4739,28 @@ async def test_create_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_stream - ] = mock_object + ] = mock_rpc request = {} await client.create_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5053,8 +5070,9 @@ def test_update_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_stream(request) @@ -5108,26 +5126,28 @@ async def test_update_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_stream - ] = mock_object + ] = mock_rpc request = {} await client.update_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5429,8 +5449,9 @@ def test_delete_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_stream(request) @@ -5484,26 +5505,28 @@ async def test_delete_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_stream - ] = mock_object + ] = mock_rpc request = {} await client.delete_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5862,22 +5885,23 @@ async def test_get_stream_object_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_stream_object - ] = mock_object + ] = mock_rpc request = {} await client.get_stream_object(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_stream_object(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6253,22 +6277,23 @@ async def test_lookup_stream_object_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_stream_object - ] = mock_object + ] = mock_rpc request = {} await client.lookup_stream_object(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_stream_object(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6557,22 +6582,23 @@ async def test_list_stream_objects_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_stream_objects - ] = mock_object + ] = mock_rpc request = {} await client.list_stream_objects(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_stream_objects(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7140,22 +7166,23 @@ async def test_start_backfill_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_backfill_job - ] = mock_object + ] = mock_rpc request = {} await client.start_backfill_job(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_backfill_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7516,22 +7543,23 @@ async def test_stop_backfill_job_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_backfill_job - ] = mock_object + ] = mock_rpc request = {} await client.stop_backfill_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.stop_backfill_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7894,22 +7922,23 @@ async def test_fetch_static_ips_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_static_ips - ] = mock_object + ] = mock_rpc request = {} await client.fetch_static_ips(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_static_ips(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8413,8 +8442,9 @@ def test_create_private_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_private_connection(request) @@ -8470,26 +8500,28 @@ async def test_create_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8893,22 +8925,23 @@ async def test_get_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_private_connection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9293,22 +9326,23 @@ async def test_list_private_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_private_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_private_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_private_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9835,8 +9869,9 @@ def test_delete_private_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_private_connection(request) @@ -9892,26 +9927,28 @@ async def test_delete_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10216,8 +10253,9 @@ def test_create_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_route(request) @@ -10271,26 +10309,28 @@ async def test_create_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_route - ] = mock_object + ] = mock_rpc request = {} await client.create_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10663,22 +10703,23 @@ async def test_get_route_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_route - ] = mock_object + ] = mock_rpc request = {} await client.get_route(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11042,22 +11083,23 @@ async def test_list_routes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_routes - ] = mock_object + ] = mock_rpc request = {} await client.list_routes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_routes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11548,8 +11590,9 @@ def test_delete_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_route(request) @@ -11603,26 +11646,28 @@ async def test_delete_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_route - ] = mock_object + ] = mock_rpc request = {} await client.delete_route(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py index 1f6089fc5fd6..9626e246f848 100644 --- a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py +++ b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py @@ -1276,22 +1276,23 @@ async def test_list_connection_profiles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connection_profiles - ] = mock_object + ] = mock_rpc request = {} await client.list_connection_profiles(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connection_profiles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1877,22 +1878,23 @@ async def test_get_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.get_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2212,8 +2214,9 @@ def test_create_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_connection_profile(request) @@ -2269,26 +2272,28 @@ async def test_create_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.create_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2628,8 +2633,9 @@ def test_update_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_connection_profile(request) @@ -2685,26 +2691,28 @@ async def test_update_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.update_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3036,8 +3044,9 @@ def test_delete_connection_profile_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_connection_profile(request) @@ -3093,26 +3102,28 @@ async def test_delete_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.delete_connection_profile(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3479,22 +3490,23 @@ async def test_discover_connection_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.discover_connection_profile - ] = mock_object + ] = mock_rpc request = {} await client.discover_connection_profile(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.discover_connection_profile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3774,22 +3786,23 @@ async def test_list_streams_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_streams - ] = mock_object + ] = mock_rpc request = {} await client.list_streams(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_streams(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4338,22 +4351,23 @@ async def test_get_stream_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_stream - ] = mock_object + ] = mock_rpc request = {} await client.get_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4654,8 +4668,9 @@ def test_create_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_stream(request) @@ -4709,26 +4724,28 @@ async def test_create_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_stream - ] = mock_object + ] = mock_rpc request = {} await client.create_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5038,8 +5055,9 @@ def test_update_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_stream(request) @@ -5093,26 +5111,28 @@ async def test_update_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_stream - ] = mock_object + ] = mock_rpc request = {} await client.update_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5414,8 +5434,9 @@ def test_delete_stream_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_stream(request) @@ -5469,26 +5490,28 @@ async def test_delete_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_stream - ] = mock_object + ] = mock_rpc request = {} await client.delete_stream(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5778,8 +5801,9 @@ def test_fetch_errors_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.fetch_errors(request) @@ -5833,26 +5857,28 @@ async def test_fetch_errors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_errors - ] = mock_object + ] = mock_rpc request = {} await client.fetch_errors(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.fetch_errors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6123,22 +6149,23 @@ async def test_fetch_static_ips_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_static_ips - ] = mock_object + ] = mock_rpc request = {} await client.fetch_static_ips(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_static_ips(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6642,8 +6669,9 @@ def test_create_private_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_private_connection(request) @@ -6699,26 +6727,28 @@ async def test_create_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7122,22 +7152,23 @@ async def test_get_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7522,22 +7553,23 @@ async def test_list_private_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_private_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_private_connections(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_private_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8064,8 +8096,9 @@ def test_delete_private_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_private_connection(request) @@ -8121,26 +8154,28 @@ async def test_delete_private_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_private_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_private_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_private_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8445,8 +8480,9 @@ def test_create_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_route(request) @@ -8500,26 +8536,28 @@ async def test_create_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_route - ] = mock_object + ] = mock_rpc request = {} await client.create_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8892,22 +8930,23 @@ async def test_get_route_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_route - ] = mock_object + ] = mock_rpc request = {} await client.get_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9271,22 +9310,23 @@ async def test_list_routes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_routes - ] = mock_object + ] = mock_rpc request = {} await client.list_routes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_routes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9777,8 +9817,9 @@ def test_delete_route_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_route(request) @@ -9832,26 +9873,28 @@ async def test_delete_route_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_route - ] = mock_object + ] = mock_rpc request = {} await client.delete_route(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_route(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-deploy/CHANGELOG.md b/packages/google-cloud-deploy/CHANGELOG.md index 5a034696e1f6..3943b8fa9de3 100644 --- a/packages/google-cloud-deploy/CHANGELOG.md +++ b/packages/google-cloud-deploy/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [2.1.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-deploy-v2.0.1...google-cloud-deploy-v2.1.0) (2024-10-08) + + +### Features + +* added support for deploy policies ([bbe5daf](https://github.com/googleapis/google-cloud-python/commit/bbe5daf0c71a02ae780c7609d433787dec1bc168)) + + +### Documentation + +* Minor documentation updates ([bbe5daf](https://github.com/googleapis/google-cloud-python/commit/bbe5daf0c71a02ae780c7609d433787dec1bc168)) + +## [2.0.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-deploy-v2.0.0...google-cloud-deploy-v2.0.1) (2024-08-08) + + +### Documentation + +* very minor documentation updates ([477c8e4](https://github.com/googleapis/google-cloud-python/commit/477c8e4438b5ca2f05095955fd03cb5a6f189292)) + ## [2.0.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-deploy-v1.19.1...google-cloud-deploy-v2.0.0) (2024-07-30) diff --git a/packages/google-cloud-deploy/google/cloud/deploy/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy/__init__.py index 6a2f11ab9889..4b051017ed31 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy/__init__.py @@ -61,6 +61,7 @@ CreateChildRolloutJobRun, CreateCustomTargetTypeRequest, CreateDeliveryPipelineRequest, + CreateDeployPolicyRequest, CreateReleaseRequest, CreateRolloutRequest, 
CreateTargetRequest, @@ -74,20 +75,25 @@ DeleteAutomationRequest, DeleteCustomTargetTypeRequest, DeleteDeliveryPipelineRequest, + DeleteDeployPolicyRequest, DeleteTargetRequest, DeliveryPipeline, + DeliveryPipelineAttribute, DeployArtifact, DeployJob, DeployJobRun, DeployJobRunMetadata, DeploymentJobs, DeployParameters, + DeployPolicy, + DeployPolicyResourceSelector, ExecutionConfig, GetAutomationRequest, GetAutomationRunRequest, GetConfigRequest, GetCustomTargetTypeRequest, GetDeliveryPipelineRequest, + GetDeployPolicyRequest, GetJobRunRequest, GetReleaseRequest, GetRolloutRequest, @@ -106,6 +112,8 @@ ListCustomTargetTypesResponse, ListDeliveryPipelinesRequest, ListDeliveryPipelinesResponse, + ListDeployPoliciesRequest, + ListDeployPoliciesResponse, ListJobRunsRequest, ListJobRunsResponse, ListReleasesRequest, @@ -116,10 +124,14 @@ ListTargetsResponse, Metadata, MultiTarget, + OneTimeWindow, OperationMetadata, Phase, PipelineCondition, PipelineReadyCondition, + PolicyRule, + PolicyViolation, + PolicyViolationDetails, Postdeploy, PostdeployJob, PostdeployJobRun, @@ -144,6 +156,7 @@ RollbackTargetRequest, RollbackTargetResponse, Rollout, + RolloutRestriction, RuntimeConfig, SerialPipeline, SkaffoldModules, @@ -159,12 +172,15 @@ TargetsTypeCondition, TerminateJobRunRequest, TerminateJobRunResponse, + TimeWindows, UpdateAutomationRequest, UpdateCustomTargetTypeRequest, UpdateDeliveryPipelineRequest, + UpdateDeployPolicyRequest, UpdateTargetRequest, VerifyJob, VerifyJobRun, + WeeklyWindow, ) from google.cloud.deploy_v1.types.customtargettype_notification_payload import ( CustomTargetTypeNotificationEvent, @@ -172,6 +188,9 @@ from google.cloud.deploy_v1.types.deliverypipeline_notification_payload import ( DeliveryPipelineNotificationEvent, ) +from google.cloud.deploy_v1.types.deploypolicy_evaluation_payload import ( + DeployPolicyEvaluationEvent, +) from google.cloud.deploy_v1.types.deploypolicy_notification_payload import ( DeployPolicyNotificationEvent, ) @@ -231,6 
+250,7 @@ "CreateChildRolloutJobRun", "CreateCustomTargetTypeRequest", "CreateDeliveryPipelineRequest", + "CreateDeployPolicyRequest", "CreateReleaseRequest", "CreateRolloutRequest", "CreateTargetRequest", @@ -244,20 +264,25 @@ "DeleteAutomationRequest", "DeleteCustomTargetTypeRequest", "DeleteDeliveryPipelineRequest", + "DeleteDeployPolicyRequest", "DeleteTargetRequest", "DeliveryPipeline", + "DeliveryPipelineAttribute", "DeployArtifact", "DeployJob", "DeployJobRun", "DeployJobRunMetadata", "DeploymentJobs", "DeployParameters", + "DeployPolicy", + "DeployPolicyResourceSelector", "ExecutionConfig", "GetAutomationRequest", "GetAutomationRunRequest", "GetConfigRequest", "GetCustomTargetTypeRequest", "GetDeliveryPipelineRequest", + "GetDeployPolicyRequest", "GetJobRunRequest", "GetReleaseRequest", "GetRolloutRequest", @@ -276,6 +301,8 @@ "ListCustomTargetTypesResponse", "ListDeliveryPipelinesRequest", "ListDeliveryPipelinesResponse", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", "ListJobRunsRequest", "ListJobRunsResponse", "ListReleasesRequest", @@ -286,10 +313,14 @@ "ListTargetsResponse", "Metadata", "MultiTarget", + "OneTimeWindow", "OperationMetadata", "Phase", "PipelineCondition", "PipelineReadyCondition", + "PolicyRule", + "PolicyViolation", + "PolicyViolationDetails", "Postdeploy", "PostdeployJob", "PostdeployJobRun", @@ -313,6 +344,7 @@ "RollbackTargetRequest", "RollbackTargetResponse", "Rollout", + "RolloutRestriction", "RuntimeConfig", "SerialPipeline", "SkaffoldModules", @@ -327,17 +359,21 @@ "TargetsTypeCondition", "TerminateJobRunRequest", "TerminateJobRunResponse", + "TimeWindows", "UpdateAutomationRequest", "UpdateCustomTargetTypeRequest", "UpdateDeliveryPipelineRequest", + "UpdateDeployPolicyRequest", "UpdateTargetRequest", "VerifyJob", "VerifyJobRun", + "WeeklyWindow", "BackoffMode", "RepairState", "SkaffoldSupportState", "CustomTargetTypeNotificationEvent", "DeliveryPipelineNotificationEvent", + "DeployPolicyEvaluationEvent", 
"DeployPolicyNotificationEvent", "JobRunNotificationEvent", "Type", diff --git a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py index f41fed841208..dfb0e5435759 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.0.0" # {x-release-please-version} +__version__ = "2.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py index 80367e9c4284..c9c4ecd71f25 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py @@ -58,6 +58,7 @@ CreateChildRolloutJobRun, CreateCustomTargetTypeRequest, CreateDeliveryPipelineRequest, + CreateDeployPolicyRequest, CreateReleaseRequest, CreateRolloutRequest, CreateTargetRequest, @@ -71,20 +72,25 @@ DeleteAutomationRequest, DeleteCustomTargetTypeRequest, DeleteDeliveryPipelineRequest, + DeleteDeployPolicyRequest, DeleteTargetRequest, DeliveryPipeline, + DeliveryPipelineAttribute, DeployArtifact, DeployJob, DeployJobRun, DeployJobRunMetadata, DeploymentJobs, DeployParameters, + DeployPolicy, + DeployPolicyResourceSelector, ExecutionConfig, GetAutomationRequest, GetAutomationRunRequest, GetConfigRequest, GetCustomTargetTypeRequest, GetDeliveryPipelineRequest, + GetDeployPolicyRequest, GetJobRunRequest, GetReleaseRequest, GetRolloutRequest, @@ -103,6 +109,8 @@ ListCustomTargetTypesResponse, ListDeliveryPipelinesRequest, ListDeliveryPipelinesResponse, + ListDeployPoliciesRequest, + ListDeployPoliciesResponse, ListJobRunsRequest, ListJobRunsResponse, ListReleasesRequest, @@ -113,10 +121,14 @@ ListTargetsResponse, 
Metadata, MultiTarget, + OneTimeWindow, OperationMetadata, Phase, PipelineCondition, PipelineReadyCondition, + PolicyRule, + PolicyViolation, + PolicyViolationDetails, Postdeploy, PostdeployJob, PostdeployJobRun, @@ -141,6 +153,7 @@ RollbackTargetRequest, RollbackTargetResponse, Rollout, + RolloutRestriction, RuntimeConfig, SerialPipeline, SkaffoldModules, @@ -156,12 +169,15 @@ TargetsTypeCondition, TerminateJobRunRequest, TerminateJobRunResponse, + TimeWindows, UpdateAutomationRequest, UpdateCustomTargetTypeRequest, UpdateDeliveryPipelineRequest, + UpdateDeployPolicyRequest, UpdateTargetRequest, VerifyJob, VerifyJobRun, + WeeklyWindow, ) from .types.customtargettype_notification_payload import ( CustomTargetTypeNotificationEvent, @@ -169,6 +185,7 @@ from .types.deliverypipeline_notification_payload import ( DeliveryPipelineNotificationEvent, ) +from .types.deploypolicy_evaluation_payload import DeployPolicyEvaluationEvent from .types.deploypolicy_notification_payload import DeployPolicyNotificationEvent from .types.jobrun_notification_payload import JobRunNotificationEvent from .types.log_enums import Type @@ -219,6 +236,7 @@ "CreateChildRolloutJobRun", "CreateCustomTargetTypeRequest", "CreateDeliveryPipelineRequest", + "CreateDeployPolicyRequest", "CreateReleaseRequest", "CreateRolloutRequest", "CreateTargetRequest", @@ -233,15 +251,20 @@ "DeleteAutomationRequest", "DeleteCustomTargetTypeRequest", "DeleteDeliveryPipelineRequest", + "DeleteDeployPolicyRequest", "DeleteTargetRequest", "DeliveryPipeline", + "DeliveryPipelineAttribute", "DeliveryPipelineNotificationEvent", "DeployArtifact", "DeployJob", "DeployJobRun", "DeployJobRunMetadata", "DeployParameters", + "DeployPolicy", + "DeployPolicyEvaluationEvent", "DeployPolicyNotificationEvent", + "DeployPolicyResourceSelector", "DeploymentJobs", "ExecutionConfig", "GetAutomationRequest", @@ -249,6 +272,7 @@ "GetConfigRequest", "GetCustomTargetTypeRequest", "GetDeliveryPipelineRequest", + "GetDeployPolicyRequest", 
"GetJobRunRequest", "GetReleaseRequest", "GetRolloutRequest", @@ -268,6 +292,8 @@ "ListCustomTargetTypesResponse", "ListDeliveryPipelinesRequest", "ListDeliveryPipelinesResponse", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", "ListJobRunsRequest", "ListJobRunsResponse", "ListReleasesRequest", @@ -278,10 +304,14 @@ "ListTargetsResponse", "Metadata", "MultiTarget", + "OneTimeWindow", "OperationMetadata", "Phase", "PipelineCondition", "PipelineReadyCondition", + "PolicyRule", + "PolicyViolation", + "PolicyViolationDetails", "Postdeploy", "PostdeployJob", "PostdeployJobRun", @@ -309,6 +339,7 @@ "RollbackTargetResponse", "Rollout", "RolloutNotificationEvent", + "RolloutRestriction", "RolloutUpdateEvent", "RuntimeConfig", "SerialPipeline", @@ -326,11 +357,14 @@ "TargetsTypeCondition", "TerminateJobRunRequest", "TerminateJobRunResponse", + "TimeWindows", "Type", "UpdateAutomationRequest", "UpdateCustomTargetTypeRequest", "UpdateDeliveryPipelineRequest", + "UpdateDeployPolicyRequest", "UpdateTargetRequest", "VerifyJob", "VerifyJobRun", + "WeeklyWindow", ) diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json index a8eee6244a5f..32200eac6f82 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json @@ -50,6 +50,11 @@ "create_delivery_pipeline" ] }, + "CreateDeployPolicy": { + "methods": [ + "create_deploy_policy" + ] + }, "CreateRelease": { "methods": [ "create_release" @@ -80,6 +85,11 @@ "delete_delivery_pipeline" ] }, + "DeleteDeployPolicy": { + "methods": [ + "delete_deploy_policy" + ] + }, "DeleteTarget": { "methods": [ "delete_target" @@ -110,6 +120,11 @@ "get_delivery_pipeline" ] }, + "GetDeployPolicy": { + "methods": [ + "get_deploy_policy" + ] + }, "GetJobRun": { "methods": [ "get_job_run" @@ -155,6 +170,11 @@ "list_delivery_pipelines" ] }, + 
"ListDeployPolicies": { + "methods": [ + "list_deploy_policies" + ] + }, "ListJobRuns": { "methods": [ "list_job_runs" @@ -205,6 +225,11 @@ "update_delivery_pipeline" ] }, + "UpdateDeployPolicy": { + "methods": [ + "update_deploy_policy" + ] + }, "UpdateTarget": { "methods": [ "update_target" @@ -255,6 +280,11 @@ "create_delivery_pipeline" ] }, + "CreateDeployPolicy": { + "methods": [ + "create_deploy_policy" + ] + }, "CreateRelease": { "methods": [ "create_release" @@ -285,6 +315,11 @@ "delete_delivery_pipeline" ] }, + "DeleteDeployPolicy": { + "methods": [ + "delete_deploy_policy" + ] + }, "DeleteTarget": { "methods": [ "delete_target" @@ -315,6 +350,11 @@ "get_delivery_pipeline" ] }, + "GetDeployPolicy": { + "methods": [ + "get_deploy_policy" + ] + }, "GetJobRun": { "methods": [ "get_job_run" @@ -360,6 +400,11 @@ "list_delivery_pipelines" ] }, + "ListDeployPolicies": { + "methods": [ + "list_deploy_policies" + ] + }, "ListJobRuns": { "methods": [ "list_job_runs" @@ -410,6 +455,11 @@ "update_delivery_pipeline" ] }, + "UpdateDeployPolicy": { + "methods": [ + "update_deploy_policy" + ] + }, "UpdateTarget": { "methods": [ "update_target" @@ -460,6 +510,11 @@ "create_delivery_pipeline" ] }, + "CreateDeployPolicy": { + "methods": [ + "create_deploy_policy" + ] + }, "CreateRelease": { "methods": [ "create_release" @@ -490,6 +545,11 @@ "delete_delivery_pipeline" ] }, + "DeleteDeployPolicy": { + "methods": [ + "delete_deploy_policy" + ] + }, "DeleteTarget": { "methods": [ "delete_target" @@ -520,6 +580,11 @@ "get_delivery_pipeline" ] }, + "GetDeployPolicy": { + "methods": [ + "get_deploy_policy" + ] + }, "GetJobRun": { "methods": [ "get_job_run" @@ -565,6 +630,11 @@ "list_delivery_pipelines" ] }, + "ListDeployPolicies": { + "methods": [ + "list_deploy_policies" + ] + }, "ListJobRuns": { "methods": [ "list_job_runs" @@ -615,6 +685,11 @@ "update_delivery_pipeline" ] }, + "UpdateDeployPolicy": { + "methods": [ + "update_deploy_policy" + ] + }, "UpdateTarget": { "methods": [ 
"update_target" diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py index f41fed841208..dfb0e5435759 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.0.0" # {x-release-please-version} +__version__ = "2.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py index 87618574717d..d4fb28cb770e 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -96,6 +95,8 @@ class CloudDeployAsyncClient: parse_delivery_pipeline_path = staticmethod( CloudDeployClient.parse_delivery_pipeline_path ) + deploy_policy_path = staticmethod(CloudDeployClient.deploy_policy_path) + parse_deploy_policy_path = staticmethod(CloudDeployClient.parse_deploy_policy_path) job_path = staticmethod(CloudDeployClient.job_path) parse_job_path = staticmethod(CloudDeployClient.parse_job_path) job_run_path = staticmethod(CloudDeployClient.job_run_path) @@ -232,9 +233,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(CloudDeployClient).get_transport_class, type(CloudDeployClient) - ) + get_transport_class = CloudDeployClient.get_transport_class def __init__( self, @@ -2829,6 +2828,649 @@ async def sample_abandon_release(): # Done; return the response. return response + async def create_deploy_policy( + self, + request: Optional[Union[cloud_deploy.CreateDeployPolicyRequest, dict]] = None, + *, + parent: Optional[str] = None, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + deploy_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new DeployPolicy in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.CreateDeployPolicyRequest, dict]]): + The request object. The request object for ``CreateDeployPolicy``. + parent (:class:`str`): + Required. The parent collection in which the + ``DeployPolicy`` must be created. The format is + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy (:class:`google.cloud.deploy_v1.types.DeployPolicy`): + Required. The ``DeployPolicy`` to create. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy_id (:class:`str`): + Required. ID of the ``DeployPolicy``. + This corresponds to the ``deploy_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deploy_policy, deploy_policy_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.CreateDeployPolicyRequest): + request = cloud_deploy.CreateDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if deploy_policy_id is not None: + request.deploy_policy_id = deploy_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_deploy_policy( + self, + request: Optional[Union[cloud_deploy.UpdateDeployPolicyRequest, dict]] = None, + *, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.UpdateDeployPolicyRequest, dict]]): + The request object. The request object for ``UpdateDeployPolicy``. + deploy_policy (:class:`google.cloud.deploy_v1.types.DeployPolicy`): + Required. The ``DeployPolicy`` to update. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten by the update in the ``DeployPolicy`` + resource. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deploy_policy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.UpdateDeployPolicyRequest): + request = cloud_deploy.UpdateDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deploy_policy.name", request.deploy_policy.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_deploy_policy( + self, + request: Optional[Union[cloud_deploy.DeleteDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.DeleteDeployPolicyRequest, dict]]): + The request object. The request object for ``DeleteDeployPolicy``. + name (:class:`str`): + Required. The name of the ``DeployPolicy`` to delete. 
+ The format is + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.DeleteDeployPolicyRequest): + request = cloud_deploy.DeleteDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_deploy_policies( + self, + request: Optional[Union[cloud_deploy.ListDeployPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeployPoliciesAsyncPager: + r"""Lists DeployPolicies in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.ListDeployPoliciesRequest, dict]]): + The request object. The request object for ``ListDeployPolicies``. + parent (:class:`str`): + Required. The parent, which owns this collection of + deploy policies. Format must be + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesAsyncPager: + The response object from ListDeployPolicies. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.ListDeployPoliciesRequest): + request = cloud_deploy.ListDeployPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_deploy_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeployPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deploy_policy( + self, + request: Optional[Union[cloud_deploy.GetDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.DeployPolicy: + r"""Gets details of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deploy_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.GetDeployPolicyRequest, dict]]): + The request object. The request object for ``GetDeployPolicy`` + name (:class:`str`): + Required. Name of the ``DeployPolicy``. Format must be + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.types.DeployPolicy: + A DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.GetDeployPolicyRequest): + request = cloud_deploy.GetDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def approve_rollout( self, request: Optional[Union[cloud_deploy.ApproveRolloutRequest, dict]] = None, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py index 55a5228129f9..8bb6a9e6f9b9 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py @@ -349,6 +349,28 @@ def parse_delivery_pipeline_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def deploy_policy_path( + project: str, + location: str, + deploy_policy: str, + ) -> str: + """Returns a fully-qualified deploy_policy string.""" + return "projects/{project}/locations/{location}/deployPolicies/{deploy_policy}".format( + project=project, + location=location, + deploy_policy=deploy_policy, + ) + + @staticmethod + def parse_deploy_policy_path(path: str) -> Dict[str, str]: + """Parses a deploy_policy path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/deployPolicies/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def job_path( project: str, @@ -1017,7 +1039,7 @@ def __init__( transport_init: Union[ Type[CloudDeployTransport], Callable[..., CloudDeployTransport] ] = ( - type(self).get_transport_class(transport) + CloudDeployClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudDeployTransport], transport) ) @@ -3507,6 +3529,634 @@ def sample_abandon_release(): # Done; return the response. 
return response + def create_deploy_policy( + self, + request: Optional[Union[cloud_deploy.CreateDeployPolicyRequest, dict]] = None, + *, + parent: Optional[str] = None, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + deploy_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new DeployPolicy in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.CreateDeployPolicyRequest, dict]): + The request object. The request object for ``CreateDeployPolicy``. + parent (str): + Required. The parent collection in which the + ``DeployPolicy`` must be created. 
The format is + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to create. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy_id (str): + Required. ID of the ``DeployPolicy``. + This corresponds to the ``deploy_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deploy_policy, deploy_policy_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, cloud_deploy.CreateDeployPolicyRequest): + request = cloud_deploy.CreateDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if deploy_policy_id is not None: + request.deploy_policy_id = deploy_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_deploy_policy( + self, + request: Optional[Union[cloud_deploy.UpdateDeployPolicyRequest, dict]] = None, + *, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.UpdateDeployPolicyRequest, dict]): + The request object. The request object for ``UpdateDeployPolicy``. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to update. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten by the update in the ``DeployPolicy`` + resource. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deploy_policy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.UpdateDeployPolicyRequest): + request = cloud_deploy.UpdateDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deploy_policy.name", request.deploy_policy.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_deploy_policy( + self, + request: Optional[Union[cloud_deploy.DeleteDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.DeleteDeployPolicyRequest, dict]): + The request object. The request object for ``DeleteDeployPolicy``. + name (str): + Required. The name of the ``DeployPolicy`` to delete. + The format is + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.DeleteDeployPolicyRequest): + request = cloud_deploy.DeleteDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_deploy_policies( + self, + request: Optional[Union[cloud_deploy.ListDeployPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeployPoliciesPager: + r"""Lists DeployPolicies in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.ListDeployPoliciesRequest, dict]): + The request object. The request object for ``ListDeployPolicies``. + parent (str): + Required. 
The parent, which owns this collection of + deploy policies. Format must be + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesPager: + The response object from ListDeployPolicies. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.ListDeployPoliciesRequest): + request = cloud_deploy.ListDeployPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_deploy_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeployPoliciesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_deploy_policy( + self, + request: Optional[Union[cloud_deploy.GetDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.DeployPolicy: + r"""Gets details of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_deploy_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.GetDeployPolicyRequest, dict]): + The request object. The request object for ``GetDeployPolicy`` + name (str): + Required. Name of the ``DeployPolicy``. Format must be + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.types.DeployPolicy: + A DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.GetDeployPolicyRequest): + request = cloud_deploy.GetDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def approve_rollout( self, request: Optional[Union[cloud_deploy.ApproveRolloutRequest, dict]] = None, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py index 508ba8ca2d07..29ee6b427605 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py @@ -649,6 +649,158 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListDeployPoliciesPager: + """A pager for iterating through ``list_deploy_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deploy_policies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeployPolicies`` requests and continue to iterate + through the ``deploy_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloud_deploy.ListDeployPoliciesResponse], + request: cloud_deploy.ListDeployPoliciesRequest, + response: cloud_deploy.ListDeployPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.deploy_v1.types.ListDeployPoliciesRequest): + The initial request object. 
+ response (google.cloud.deploy_v1.types.ListDeployPoliciesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloud_deploy.ListDeployPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloud_deploy.ListDeployPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[cloud_deploy.DeployPolicy]: + for page in self.pages: + yield from page.deploy_policies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeployPoliciesAsyncPager: + """A pager for iterating through ``list_deploy_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deploy_policies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeployPolicies`` requests and continue to iterate + through the ``deploy_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[cloud_deploy.ListDeployPoliciesResponse]], + request: cloud_deploy.ListDeployPoliciesRequest, + response: cloud_deploy.ListDeployPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.deploy_v1.types.ListDeployPoliciesRequest): + The initial request object. + response (google.cloud.deploy_v1.types.ListDeployPoliciesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloud_deploy.ListDeployPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloud_deploy.ListDeployPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[cloud_deploy.DeployPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.deploy_policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListRolloutsPager: """A pager for 
iterating through ``list_rollouts`` requests. diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py index d3042176dd81..d52eff30b350 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py @@ -304,6 +304,49 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_deploy_policy: gapic_v1.method.wrap_method( + self.create_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.update_deploy_policy: gapic_v1.method.wrap_method( + self.update_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_deploy_policy: gapic_v1.method.wrap_method( + self.delete_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.list_deploy_policies: gapic_v1.method.wrap_method( + self.list_deploy_policies, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_deploy_policy: gapic_v1.method.wrap_method( + self.get_deploy_policy, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.approve_rollout: gapic_v1.method.wrap_method( self.approve_rollout, default_timeout=60.0, @@ -699,6 +742,54 @@ def abandon_release( ]: raise NotImplementedError() + @property + def create_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.CreateDeployPolicyRequest], + Union[operations_pb2.Operation, 
Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.UpdateDeployPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.DeleteDeployPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + Union[ + cloud_deploy.ListDeployPoliciesResponse, + Awaitable[cloud_deploy.ListDeployPoliciesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.GetDeployPolicyRequest], + Union[cloud_deploy.DeployPolicy, Awaitable[cloud_deploy.DeployPolicy]], + ]: + raise NotImplementedError() + @property def approve_rollout( self, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py index 4ce9240634f7..4590ce080f41 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py @@ -808,6 +808,140 @@ def abandon_release( ) return self._stubs["abandon_release"] + @property + def create_deploy_policy( + self, + ) -> Callable[[cloud_deploy.CreateDeployPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the create deploy policy method over gRPC. + + Creates a new DeployPolicy in a given project and + location. + + Returns: + Callable[[~.CreateDeployPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deploy_policy" not in self._stubs: + self._stubs["create_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/CreateDeployPolicy", + request_serializer=cloud_deploy.CreateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deploy_policy"] + + @property + def update_deploy_policy( + self, + ) -> Callable[[cloud_deploy.UpdateDeployPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the update deploy policy method over gRPC. + + Updates the parameters of a single DeployPolicy. + + Returns: + Callable[[~.UpdateDeployPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_deploy_policy" not in self._stubs: + self._stubs["update_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/UpdateDeployPolicy", + request_serializer=cloud_deploy.UpdateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_deploy_policy"] + + @property + def delete_deploy_policy( + self, + ) -> Callable[[cloud_deploy.DeleteDeployPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the delete deploy policy method over gRPC. + + Deletes a single DeployPolicy. + + Returns: + Callable[[~.DeleteDeployPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deploy_policy" not in self._stubs: + self._stubs["delete_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/DeleteDeployPolicy", + request_serializer=cloud_deploy.DeleteDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deploy_policy"] + + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + cloud_deploy.ListDeployPoliciesResponse, + ]: + r"""Return a callable for the list deploy policies method over gRPC. + + Lists DeployPolicies in a given project and location. + + Returns: + Callable[[~.ListDeployPoliciesRequest], + ~.ListDeployPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deploy_policies" not in self._stubs: + self._stubs["list_deploy_policies"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/ListDeployPolicies", + request_serializer=cloud_deploy.ListDeployPoliciesRequest.serialize, + response_deserializer=cloud_deploy.ListDeployPoliciesResponse.deserialize, + ) + return self._stubs["list_deploy_policies"] + + @property + def get_deploy_policy( + self, + ) -> Callable[[cloud_deploy.GetDeployPolicyRequest], cloud_deploy.DeployPolicy]: + r"""Return a callable for the get deploy policy method over gRPC. + + Gets details of a single DeployPolicy. + + Returns: + Callable[[~.GetDeployPolicyRequest], + ~.DeployPolicy]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deploy_policy" not in self._stubs: + self._stubs["get_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/GetDeployPolicy", + request_serializer=cloud_deploy.GetDeployPolicyRequest.serialize, + response_deserializer=cloud_deploy.DeployPolicy.deserialize, + ) + return self._stubs["get_deploy_policy"] + @property def approve_rollout( self, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py index 62883f74e557..db641300032a 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py @@ -834,6 +834,148 @@ def abandon_release( ) return self._stubs["abandon_release"] + @property + def create_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.CreateDeployPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create deploy policy method over gRPC. + + Creates a new DeployPolicy in a given project and + location. + + Returns: + Callable[[~.CreateDeployPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_deploy_policy" not in self._stubs: + self._stubs["create_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/CreateDeployPolicy", + request_serializer=cloud_deploy.CreateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deploy_policy"] + + @property + def update_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.UpdateDeployPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update deploy policy method over gRPC. + + Updates the parameters of a single DeployPolicy. + + Returns: + Callable[[~.UpdateDeployPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_deploy_policy" not in self._stubs: + self._stubs["update_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/UpdateDeployPolicy", + request_serializer=cloud_deploy.UpdateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_deploy_policy"] + + @property + def delete_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.DeleteDeployPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete deploy policy method over gRPC. + + Deletes a single DeployPolicy. + + Returns: + Callable[[~.DeleteDeployPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_deploy_policy" not in self._stubs: + self._stubs["delete_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/DeleteDeployPolicy", + request_serializer=cloud_deploy.DeleteDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deploy_policy"] + + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + Awaitable[cloud_deploy.ListDeployPoliciesResponse], + ]: + r"""Return a callable for the list deploy policies method over gRPC. + + Lists DeployPolicies in a given project and location. + + Returns: + Callable[[~.ListDeployPoliciesRequest], + Awaitable[~.ListDeployPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deploy_policies" not in self._stubs: + self._stubs["list_deploy_policies"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/ListDeployPolicies", + request_serializer=cloud_deploy.ListDeployPoliciesRequest.serialize, + response_deserializer=cloud_deploy.ListDeployPoliciesResponse.deserialize, + ) + return self._stubs["list_deploy_policies"] + + @property + def get_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.GetDeployPolicyRequest], Awaitable[cloud_deploy.DeployPolicy] + ]: + r"""Return a callable for the get deploy policy method over gRPC. + + Gets details of a single DeployPolicy. + + Returns: + Callable[[~.GetDeployPolicyRequest], + Awaitable[~.DeployPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deploy_policy" not in self._stubs: + self._stubs["get_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/GetDeployPolicy", + request_serializer=cloud_deploy.GetDeployPolicyRequest.serialize, + response_deserializer=cloud_deploy.DeployPolicy.deserialize, + ) + return self._stubs["get_deploy_policy"] + @property def approve_rollout( self, @@ -1577,6 +1719,49 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_deploy_policy: gapic_v1.method_async.wrap_method( + self.create_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.update_deploy_policy: gapic_v1.method_async.wrap_method( + self.update_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_deploy_policy: gapic_v1.method_async.wrap_method( + self.delete_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.list_deploy_policies: gapic_v1.method_async.wrap_method( + self.list_deploy_policies, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_deploy_policy: gapic_v1.method_async.wrap_method( + self.get_deploy_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.approve_rollout: gapic_v1.method_async.wrap_method( self.approve_rollout, default_timeout=60.0, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py 
b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py index a96dad3bb982..bd704448565e 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py @@ -138,6 +138,14 @@ def post_create_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_create_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_release(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -186,6 +194,14 @@ def post_delete_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_target(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -234,6 +250,14 @@ def post_get_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_get_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_job_run(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -306,6 +330,14 @@ def post_list_delivery_pipelines(self, response): logging.log(f"Received response: {response}") return response + def pre_list_deploy_policies(self, 
request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deploy_policies(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_job_runs(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -386,6 +418,14 @@ def post_update_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_update_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_target(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -584,6 +624,29 @@ def post_create_delivery_pipeline( """ return response + def pre_create_deploy_policy( + self, + request: cloud_deploy.CreateDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.CreateDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_create_deploy_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + def pre_create_release( self, request: cloud_deploy.CreateReleaseRequest, @@ -722,6 +785,29 @@ def post_delete_delivery_pipeline( """ return response + def pre_delete_deploy_policy( + self, + request: cloud_deploy.DeleteDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.DeleteDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_delete_deploy_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + def pre_delete_target( self, request: cloud_deploy.DeleteTargetRequest, @@ -858,6 +944,29 @@ def post_get_delivery_pipeline( """ return response + def pre_get_deploy_policy( + self, + request: cloud_deploy.GetDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.GetDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_get_deploy_policy( + self, response: cloud_deploy.DeployPolicy + ) -> cloud_deploy.DeployPolicy: + """Post-rpc interceptor for get_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + def pre_get_job_run( self, request: cloud_deploy.GetJobRunRequest, @@ -1057,6 +1166,29 @@ def post_list_delivery_pipelines( """ return response + def pre_list_deploy_policies( + self, + request: cloud_deploy.ListDeployPoliciesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.ListDeployPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_deploy_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_list_deploy_policies( + self, response: cloud_deploy.ListDeployPoliciesResponse + ) -> cloud_deploy.ListDeployPoliciesResponse: + """Post-rpc interceptor for list_deploy_policies + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + def pre_list_job_runs( self, request: cloud_deploy.ListJobRunsRequest, @@ -1285,6 +1417,29 @@ def post_update_delivery_pipeline( """ return response + def pre_update_deploy_policy( + self, + request: cloud_deploy.UpdateDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.UpdateDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_update_deploy_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + def pre_update_target( self, request: cloud_deploy.UpdateTargetRequest, @@ -2413,6 +2568,103 @@ def __call__( resp = self._interceptor.post_create_delivery_pipeline(resp) return resp + class _CreateDeployPolicy(CloudDeployRestStub): + def __hash__(self): + return hash("CreateDeployPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "deployPolicyId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.CreateDeployPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create deploy policy method over HTTP. + + Args: + request (~.cloud_deploy.CreateDeployPolicyRequest): + The request object. The request object for ``CreateDeployPolicy``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/deployPolicies", + "body": "deploy_policy", + }, + ] + request, metadata = self._interceptor.pre_create_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.CreateDeployPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_deploy_policy(resp) + return resp + class _CreateRelease(CloudDeployRestStub): def __hash__(self): return hash("CreateRelease") @@ -2963,9 +3215,9 @@ def __call__( resp = self._interceptor.post_delete_delivery_pipeline(resp) return resp - class _DeleteTarget(CloudDeployRestStub): + class _DeleteDeployPolicy(CloudDeployRestStub): def __hash__(self): - return hash("DeleteTarget") + return hash("DeleteDeployPolicy") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2979,17 +3231,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: cloud_deploy.DeleteTargetRequest, + request: cloud_deploy.DeleteDeployPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete target method over HTTP. + r"""Call the delete deploy policy method over HTTP. Args: - request (~.cloud_deploy.DeleteTargetRequest): - The request object. The request object for ``DeleteTarget``. + request (~.cloud_deploy.DeleteDeployPolicyRequest): + The request object. The request object for ``DeleteDeployPolicy``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -3007,11 +3259,13 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/targets/*}", + "uri": "/v1/{name=projects/*/locations/*/deployPolicies/*}", }, ] - request, metadata = self._interceptor.pre_delete_target(request, metadata) - pb_request = cloud_deploy.DeleteTargetRequest.pb(request) + request, metadata = self._interceptor.pre_delete_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.DeleteDeployPolicyRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3046,12 +3300,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_target(resp) + resp = self._interceptor.post_delete_deploy_policy(resp) return resp - class _GetAutomation(CloudDeployRestStub): + class _DeleteTarget(CloudDeployRestStub): def __hash__(self): - return hash("GetAutomation") + return hash("DeleteTarget") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3065,17 +3319,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: cloud_deploy.GetAutomationRequest, + request: cloud_deploy.DeleteTargetRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> cloud_deploy.Automation: - r"""Call the get automation method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the delete target method over HTTP. Args: - request (~.cloud_deploy.GetAutomationRequest): - The request object. The request object for ``GetAutomation`` + request (~.cloud_deploy.DeleteTargetRequest): + The request object. The request object for ``DeleteTarget``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -3083,26 +3337,21 @@ def __call__( sent along with the request as metadata. Returns: - ~.cloud_deploy.Automation: - An ``Automation`` resource in the Cloud Deploy API. - - An ``Automation`` enables the automation of manually - driven actions for a Delivery Pipeline, which includes - Release promotion among Targets, Rollout repair and - Rollout deployment strategy advancement. The intention - of Automation is to reduce manual intervention in the - continuous delivery process. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*/automations/*}", + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/targets/*}", }, ] - request, metadata = self._interceptor.pre_get_automation(request, metadata) - pb_request = cloud_deploy.GetAutomationRequest.pb(request) + request, metadata = self._interceptor.pre_delete_target(request, metadata) + pb_request = cloud_deploy.DeleteTargetRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3135,16 +3384,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = cloud_deploy.Automation() - pb_resp = cloud_deploy.Automation.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_automation(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_target(resp) return resp - class _GetAutomationRun(CloudDeployRestStub): + class _GetAutomation(CloudDeployRestStub): def __hash__(self): - return hash("GetAutomationRun") + return hash("GetAutomation") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3158,7 +3405,100 @@ def 
_get_unset_required_fields(cls, message_dict): def __call__( self, - request: cloud_deploy.GetAutomationRunRequest, + request: cloud_deploy.GetAutomationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.Automation: + r"""Call the get automation method over HTTP. + + Args: + request (~.cloud_deploy.GetAutomationRequest): + The request object. The request object for ``GetAutomation`` + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.Automation: + An ``Automation`` resource in the Cloud Deploy API. + + An ``Automation`` enables the automation of manually + driven actions for a Delivery Pipeline, which includes + Release promotion among Targets, Rollout repair and + Rollout deployment strategy advancement. The intention + of Automation is to reduce manual intervention in the + continuous delivery process. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*/automations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_automation(request, metadata) + pb_request = cloud_deploy.GetAutomationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.Automation() + pb_resp = cloud_deploy.Automation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_automation(resp) + return resp + + class _GetAutomationRun(CloudDeployRestStub): + def __hash__(self): + return hash("GetAutomationRun") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetAutomationRunRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, @@ -3502,6 +3842,97 @@ def __call__( resp = self._interceptor.post_get_delivery_pipeline(resp) return resp + class _GetDeployPolicy(CloudDeployRestStub): + def __hash__(self): + return hash("GetDeployPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetDeployPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.DeployPolicy: + r"""Call the get deploy policy method over HTTP. + + Args: + request (~.cloud_deploy.GetDeployPolicyRequest): + The request object. The request object for ``GetDeployPolicy`` + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.cloud_deploy.DeployPolicy: + A ``DeployPolicy`` resource in the Cloud Deploy API. + + A ``DeployPolicy`` inhibits manual or automation-driven + actions within a Delivery Pipeline or Target. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deployPolicies/*}", + }, + ] + request, metadata = self._interceptor.pre_get_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.GetDeployPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.DeployPolicy() + pb_resp = cloud_deploy.DeployPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_deploy_policy(resp) + return resp + class _GetJobRun(CloudDeployRestStub): def __hash__(self): return hash("GetJobRun") @@ -4300,6 +4731,93 @@ def __call__( resp = self._interceptor.post_list_delivery_pipelines(resp) return resp + class _ListDeployPolicies(CloudDeployRestStub): + def __hash__(self): + return hash("ListDeployPolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.ListDeployPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.ListDeployPoliciesResponse: + r"""Call the list deploy policies method over HTTP. + + Args: + request (~.cloud_deploy.ListDeployPoliciesRequest): + The request object. The request object for ``ListDeployPolicies``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.ListDeployPoliciesResponse: + The response object from ``ListDeployPolicies``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/deployPolicies", + }, + ] + request, metadata = self._interceptor.pre_list_deploy_policies( + request, metadata + ) + pb_request = cloud_deploy.ListDeployPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.ListDeployPoliciesResponse() + pb_resp = cloud_deploy.ListDeployPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_deploy_policies(resp) + return resp + class _ListJobRuns(CloudDeployRestStub): def __hash__(self): return hash("ListJobRuns") @@ -5216,6 +5734,103 @@ def __call__( resp = self._interceptor.post_update_delivery_pipeline(resp) return resp + class _UpdateDeployPolicy(CloudDeployRestStub): + def __hash__(self): + return hash("UpdateDeployPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.UpdateDeployPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update deploy policy method over HTTP. + + Args: + request (~.cloud_deploy.UpdateDeployPolicyRequest): + The request object. The request object for ``UpdateDeployPolicy``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{deploy_policy.name=projects/*/locations/*/deployPolicies/*}", + "body": "deploy_policy", + }, + ] + request, metadata = self._interceptor.pre_update_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.UpdateDeployPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_deploy_policy(resp) + return resp + class _UpdateTarget(CloudDeployRestStub): def __hash__(self): return hash("UpdateTarget") @@ -5390,6 +6005,14 @@ def create_delivery_pipeline( # In C++ this would require a dynamic_cast return self._CreateDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def create_deploy_policy( + self, + ) -> Callable[[cloud_deploy.CreateDeployPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def create_release( self, @@ -5442,6 +6065,14 @@ def delete_delivery_pipeline( # In C++ this would require a dynamic_cast return self._DeleteDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_deploy_policy( + self, + ) -> Callable[[cloud_deploy.DeleteDeployPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def delete_target( self, @@ -5494,6 +6125,14 @@ def get_delivery_pipeline( # In C++ this would require a dynamic_cast return self._GetDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def get_deploy_policy( + self, + ) -> Callable[[cloud_deploy.GetDeployPolicyRequest], cloud_deploy.DeployPolicy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def get_job_run( self, @@ -5577,6 +6216,17 @@ def list_delivery_pipelines( # In C++ this would require a dynamic_cast return self._ListDeliveryPipelines(self._session, self._host, self._interceptor) # type: ignore + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + cloud_deploy.ListDeployPoliciesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeployPolicies(self._session, self._host, self._interceptor) # type: ignore + @property def list_job_runs( self, @@ -5669,6 +6319,14 @@ def update_delivery_pipeline( # In C++ this would require a dynamic_cast return self._UpdateDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def update_deploy_policy( + self, + ) -> Callable[[cloud_deploy.UpdateDeployPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def update_target( self, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py index 215038669273..7017500e27c7 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py @@ -52,6 +52,7 @@ CreateChildRolloutJobRun, CreateCustomTargetTypeRequest, CreateDeliveryPipelineRequest, + CreateDeployPolicyRequest, CreateReleaseRequest, CreateRolloutRequest, CreateTargetRequest, @@ -65,20 +66,25 @@ DeleteAutomationRequest, DeleteCustomTargetTypeRequest, DeleteDeliveryPipelineRequest, + DeleteDeployPolicyRequest, DeleteTargetRequest, DeliveryPipeline, + DeliveryPipelineAttribute, DeployArtifact, DeployJob, DeployJobRun, DeployJobRunMetadata, DeploymentJobs, DeployParameters, + DeployPolicy, + DeployPolicyResourceSelector, ExecutionConfig, GetAutomationRequest, GetAutomationRunRequest, GetConfigRequest, GetCustomTargetTypeRequest, GetDeliveryPipelineRequest, + GetDeployPolicyRequest, GetJobRunRequest, GetReleaseRequest, GetRolloutRequest, @@ -97,6 +103,8 @@ ListCustomTargetTypesResponse, ListDeliveryPipelinesRequest, ListDeliveryPipelinesResponse, + ListDeployPoliciesRequest, + ListDeployPoliciesResponse, ListJobRunsRequest, ListJobRunsResponse, ListReleasesRequest, @@ -107,10 +115,14 @@ ListTargetsResponse, Metadata, MultiTarget, + OneTimeWindow, OperationMetadata, Phase, PipelineCondition, PipelineReadyCondition, + PolicyRule, + PolicyViolation, + PolicyViolationDetails, Postdeploy, PostdeployJob, PostdeployJobRun, @@ -135,6 +147,7 @@ RollbackTargetRequest, RollbackTargetResponse, Rollout, + RolloutRestriction, RuntimeConfig, SerialPipeline, SkaffoldModules, @@ -150,15 +163,19 @@ TargetsTypeCondition, TerminateJobRunRequest, 
TerminateJobRunResponse, + TimeWindows, UpdateAutomationRequest, UpdateCustomTargetTypeRequest, UpdateDeliveryPipelineRequest, + UpdateDeployPolicyRequest, UpdateTargetRequest, VerifyJob, VerifyJobRun, + WeeklyWindow, ) from .customtargettype_notification_payload import CustomTargetTypeNotificationEvent from .deliverypipeline_notification_payload import DeliveryPipelineNotificationEvent +from .deploypolicy_evaluation_payload import DeployPolicyEvaluationEvent from .deploypolicy_notification_payload import DeployPolicyNotificationEvent from .jobrun_notification_payload import JobRunNotificationEvent from .log_enums import Type @@ -206,6 +223,7 @@ "CreateChildRolloutJobRun", "CreateCustomTargetTypeRequest", "CreateDeliveryPipelineRequest", + "CreateDeployPolicyRequest", "CreateReleaseRequest", "CreateRolloutRequest", "CreateTargetRequest", @@ -219,20 +237,25 @@ "DeleteAutomationRequest", "DeleteCustomTargetTypeRequest", "DeleteDeliveryPipelineRequest", + "DeleteDeployPolicyRequest", "DeleteTargetRequest", "DeliveryPipeline", + "DeliveryPipelineAttribute", "DeployArtifact", "DeployJob", "DeployJobRun", "DeployJobRunMetadata", "DeploymentJobs", "DeployParameters", + "DeployPolicy", + "DeployPolicyResourceSelector", "ExecutionConfig", "GetAutomationRequest", "GetAutomationRunRequest", "GetConfigRequest", "GetCustomTargetTypeRequest", "GetDeliveryPipelineRequest", + "GetDeployPolicyRequest", "GetJobRunRequest", "GetReleaseRequest", "GetRolloutRequest", @@ -251,6 +274,8 @@ "ListCustomTargetTypesResponse", "ListDeliveryPipelinesRequest", "ListDeliveryPipelinesResponse", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", "ListJobRunsRequest", "ListJobRunsResponse", "ListReleasesRequest", @@ -261,10 +286,14 @@ "ListTargetsResponse", "Metadata", "MultiTarget", + "OneTimeWindow", "OperationMetadata", "Phase", "PipelineCondition", "PipelineReadyCondition", + "PolicyRule", + "PolicyViolation", + "PolicyViolationDetails", "Postdeploy", "PostdeployJob", 
"PostdeployJobRun", @@ -288,6 +317,7 @@ "RollbackTargetRequest", "RollbackTargetResponse", "Rollout", + "RolloutRestriction", "RuntimeConfig", "SerialPipeline", "SkaffoldModules", @@ -302,17 +332,21 @@ "TargetsTypeCondition", "TerminateJobRunRequest", "TerminateJobRunResponse", + "TimeWindows", "UpdateAutomationRequest", "UpdateCustomTargetTypeRequest", "UpdateDeliveryPipelineRequest", + "UpdateDeployPolicyRequest", "UpdateTargetRequest", "VerifyJob", "VerifyJobRun", + "WeeklyWindow", "BackoffMode", "RepairState", "SkaffoldSupportState", "CustomTargetTypeNotificationEvent", "DeliveryPipelineNotificationEvent", + "DeployPolicyEvaluationEvent", "DeployPolicyNotificationEvent", "JobRunNotificationEvent", "Type", diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py index 390ce9753401..809363dd9595 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py @@ -21,6 +21,8 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -80,8 +82,24 @@ "CreateCustomTargetTypeRequest", "UpdateCustomTargetTypeRequest", "DeleteCustomTargetTypeRequest", + "DeployPolicy", + "DeployPolicyResourceSelector", + "DeliveryPipelineAttribute", "TargetAttribute", + "PolicyRule", + "RolloutRestriction", + "TimeWindows", + "OneTimeWindow", + "WeeklyWindow", + "PolicyViolation", + "PolicyViolationDetails", "Release", + "CreateDeployPolicyRequest", + "UpdateDeployPolicyRequest", + "DeleteDeployPolicyRequest", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", + "GetDeployPolicyRequest", 
"BuildArtifact", "TargetArtifact", "DeployArtifact", @@ -1387,6 +1405,9 @@ class RollbackTargetRequest(proto.Message): validate_only (bool): Optional. If set to true, the request is validated and the user is provided with a ``RollbackTargetResponse``. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deploy_policy}``. """ name: str = proto.Field( @@ -1418,6 +1439,10 @@ class RollbackTargetRequest(proto.Message): proto.BOOL, number=7, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) class RollbackTargetResponse(proto.Message): @@ -2398,7 +2423,7 @@ class SkaffoldGCSSource(proto.Message): Attributes: source (str): Required. Cloud Storage source paths to copy recursively. - For example, providing `gs://my-bucket/dir/configs/*` will + For example, providing ``"gs://my-bucket/dir/configs/*"`` will result in Skaffold copying all files within the "dir/configs" directory in the bucket "my-bucket". path (str): @@ -2761,17 +2786,220 @@ class DeleteCustomTargetTypeRequest(proto.Message): ) +class DeployPolicy(proto.Message): + r"""A ``DeployPolicy`` resource in the Cloud Deploy API. + + A ``DeployPolicy`` inhibits manual or automation-driven actions + within a Delivery Pipeline or Target. + + Attributes: + name (str): + Output only. Name of the ``DeployPolicy``. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. + The ``deployPolicy`` component must match + ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?`` + uid (str): + Output only. Unique identifier of the ``DeployPolicy``. + description (str): + Description of the ``DeployPolicy``. Max length is 255 + characters. + annotations (MutableMapping[str, str]): + User annotations. These attributes can only be set and used + by the user, and not by Cloud Deploy. 
Annotations must meet + the following constraints: + + - Annotations are key/value pairs. + - Valid annotation keys have two segments: an optional + prefix and name, separated by a slash (``/``). + - The name segment is required and must be 63 characters or + less, beginning and ending with an alphanumeric character + (``[a-z0-9A-Z]``) with dashes (``-``), underscores + (``_``), dots (``.``), and alphanumerics between. + - The prefix is optional. If specified, the prefix must be + a DNS subdomain: a series of DNS labels separated by + dots(\ ``.``), not longer than 253 characters in total, + followed by a slash (``/``). + + See + https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/#syntax-and-character-set + for more details. + labels (MutableMapping[str, str]): + Labels are attributes that can be set and used by both the + user and by Cloud Deploy. Labels must meet the following + constraints: + + - Keys and values can contain only lowercase letters, + numeric characters, underscores, and dashes. + - All characters must use UTF-8 encoding, and international + characters are allowed. + - Keys must start with a lowercase letter or international + character. + - Each resource is limited to a maximum of 64 labels. + + Both keys and values are additionally constrained to be <= + 128 bytes. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which the deploy policy + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Most recent time at which the + deploy policy was updated. + suspended (bool): + When suspended, the policy will not prevent + actions from occurring, even if the action + violates the policy. + selectors (MutableSequence[google.cloud.deploy_v1.types.DeployPolicyResourceSelector]): + Required. Selected resources to which the + policy will be applied. At least one selector is + required. If one selector matches the resource + the policy applies. 
For example, if there are + two selectors and the action being attempted + matches one of them, the policy will apply to + that action. + rules (MutableSequence[google.cloud.deploy_v1.types.PolicyRule]): + Required. Rules to apply. At least one rule + must be present. + etag (str): + The weak etag of the ``Automation`` resource. This checksum + is computed by the server based on the value of other + fields, and may be sent on update and delete requests to + ensure the client has an up-to-date value before proceeding. + """ + + class Invoker(proto.Enum): + r"""What invoked the action. Filters enforcing the policy + depending on what invoked the action. + + Values: + INVOKER_UNSPECIFIED (0): + Unspecified. + USER (1): + The action is user-driven. For example, + creating a rollout manually via a gcloud create + command. + DEPLOY_AUTOMATION (2): + Automated action by Cloud Deploy. + """ + INVOKER_UNSPECIFIED = 0 + USER = 1 + DEPLOY_AUTOMATION = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + suspended: bool = proto.Field( + proto.BOOL, + number=8, + ) + selectors: MutableSequence["DeployPolicyResourceSelector"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="DeployPolicyResourceSelector", + ) + rules: MutableSequence["PolicyRule"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="PolicyRule", + ) + etag: str = proto.Field( + proto.STRING, + number=11, + ) + + 
+class DeployPolicyResourceSelector(proto.Message): + r"""Contains information on the resources to select for a deploy + policy. Attributes provided must all match the resource in order + for policy restrictions to apply. For example, if delivery + pipelines attributes given are an id "prod" and labels "foo: + bar", a delivery pipeline resource must match both that id and + have that label in order to be subject to the policy. + + Attributes: + delivery_pipeline (google.cloud.deploy_v1.types.DeliveryPipelineAttribute): + Optional. Contains attributes about a + delivery pipeline. + target (google.cloud.deploy_v1.types.TargetAttribute): + Optional. Contains attributes about a target. + """ + + delivery_pipeline: "DeliveryPipelineAttribute" = proto.Field( + proto.MESSAGE, + number=1, + message="DeliveryPipelineAttribute", + ) + target: "TargetAttribute" = proto.Field( + proto.MESSAGE, + number=2, + message="TargetAttribute", + ) + + +class DeliveryPipelineAttribute(proto.Message): + r"""Contains criteria for selecting DeliveryPipelines. + + Attributes: + id (str): + ID of the ``DeliveryPipeline``. The value of this field + could be one of the following: + + - The last segment of a pipeline name + - "*", all delivery pipelines in a location + labels (MutableMapping[str, str]): + DeliveryPipeline labels. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + + class TargetAttribute(proto.Message): - r"""Contains criteria for selecting Targets. + r"""Contains criteria for selecting Targets. This could be used + to select targets for a Deploy Policy or for an Automation. Attributes: id (str): ID of the ``Target``. The value of this field could be one of the following: - - The last segment of a target name. It only needs the ID - to determine which target is being referred to - - "*", all targets in a location. 
+ - The last segment of a target name + - "*", all targets in a location labels (MutableMapping[str, str]): Target labels. """ @@ -2787,6 +3015,262 @@ class TargetAttribute(proto.Message): ) +class PolicyRule(proto.Message): + r"""Deploy Policy rule. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + rollout_restriction (google.cloud.deploy_v1.types.RolloutRestriction): + Rollout restrictions. + + This field is a member of `oneof`_ ``rule``. + """ + + rollout_restriction: "RolloutRestriction" = proto.Field( + proto.MESSAGE, + number=2, + oneof="rule", + message="RolloutRestriction", + ) + + +class RolloutRestriction(proto.Message): + r"""Rollout restrictions. + + Attributes: + id (str): + Required. Restriction rule ID. Required and must be unique + within a DeployPolicy. The format is + ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?``. + invokers (MutableSequence[google.cloud.deploy_v1.types.DeployPolicy.Invoker]): + Optional. What invoked the action. If left + empty, all invoker types will be restricted. + actions (MutableSequence[google.cloud.deploy_v1.types.RolloutRestriction.RolloutActions]): + Optional. Rollout actions to be restricted as + part of the policy. If left empty, all actions + will be restricted. + time_windows (google.cloud.deploy_v1.types.TimeWindows): + Required. Time window within which actions + are restricted. + """ + + class RolloutActions(proto.Enum): + r"""Rollout actions to be restricted as part of the policy. + + Values: + ROLLOUT_ACTIONS_UNSPECIFIED (0): + Unspecified. + ADVANCE (1): + Advance the rollout to the next phase. + APPROVE (2): + Approve the rollout. + CANCEL (3): + Cancel the rollout. + CREATE (4): + Create a rollout. + IGNORE_JOB (5): + Ignore a job result on the rollout. + RETRY_JOB (6): + Retry a job for a rollout. + ROLLBACK (7): + Rollback a rollout. + TERMINATE_JOBRUN (8): + Terminate a jobrun. 
+ """ + ROLLOUT_ACTIONS_UNSPECIFIED = 0 + ADVANCE = 1 + APPROVE = 2 + CANCEL = 3 + CREATE = 4 + IGNORE_JOB = 5 + RETRY_JOB = 6 + ROLLBACK = 7 + TERMINATE_JOBRUN = 8 + + id: str = proto.Field( + proto.STRING, + number=1, + ) + invokers: MutableSequence["DeployPolicy.Invoker"] = proto.RepeatedField( + proto.ENUM, + number=2, + enum="DeployPolicy.Invoker", + ) + actions: MutableSequence[RolloutActions] = proto.RepeatedField( + proto.ENUM, + number=3, + enum=RolloutActions, + ) + time_windows: "TimeWindows" = proto.Field( + proto.MESSAGE, + number=4, + message="TimeWindows", + ) + + +class TimeWindows(proto.Message): + r"""Time windows within which actions are restricted. See the + `documentation `__ + for more information on how to configure dates/times. + + Attributes: + time_zone (str): + Required. The time zone in IANA format `IANA Time Zone + Database `__ (e.g. + America/New_York). + one_time_windows (MutableSequence[google.cloud.deploy_v1.types.OneTimeWindow]): + Optional. One-time windows within which + actions are restricted. + weekly_windows (MutableSequence[google.cloud.deploy_v1.types.WeeklyWindow]): + Optional. Recurring weekly windows within + which actions are restricted. + """ + + time_zone: str = proto.Field( + proto.STRING, + number=1, + ) + one_time_windows: MutableSequence["OneTimeWindow"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="OneTimeWindow", + ) + weekly_windows: MutableSequence["WeeklyWindow"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="WeeklyWindow", + ) + + +class OneTimeWindow(proto.Message): + r"""One-time window within which actions are restricted. For + example, blocking actions over New Year's Eve from December 31st + at 5pm to January 1st at 9am. + + Attributes: + start_date (google.type.date_pb2.Date): + Required. Start date. + start_time (google.type.timeofday_pb2.TimeOfDay): + Required. Start time (inclusive). Use 00:00 + for the beginning of the day. 
+ end_date (google.type.date_pb2.Date): + Required. End date. + end_time (google.type.timeofday_pb2.TimeOfDay): + Required. End time (exclusive). You may use + 24:00 for the end of the day. + """ + + start_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + end_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=3, + message=date_pb2.Date, + ) + end_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=4, + message=timeofday_pb2.TimeOfDay, + ) + + +class WeeklyWindow(proto.Message): + r"""Weekly windows. For example, blocking actions every Saturday + and Sunday. Another example would be blocking actions every + weekday from 5pm to midnight. + + Attributes: + days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Optional. Days of week. If left empty, all + days of the week will be included. + start_time (google.type.timeofday_pb2.TimeOfDay): + Optional. Start time (inclusive). Use 00:00 for the + beginning of the day. If you specify start_time you must + also specify end_time. If left empty, this will block for + the entire day for the days specified in days_of_week. + end_time (google.type.timeofday_pb2.TimeOfDay): + Optional. End time (exclusive). Use 24:00 to indicate + midnight. If you specify end_time you must also specify + start_time. If left empty, this will block for the entire + day for the days specified in days_of_week. 
+ """ + + days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + end_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=3, + message=timeofday_pb2.TimeOfDay, + ) + + +class PolicyViolation(proto.Message): + r"""Returned from an action if one or more policies were + violated, and therefore the action was prevented. Contains + information about what policies were violated and why. + + Attributes: + policy_violation_details (MutableSequence[google.cloud.deploy_v1.types.PolicyViolationDetails]): + Policy violation details. + """ + + policy_violation_details: MutableSequence[ + "PolicyViolationDetails" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PolicyViolationDetails", + ) + + +class PolicyViolationDetails(proto.Message): + r"""Policy violation details. + + Attributes: + policy (str): + Name of the policy that was violated. Policy resource will + be in the format of + ``projects/{project}/locations/{location}/policies/{policy}``. + rule_id (str): + Id of the rule that triggered the policy + violation. + failure_message (str): + User readable message about why the request + violated a policy. This is not intended for + machine parsing. + """ + + policy: str = proto.Field( + proto.STRING, + number=1, + ) + rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + failure_message: str = proto.Field( + proto.STRING, + number=3, + ) + + class Release(proto.Message): r"""A ``Release`` resource in the Cloud Deploy API. @@ -3085,124 +3569,417 @@ class ReleaseCondition(proto.Message): release's Skaffold version. 
""" - release_ready_condition: "Release.ReleaseReadyCondition" = proto.Field( - proto.MESSAGE, - number=1, - message="Release.ReleaseReadyCondition", - ) - skaffold_supported_condition: "Release.SkaffoldSupportedCondition" = ( - proto.Field( - proto.MESSAGE, - number=2, - message="Release.SkaffoldSupportedCondition", - ) - ) + release_ready_condition: "Release.ReleaseReadyCondition" = proto.Field( + proto.MESSAGE, + number=1, + message="Release.ReleaseReadyCondition", + ) + skaffold_supported_condition: "Release.SkaffoldSupportedCondition" = ( + proto.Field( + proto.MESSAGE, + number=2, + message="Release.SkaffoldSupportedCondition", + ) + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + abandoned: bool = proto.Field( + proto.BOOL, + number=23, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + render_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + render_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + skaffold_config_uri: str = proto.Field( + proto.STRING, + number=17, + ) + skaffold_config_path: str = proto.Field( + proto.STRING, + number=9, + ) + build_artifacts: MutableSequence["BuildArtifact"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="BuildArtifact", + ) + delivery_pipeline_snapshot: "DeliveryPipeline" = proto.Field( + proto.MESSAGE, + number=11, + message="DeliveryPipeline", + ) + target_snapshots: MutableSequence["Target"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + 
message="Target", + ) + custom_target_type_snapshots: MutableSequence[ + "CustomTargetType" + ] = proto.RepeatedField( + proto.MESSAGE, + number=27, + message="CustomTargetType", + ) + render_state: RenderState = proto.Field( + proto.ENUM, + number=13, + enum=RenderState, + ) + etag: str = proto.Field( + proto.STRING, + number=16, + ) + skaffold_version: str = proto.Field( + proto.STRING, + number=19, + ) + target_artifacts: MutableMapping[str, "TargetArtifact"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=20, + message="TargetArtifact", + ) + target_renders: MutableMapping[str, TargetRender] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=22, + message=TargetRender, + ) + condition: ReleaseCondition = proto.Field( + proto.MESSAGE, + number=24, + message=ReleaseCondition, + ) + deploy_parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=25, + ) + + +class CreateDeployPolicyRequest(proto.Message): + r"""The request object for ``CreateDeployPolicy``. + + Attributes: + parent (str): + Required. The parent collection in which the + ``DeployPolicy`` must be created. The format is + ``projects/{project_id}/locations/{location_name}``. + deploy_policy_id (str): + Required. ID of the ``DeployPolicy``. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to create. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. 
This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set to true, the request is + validated and the user is provided with an + expected result, but no actual change is made. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deploy_policy_id: str = proto.Field( + proto.STRING, + number=2, + ) + deploy_policy: "DeployPolicy" = proto.Field( + proto.MESSAGE, + number=3, + message="DeployPolicy", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class UpdateDeployPolicyRequest(proto.Message): + r"""The request object for ``UpdateDeployPolicy``. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten by the update in the ``DeployPolicy`` resource. + The fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it's in the mask. If the user doesn't provide a mask then + all fields are overwritten. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to update. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. 
This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + allow_missing (bool): + Optional. If set to true, updating a ``DeployPolicy`` that + does not exist will result in the creation of a new + ``DeployPolicy``. + validate_only (bool): + Optional. If set to true, the request is + validated and the user is provided with an + expected result, but no actual change is made. + """ - name: str = proto.Field( - proto.STRING, + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, number=1, + message=field_mask_pb2.FieldMask, ) - uid: str = proto.Field( - proto.STRING, + deploy_policy: "DeployPolicy" = proto.Field( + proto.MESSAGE, number=2, + message="DeployPolicy", ) - description: str = proto.Field( + request_id: str = proto.Field( proto.STRING, number=3, ) - annotations: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, + allow_missing: bool = proto.Field( + proto.BOOL, number=4, ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - abandoned: bool = proto.Field( + validate_only: bool = proto.Field( proto.BOOL, - number=23, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - render_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - render_end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, + number=5, ) - skaffold_config_uri: str = proto.Field( + + +class DeleteDeployPolicyRequest(proto.Message): + r"""The request object for ``DeleteDeployPolicy``. + + Attributes: + name (str): + Required. The name of the ``DeployPolicy`` to delete. 
The + format is + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + allow_missing (bool): + Optional. If set to true, then deleting an already deleted + or non-existing ``DeployPolicy`` will succeed. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not actually post it. + etag (str): + Optional. This checksum is computed by the + server based on the value of other fields, and + may be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. 
+ """ + + name: str = proto.Field( proto.STRING, - number=17, + number=1, ) - skaffold_config_path: str = proto.Field( + request_id: str = proto.Field( proto.STRING, - number=9, + number=2, ) - build_artifacts: MutableSequence["BuildArtifact"] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message="BuildArtifact", + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, ) - delivery_pipeline_snapshot: "DeliveryPipeline" = proto.Field( - proto.MESSAGE, - number=11, - message="DeliveryPipeline", + validate_only: bool = proto.Field( + proto.BOOL, + number=4, ) - target_snapshots: MutableSequence["Target"] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message="Target", + etag: str = proto.Field( + proto.STRING, + number=5, ) - custom_target_type_snapshots: MutableSequence[ - "CustomTargetType" - ] = proto.RepeatedField( - proto.MESSAGE, - number=27, - message="CustomTargetType", + + +class ListDeployPoliciesRequest(proto.Message): + r"""The request object for ``ListDeployPolicies``. + + Attributes: + parent (str): + Required. The parent, which owns this collection of deploy + policies. Format must be + ``projects/{project_id}/locations/{location_name}``. + page_size (int): + The maximum number of deploy policies to + return. The service may return fewer than this + value. If unspecified, at most 50 deploy + policies will be returned. The maximum value is + 1000; values above 1000 will be set to 1000. + page_token (str): + A page token, received from a previous + ``ListDeployPolicies`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other provided parameters match the + call that provided the page token. + filter (str): + Filter deploy policies to be returned. See + https://google.aip.dev/160 for more details. All + fields can be used in the filter. + order_by (str): + Field to sort by. See + https://google.aip.dev/132#ordering for more + details. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, ) - render_state: RenderState = proto.Field( - proto.ENUM, - number=13, - enum=RenderState, + page_size: int = proto.Field( + proto.INT32, + number=2, ) - etag: str = proto.Field( + page_token: str = proto.Field( proto.STRING, - number=16, + number=3, ) - skaffold_version: str = proto.Field( + filter: str = proto.Field( proto.STRING, - number=19, + number=4, ) - target_artifacts: MutableMapping[str, "TargetArtifact"] = proto.MapField( + order_by: str = proto.Field( proto.STRING, - proto.MESSAGE, - number=20, - message="TargetArtifact", + number=5, ) - target_renders: MutableMapping[str, TargetRender] = proto.MapField( - proto.STRING, + + +class ListDeployPoliciesResponse(proto.Message): + r"""The response object from ``ListDeployPolicies``. + + Attributes: + deploy_policies (MutableSequence[google.cloud.deploy_v1.types.DeployPolicy]): + The ``DeployPolicy`` objects. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + deploy_policies: MutableSequence["DeployPolicy"] = proto.RepeatedField( proto.MESSAGE, - number=22, - message=TargetRender, + number=1, + message="DeployPolicy", ) - condition: ReleaseCondition = proto.Field( - proto.MESSAGE, - number=24, - message=ReleaseCondition, + next_page_token: str = proto.Field( + proto.STRING, + number=2, ) - deploy_parameters: MutableMapping[str, str] = proto.MapField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, + number=3, + ) + + +class GetDeployPolicyRequest(proto.Message): + r"""The request object for ``GetDeployPolicy`` + + Attributes: + name (str): + Required. Name of the ``DeployPolicy``. 
Format must be + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + """ + + name: str = proto.Field( proto.STRING, - number=25, + number=1, ) @@ -3505,6 +4282,9 @@ class CreateReleaseRequest(proto.Message): Optional. If set to true, the request is validated and the user is provided with an expected result, but no actual change is made. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ parent: str = proto.Field( @@ -3528,6 +4308,10 @@ class CreateReleaseRequest(proto.Message): proto.BOOL, number=5, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) class Rollout(proto.Message): @@ -4458,6 +5242,9 @@ class CreateRolloutRequest(proto.Message): Optional. If set to true, the request is validated and the user is provided with an expected result, but no actual change is made. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. starting_phase_id (str): Optional. The starting phase ID for the ``Rollout``. If empty the ``Rollout`` will start at the first phase. @@ -4484,6 +5271,10 @@ class CreateRolloutRequest(proto.Message): proto.BOOL, number=5, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) starting_phase_id: str = proto.Field( proto.STRING, number=7, @@ -4561,6 +5352,9 @@ class ApproveRolloutRequest(proto.Message): ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}``. approved (bool): Required. True = approve; false = reject + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. 
""" name: str = proto.Field( @@ -4571,6 +5365,10 @@ class ApproveRolloutRequest(proto.Message): proto.BOOL, number=2, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class ApproveRolloutResponse(proto.Message): @@ -4586,6 +5384,9 @@ class AdvanceRolloutRequest(proto.Message): ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}``. phase_id (str): Required. The phase ID to advance the ``Rollout`` to. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ name: str = proto.Field( @@ -4596,6 +5397,10 @@ class AdvanceRolloutRequest(proto.Message): proto.STRING, number=2, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class AdvanceRolloutResponse(proto.Message): @@ -4609,12 +5414,19 @@ class CancelRolloutRequest(proto.Message): name (str): Required. Name of the Rollout. Format is ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}``. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ name: str = proto.Field( proto.STRING, number=1, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) class CancelRolloutResponse(proto.Message): @@ -4633,6 +5445,9 @@ class IgnoreJobRequest(proto.Message): belongs to. job_id (str): Required. The job ID for the Job to ignore. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. 
""" rollout: str = proto.Field( @@ -4647,6 +5462,10 @@ class IgnoreJobRequest(proto.Message): proto.STRING, number=3, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) class IgnoreJobResponse(proto.Message): @@ -4665,6 +5484,9 @@ class RetryJobRequest(proto.Message): belongs to. job_id (str): Required. The job ID for the Job to retry. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ rollout: str = proto.Field( @@ -4679,6 +5501,10 @@ class RetryJobRequest(proto.Message): proto.STRING, number=3, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) class RetryJobResponse(proto.Message): @@ -5311,12 +6137,19 @@ class TerminateJobRunRequest(proto.Message): name (str): Required. Name of the ``JobRun``. Format must be ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}/jobRuns/{jobRun}``. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ name: str = proto.Field( proto.STRING, number=1, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) class TerminateJobRunResponse(proto.Message): @@ -5626,10 +6459,8 @@ class PromoteReleaseRule(proto.Message): next stage in the promotion flow. The value of this field could be one of the following: - - The last segment of a target name. It only needs the ID - to determine if the target is one of the stages in the - promotion sequence defined in the pipeline. - - "@next", the next target in the promotion sequence. 
+ - The last segment of a target name + - "@next", the next target in the promotion sequence condition (google.cloud.deploy_v1.types.AutomationRuleCondition): Output only. Information around the state of the Automation rule. @@ -6098,6 +6929,9 @@ class AutomationRun(proto.Message): Output only. Explains the current state of the ``AutomationRun``. Present only when an explanation is needed. + policy_violation (google.cloud.deploy_v1.types.PolicyViolation): + Output only. Contains information about what policies + prevented the ``AutomationRun`` from proceeding. expire_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time the ``AutomationRun`` expires. An ``AutomationRun`` expires after 14 days from its creation @@ -6195,6 +7029,11 @@ class State(proto.Enum): proto.STRING, number=9, ) + policy_violation: "PolicyViolation" = proto.Field( + proto.MESSAGE, + number=10, + message="PolicyViolation", + ) expire_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=11, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/log_enums.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/log_enums.py index 28a732b7b078..853e64e15f60 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/log_enums.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/log_enums.py @@ -46,6 +46,8 @@ class Type(proto.Enum): Resource deleted. TYPE_ROLLOUT_UPDATE (7): Rollout updated. + TYPE_DEPLOY_POLICY_EVALUATION (8): + Deploy Policy evaluation. TYPE_RENDER_STATUES_CHANGE (2): Deprecated: This field is never used. Use release_render log type instead. 
@@ -57,6 +59,7 @@ class Type(proto.Enum): TYPE_RESTRICTION_VIOLATED = 5 TYPE_RESOURCE_DELETED = 6 TYPE_ROLLOUT_UPDATE = 7 + TYPE_DEPLOY_POLICY_EVALUATION = 8 TYPE_RENDER_STATUES_CHANGE = 2 diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py new file mode 100644 index 000000000000..5c0a67fbf626 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py new file mode 100644 index 000000000000..87445443040f --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py new file mode 100644 
index 000000000000..6f6545a0350c --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py new file mode 100644 index 000000000000..1c9d7dd414fe --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py new file mode 100644 index 000000000000..a96e7a4309c2 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deploy_policy(request=request) + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py new file mode 100644 index 000000000000..669a50729182 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_deploy_policy(request=request) + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py new file mode 100644 index 000000000000..f3932c8119a1 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeployPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py new file mode 100644 index 000000000000..8c6baf6b8c95 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeployPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py new file mode 100644 index 000000000000..d08ab6a4ded1 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py new file mode 100644 index 000000000000..18ab7cd6a8b9 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.rollout_restriction.id = "id_value" + deploy_policy.rules.rollout_restriction.time_windows.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json index b40c1309decb..93d8c019cf25 100644 --- a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json +++ b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-deploy", - "version": "2.0.0" + "version": "2.1.0" }, "snippets": [ { @@ -1355,6 +1355,183 @@ ], "title": "clouddeploy_v1_generated_cloud_deploy_create_delivery_pipeline_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.create_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.CreateDeployPolicy", + "service": { + "fullName": 
"google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "CreateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.CreateDeployPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "deploy_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_deploy_policy" + }, + "description": "Sample for CreateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.create_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.CreateDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "CreateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.deploy_v1.types.CreateDeployPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "deploy_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_deploy_policy" + }, + "description": "Sample for CreateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py" + }, { "canonical": true, "clientMethod": { @@ -2377,19 +2554,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.delete_target", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.delete_deploy_policy", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteDeployPolicy", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "DeleteTarget" + "shortName": "DeleteDeployPolicy" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.deploy_v1.types.DeleteTargetRequest" + "type": "google.cloud.deploy_v1.types.DeleteDeployPolicyRequest" }, { "name": "name", @@ -2409,13 +2586,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_target" + "shortName": "delete_deploy_policy" }, - "description": "Sample for DeleteTarget", - "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py", + "description": "Sample for DeleteDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteTarget_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_async", "segments": [ { "end": 55, @@ -2448,7 +2625,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py" }, { "canonical": true, @@ -2457,19 +2634,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.delete_target", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.delete_deploy_policy", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteDeployPolicy", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "DeleteTarget" + "shortName": "DeleteDeployPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" + "type": "google.cloud.deploy_v1.types.DeleteDeployPolicyRequest" }, { "name": "name", @@ -2489,13 +2666,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_target" + "shortName": "delete_deploy_policy" }, - "description": "Sample for DeleteTarget", - "file": 
"clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py", + "description": "Sample for DeleteDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteTarget_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_sync", "segments": [ { "end": 55, @@ -2528,7 +2705,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py" }, { "canonical": true, @@ -2538,19 +2715,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation_run", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.delete_target", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomationRun" + "shortName": "DeleteTarget" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" + "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" }, { "name": "name", @@ -2569,22 +2746,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.types.AutomationRun", - "shortName": "get_automation_run" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_target" }, - "description": "Sample for GetAutomationRun", - "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py", + "description": "Sample for DeleteTarget", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - 
"regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteTarget_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2599,17 +2776,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py" }, { "canonical": true, @@ -2618,19 +2795,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation_run", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.delete_target", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomationRun" + "shortName": "DeleteTarget" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" + "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" }, { "name": "name", @@ -2649,22 +2826,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.types.AutomationRun", - "shortName": "get_automation_run" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_target" }, - "description": "Sample for GetAutomationRun", - "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py", + "description": "Sample for DeleteTarget", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteTarget_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2679,17 +2856,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py" }, { "canonical": true, @@ -2699,19 +2876,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation_run", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomation" + "shortName": "GetAutomationRun" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.GetAutomationRequest" + "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" }, { "name": "name", @@ -2730,14 +2907,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.types.Automation", - "shortName": "get_automation" + "resultType": "google.cloud.deploy_v1.types.AutomationRun", + "shortName": "get_automation_run" }, - "description": "Sample for GetAutomation", - "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py", + "description": "Sample for GetAutomationRun", + "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - 
"regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomation_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_async", "segments": [ { "end": 51, @@ -2770,7 +2947,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py" }, { "canonical": true, @@ -2779,14 +2956,175 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation_run", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomation" + "shortName": "GetAutomationRun" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.AutomationRun", + "shortName": "get_automation_run" + }, + "description": "Sample for GetAutomationRun", + "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetAutomation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetAutomationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.Automation", + "shortName": "get_automation" + }, + "description": "Sample for GetAutomation", + "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + 
"fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetAutomation" }, "parameters": [ { @@ -3335,6 +3673,167 @@ ], "title": "clouddeploy_v1_generated_cloud_deploy_get_delivery_pipeline_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetDeployPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.DeployPolicy", + "shortName": "get_deploy_policy" + }, + "description": "Sample for GetDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } 
+ ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetDeployPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.DeployPolicy", + "shortName": "get_deploy_policy" + }, + "description": "Sample for GetDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py" + }, { "canonical": true, "clientMethod": { @@ -4278,11 +4777,172 @@ "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationRunsPager", "shortName": "list_automation_runs" }, - "description": "Sample for ListAutomationRuns", - "file": 
"clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py", + "description": "Sample for ListAutomationRuns", + "file": "clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomationRuns_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_automations", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "ListAutomations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsAsyncPager", + "shortName": "list_automations" + }, + "description": "Sample for ListAutomations", + "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"clouddeploy_v1_generated_CloudDeploy_ListAutomations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_automations", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "ListAutomations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsPager", + "shortName": "list_automations" + }, + "description": "Sample for ListAutomations", + "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomationRuns_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomations_sync", "segments": [ { "end": 52, @@ -4315,7 +4975,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py" + "title": 
"clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py" }, { "canonical": true, @@ -4325,19 +4985,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_automations", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_custom_target_types", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListAutomations" + "shortName": "ListCustomTargetTypes" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" }, { "name": "parent", @@ -4356,14 +5016,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsAsyncPager", - "shortName": "list_automations" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesAsyncPager", + "shortName": "list_custom_target_types" }, - "description": "Sample for ListAutomations", - "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py", + "description": "Sample for ListCustomTargetTypes", + "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomations_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_async", "segments": [ { "end": 52, @@ -4396,7 +5056,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py" }, { "canonical": true, @@ -4405,19 
+5065,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_automations", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_custom_target_types", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListAutomations" + "shortName": "ListCustomTargetTypes" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" }, { "name": "parent", @@ -4436,14 +5096,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsPager", - "shortName": "list_automations" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesPager", + "shortName": "list_custom_target_types" }, - "description": "Sample for ListAutomations", - "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py", + "description": "Sample for ListCustomTargetTypes", + "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomations_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_sync", "segments": [ { "end": 52, @@ -4476,7 +5136,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py" }, { "canonical": true, @@ -4486,19 +5146,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": 
"google.cloud.deploy_v1.CloudDeployAsyncClient.list_custom_target_types", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_delivery_pipelines", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListCustomTargetTypes" + "shortName": "ListDeliveryPipelines" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" + "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" }, { "name": "parent", @@ -4517,14 +5177,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesAsyncPager", - "shortName": "list_custom_target_types" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesAsyncPager", + "shortName": "list_delivery_pipelines" }, - "description": "Sample for ListCustomTargetTypes", - "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py", + "description": "Sample for ListDeliveryPipelines", + "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_async", "segments": [ { "end": 52, @@ -4557,7 +5217,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py" }, { "canonical": true, @@ -4566,19 +5226,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": 
"google.cloud.deploy_v1.CloudDeployClient.list_custom_target_types", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_delivery_pipelines", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListCustomTargetTypes" + "shortName": "ListDeliveryPipelines" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" + "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" }, { "name": "parent", @@ -4597,14 +5257,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesPager", - "shortName": "list_custom_target_types" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesPager", + "shortName": "list_delivery_pipelines" }, - "description": "Sample for ListCustomTargetTypes", - "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py", + "description": "Sample for ListDeliveryPipelines", + "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_sync", "segments": [ { "end": 52, @@ -4637,7 +5297,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py" }, { "canonical": true, @@ -4647,19 +5307,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": 
"google.cloud.deploy_v1.CloudDeployAsyncClient.list_delivery_pipelines", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_deploy_policies", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeployPolicies", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListDeliveryPipelines" + "shortName": "ListDeployPolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" + "type": "google.cloud.deploy_v1.types.ListDeployPoliciesRequest" }, { "name": "parent", @@ -4678,14 +5338,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesAsyncPager", - "shortName": "list_delivery_pipelines" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesAsyncPager", + "shortName": "list_deploy_policies" }, - "description": "Sample for ListDeliveryPipelines", - "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py", + "description": "Sample for ListDeployPolicies", + "file": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_async", "segments": [ { "end": 52, @@ -4718,7 +5378,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py" }, { "canonical": true, @@ -4727,19 +5387,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_delivery_pipelines", + "fullName": 
"google.cloud.deploy_v1.CloudDeployClient.list_deploy_policies", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeployPolicies", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListDeliveryPipelines" + "shortName": "ListDeployPolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" + "type": "google.cloud.deploy_v1.types.ListDeployPoliciesRequest" }, { "name": "parent", @@ -4758,14 +5418,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesPager", - "shortName": "list_delivery_pipelines" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesPager", + "shortName": "list_deploy_policies" }, - "description": "Sample for ListDeliveryPipelines", - "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py", + "description": "Sample for ListDeployPolicies", + "file": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_sync", "segments": [ { "end": 52, @@ -4798,7 +5458,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py" }, { "canonical": true, @@ -6466,6 +7126,175 @@ ], "title": "clouddeploy_v1_generated_cloud_deploy_update_delivery_pipeline_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": 
"google.cloud.deploy_v1.CloudDeployAsyncClient.update_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.UpdateDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "UpdateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.UpdateDeployPolicyRequest" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_deploy_policy" + }, + "description": "Sample for UpdateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.update_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.UpdateDeployPolicy", + "service": { + "fullName": 
"google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "UpdateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.UpdateDeployPolicyRequest" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_deploy_policy" + }, + "description": "Sample for UpdateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py b/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py index 1a652b4ea5be..10255de70644 100644 --- a/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py +++ b/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py @@ -40,44 +40,49 @@ class deployCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 
'abandon_release': ('name', ), - 'advance_rollout': ('name', 'phase_id', ), - 'approve_rollout': ('name', 'approved', ), + 'advance_rollout': ('name', 'phase_id', 'override_deploy_policy', ), + 'approve_rollout': ('name', 'approved', 'override_deploy_policy', ), 'cancel_automation_run': ('name', ), - 'cancel_rollout': ('name', ), + 'cancel_rollout': ('name', 'override_deploy_policy', ), 'create_automation': ('parent', 'automation_id', 'automation', 'request_id', 'validate_only', ), 'create_custom_target_type': ('parent', 'custom_target_type_id', 'custom_target_type', 'request_id', 'validate_only', ), 'create_delivery_pipeline': ('parent', 'delivery_pipeline_id', 'delivery_pipeline', 'request_id', 'validate_only', ), - 'create_release': ('parent', 'release_id', 'release', 'request_id', 'validate_only', ), - 'create_rollout': ('parent', 'rollout_id', 'rollout', 'request_id', 'validate_only', 'starting_phase_id', ), + 'create_deploy_policy': ('parent', 'deploy_policy_id', 'deploy_policy', 'request_id', 'validate_only', ), + 'create_release': ('parent', 'release_id', 'release', 'request_id', 'validate_only', 'override_deploy_policy', ), + 'create_rollout': ('parent', 'rollout_id', 'rollout', 'request_id', 'validate_only', 'override_deploy_policy', 'starting_phase_id', ), 'create_target': ('parent', 'target_id', 'target', 'request_id', 'validate_only', ), 'delete_automation': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'delete_custom_target_type': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'delete_delivery_pipeline': ('name', 'request_id', 'allow_missing', 'validate_only', 'force', 'etag', ), + 'delete_deploy_policy': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'delete_target': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'get_automation': ('name', ), 'get_automation_run': ('name', ), 'get_config': ('name', ), 'get_custom_target_type': ('name', ), 'get_delivery_pipeline': 
('name', ), + 'get_deploy_policy': ('name', ), 'get_job_run': ('name', ), 'get_release': ('name', ), 'get_rollout': ('name', ), 'get_target': ('name', ), - 'ignore_job': ('rollout', 'phase_id', 'job_id', ), + 'ignore_job': ('rollout', 'phase_id', 'job_id', 'override_deploy_policy', ), 'list_automation_runs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_automations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_custom_target_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_delivery_pipelines': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_deploy_policies': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_job_runs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_releases': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_rollouts': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_targets': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'retry_job': ('rollout', 'phase_id', 'job_id', ), - 'rollback_target': ('name', 'target_id', 'rollout_id', 'release_id', 'rollout_to_roll_back', 'rollback_config', 'validate_only', ), - 'terminate_job_run': ('name', ), + 'retry_job': ('rollout', 'phase_id', 'job_id', 'override_deploy_policy', ), + 'rollback_target': ('name', 'target_id', 'rollout_id', 'release_id', 'rollout_to_roll_back', 'rollback_config', 'validate_only', 'override_deploy_policy', ), + 'terminate_job_run': ('name', 'override_deploy_policy', ), 'update_automation': ('update_mask', 'automation', 'request_id', 'allow_missing', 'validate_only', ), 'update_custom_target_type': ('update_mask', 'custom_target_type', 'request_id', 'allow_missing', 'validate_only', ), 'update_delivery_pipeline': ('update_mask', 'delivery_pipeline', 'request_id', 'allow_missing', 'validate_only', ), + 'update_deploy_policy': ('update_mask', 'deploy_policy', 'request_id', 'allow_missing', 
'validate_only', ), 'update_target': ('update_mask', 'target', 'request_id', 'allow_missing', 'validate_only', ), } diff --git a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py index 95200390561d..b6c62348fc82 100644 --- a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py +++ b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py @@ -53,6 +53,9 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -1284,22 +1287,23 @@ async def test_list_delivery_pipelines_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_delivery_pipelines - ] = mock_object + ] = mock_rpc request = {} await client.list_delivery_pipelines(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_delivery_pipelines(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1888,22 +1892,23 @@ async def test_get_delivery_pipeline_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_delivery_pipeline - ] = mock_object + ] = mock_rpc request = {} await client.get_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_delivery_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2230,8 +2235,9 @@ def test_create_delivery_pipeline_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_delivery_pipeline(request) @@ -2287,26 +2293,28 @@ async def test_create_delivery_pipeline_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_delivery_pipeline - ] = mock_object + ] = mock_rpc request = {} await client.create_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_delivery_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2638,8 +2646,9 @@ def test_update_delivery_pipeline_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_delivery_pipeline(request) @@ -2695,26 +2704,28 @@ async def test_update_delivery_pipeline_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_delivery_pipeline - ] = mock_object + ] = mock_rpc request = {} await client.update_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_delivery_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3040,8 +3051,9 @@ def test_delete_delivery_pipeline_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_delivery_pipeline(request) @@ -3097,26 +3109,28 @@ async def test_delete_delivery_pipeline_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_delivery_pipeline - ] = mock_object + ] = mock_rpc request = {} await client.delete_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_delivery_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3482,22 +3496,23 @@ async def test_list_targets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_targets - ] = mock_object + ] = mock_rpc request = {} await client.list_targets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_targets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4045,22 +4060,23 @@ async def test_rollback_target_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_target - ] = mock_object + ] = mock_rpc request = {} await client.rollback_target(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4439,22 +4455,23 @@ async def test_get_target_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_target - ] = mock_object + ] = mock_rpc request = {} await client.get_target(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4757,8 +4774,9 @@ def test_create_target_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_target(request) @@ -4812,26 +4830,28 @@ async def test_create_target_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_target - ] = mock_object + ] = mock_rpc request = {} await client.create_target(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5141,8 +5161,9 @@ def test_update_target_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_target(request) @@ -5196,26 +5217,28 @@ async def test_update_target_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_target - ] = mock_object + ] = mock_rpc request = {} await client.update_target(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5519,8 +5542,9 @@ def test_delete_target_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_target(request) @@ -5574,26 +5598,28 @@ async def test_delete_target_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_target - ] = mock_object + ] = mock_rpc request = {} await client.delete_target(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5961,22 +5987,23 @@ async def test_list_custom_target_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_custom_target_types - ] = mock_object + ] = mock_rpc request = {} await client.list_custom_target_types(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_custom_target_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6567,22 +6594,23 @@ async def test_get_custom_target_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_custom_target_type - ] = mock_object + ] = mock_rpc request = {} await client.get_custom_target_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6909,8 +6937,9 @@ def test_create_custom_target_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_custom_target_type(request) @@ -6966,26 +6995,28 @@ async def test_create_custom_target_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_custom_target_type - ] = mock_object + ] = mock_rpc request = {} await client.create_custom_target_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7317,8 +7348,9 @@ def test_update_custom_target_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_custom_target_type(request) @@ -7374,26 +7406,28 @@ async def test_update_custom_target_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_custom_target_type - ] = mock_object + ] = mock_rpc request = {} await client.update_custom_target_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7719,8 +7753,9 @@ def test_delete_custom_target_type_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_custom_target_type(request) @@ -7776,26 +7811,28 @@ async def test_delete_custom_target_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_custom_target_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_custom_target_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8161,22 +8198,23 @@ async def test_list_releases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_releases - ] = mock_object + ] = mock_rpc request = {} await client.list_releases(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_releases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8745,22 +8783,23 @@ async def test_get_release_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_release - ] = mock_object + ] = mock_rpc request = {} await client.get_release(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9073,8 +9112,9 @@ def test_create_release_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_release(request) @@ -9128,26 +9168,28 @@ async def test_create_release_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_release - ] = mock_object + ] = mock_rpc request = {} await client.create_release(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9508,22 +9550,23 @@ async def test_abandon_release_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.abandon_release - ] = mock_object + ] = mock_rpc request = {} await client.abandon_release(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.abandon_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9708,11 +9751,11 @@ async def test_abandon_release_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ApproveRolloutRequest, + cloud_deploy.CreateDeployPolicyRequest, dict, ], ) -def test_approve_rollout(request_type, transport: str = "grpc"): +def test_create_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9723,22 +9766,24 @@ def test_approve_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ApproveRolloutResponse() - response = client.approve_rollout(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.ApproveRolloutResponse) + assert isinstance(response, future.Future) -def test_approve_rollout_empty_call(): +def test_create_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -9747,17 +9792,19 @@ def test_approve_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.approve_rollout() + client.create_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ApproveRolloutRequest() + assert args[0] == cloud_deploy.CreateDeployPolicyRequest() -def test_approve_rollout_non_empty_request_with_auto_populated_field(): +def test_create_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -9768,24 +9815,30 @@ def test_approve_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.ApproveRolloutRequest( - name="name_value", + request = cloud_deploy.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.approve_rollout(request=request) + client.create_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ApproveRolloutRequest( - name="name_value", + assert args[0] == cloud_deploy.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + request_id="request_id_value", ) -def test_approve_rollout_use_cached_wrapped_rpc(): +def test_create_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9799,21 +9852,30 @@ def test_approve_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.approve_rollout in client._transport._wrapped_methods + assert ( + client._transport.create_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.approve_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_deploy_policy + ] = mock_rpc request = {} - client.approve_rollout(request) + client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.approve_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9821,7 +9883,7 @@ def test_approve_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_approve_rollout_empty_call_async(): +async def test_create_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -9830,19 +9892,21 @@ async def test_approve_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.approve_rollout() + response = await client.create_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ApproveRolloutRequest() + assert args[0] == cloud_deploy.CreateDeployPolicyRequest() @pytest.mark.asyncio -async def test_approve_rollout_async_use_cached_wrapped_rpc( +async def test_create_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9859,32 +9923,38 @@ async def test_approve_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.approve_rollout + client._client._transport.create_deploy_policy in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = 
mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.approve_rollout - ] = mock_object + client._client._transport.create_deploy_policy + ] = mock_rpc request = {} - await client.approve_rollout(request) + await client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.approve_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_approve_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ApproveRolloutRequest +async def test_create_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9896,43 +9966,47 @@ async def test_approve_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.approve_rollout(request) + response = await client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.ApproveRolloutResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_approve_rollout_async_from_dict(): - await test_approve_rollout_async(request_type=dict) +async def test_create_deploy_policy_async_from_dict(): + await test_create_deploy_policy_async(request_type=dict) -def test_approve_rollout_field_headers(): +def test_create_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: - call.return_value = cloud_deploy.ApproveRolloutResponse() - client.approve_rollout(request) + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -9943,28 +10017,30 @@ def test_approve_rollout_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_approve_rollout_field_headers_async(): +async def test_create_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/op") ) - await client.approve_rollout(request) + await client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9975,35 +10051,45 @@ async def test_approve_rollout_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_approve_rollout_flattened(): +def test_create_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.ApproveRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.approve_rollout( - name="name_value", + client.create_deploy_policy( + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") + assert arg == mock_val + arg = args[0].deploy_policy_id + mock_val = "deploy_policy_id_value" assert arg == mock_val -def test_approve_rollout_flattened_error(): +def test_create_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10011,43 +10097,55 @@ def test_approve_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.approve_rollout( - cloud_deploy.ApproveRolloutRequest(), - name="name_value", + client.create_deploy_policy( + cloud_deploy.CreateDeployPolicyRequest(), + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) @pytest.mark.asyncio -async def test_approve_rollout_flattened_async(): +async def test_create_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ApproveRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.approve_rollout( - name="name_value", + response = await client.create_deploy_policy( + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") + assert arg == mock_val + arg = args[0].deploy_policy_id + mock_val = "deploy_policy_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_approve_rollout_flattened_error_async(): +async def test_create_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10055,20 +10153,22 @@ async def test_approve_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.approve_rollout( - cloud_deploy.ApproveRolloutRequest(), - name="name_value", + await client.create_deploy_policy( + cloud_deploy.CreateDeployPolicyRequest(), + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.AdvanceRolloutRequest, + cloud_deploy.UpdateDeployPolicyRequest, dict, ], ) -def test_advance_rollout(request_type, transport: str = "grpc"): +def test_update_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10079,22 +10179,24 @@ def test_advance_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AdvanceRolloutResponse() - response = client.advance_rollout(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) + assert isinstance(response, future.Future) -def test_advance_rollout_empty_call(): +def test_update_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -10103,17 +10205,19 @@ def test_advance_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.advance_rollout() + client.update_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.AdvanceRolloutRequest() + assert args[0] == cloud_deploy.UpdateDeployPolicyRequest() -def test_advance_rollout_non_empty_request_with_auto_populated_field(): +def test_update_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -10124,26 +10228,26 @@ def test_advance_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.AdvanceRolloutRequest( - name="name_value", - phase_id="phase_id_value", + request = cloud_deploy.UpdateDeployPolicyRequest( + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.advance_rollout(request=request) + client.update_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.AdvanceRolloutRequest( - name="name_value", - phase_id="phase_id_value", + assert args[0] == cloud_deploy.UpdateDeployPolicyRequest( + request_id="request_id_value", ) -def test_advance_rollout_use_cached_wrapped_rpc(): +def test_update_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10157,21 +10261,30 @@ def test_advance_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.advance_rollout in client._transport._wrapped_methods + assert ( + client._transport.update_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.advance_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_deploy_policy + ] = mock_rpc request = {} - client.advance_rollout(request) + client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.advance_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10179,7 +10292,7 @@ def test_advance_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_advance_rollout_empty_call_async(): +async def test_update_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -10188,19 +10301,21 @@ async def test_advance_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.advance_rollout() + response = await client.update_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.AdvanceRolloutRequest() + assert args[0] == cloud_deploy.UpdateDeployPolicyRequest() @pytest.mark.asyncio -async def test_advance_rollout_async_use_cached_wrapped_rpc( +async def test_update_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10217,32 +10332,38 @@ async def test_advance_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.advance_rollout + client._client._transport.update_deploy_policy in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + 
mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.advance_rollout - ] = mock_object + client._client._transport.update_deploy_policy + ] = mock_rpc request = {} - await client.advance_rollout(request) + await client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.advance_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_advance_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.AdvanceRolloutRequest +async def test_update_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10254,43 +10375,47 @@ async def test_advance_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.advance_rollout(request) + response = await client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_advance_rollout_async_from_dict(): - await test_advance_rollout_async(request_type=dict) +async def test_update_deploy_policy_async_from_dict(): + await test_update_deploy_policy_async(request_type=dict) -def test_advance_rollout_field_headers(): +def test_update_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() - request.name = "name_value" + request.deploy_policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: - call.return_value = cloud_deploy.AdvanceRolloutResponse() - client.advance_rollout(request) + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -10301,28 +10426,30 @@ def test_advance_rollout_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "deploy_policy.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_advance_rollout_field_headers_async(): +async def test_update_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() - request.name = "name_value" + request.deploy_policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/op") ) - await client.advance_rollout(request) + await client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10333,39 +10460,41 @@ async def test_advance_rollout_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "deploy_policy.name=name_value", ) in kw["metadata"] -def test_advance_rollout_flattened(): +def test_update_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AdvanceRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.advance_rollout( - name="name_value", - phase_id="phase_id_value", + client.update_deploy_policy( + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_advance_rollout_flattened_error(): +def test_update_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10373,48 +10502,50 @@ def test_advance_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.advance_rollout( - cloud_deploy.AdvanceRolloutRequest(), - name="name_value", - phase_id="phase_id_value", + client.update_deploy_policy( + cloud_deploy.UpdateDeployPolicyRequest(), + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_advance_rollout_flattened_async(): +async def test_update_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AdvanceRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.advance_rollout( - name="name_value", - phase_id="phase_id_value", + response = await client.update_deploy_policy( + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_advance_rollout_flattened_error_async(): +async def test_update_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10422,21 +10553,21 @@ async def test_advance_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.advance_rollout( - cloud_deploy.AdvanceRolloutRequest(), - name="name_value", - phase_id="phase_id_value", + await client.update_deploy_policy( + cloud_deploy.UpdateDeployPolicyRequest(), + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CancelRolloutRequest, + cloud_deploy.DeleteDeployPolicyRequest, dict, ], ) -def test_cancel_rollout(request_type, transport: str = "grpc"): +def test_delete_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10447,22 +10578,24 @@ def test_cancel_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.CancelRolloutResponse() - response = client.cancel_rollout(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.CancelRolloutResponse) + assert isinstance(response, future.Future) -def test_cancel_rollout_empty_call(): +def test_delete_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -10471,17 +10604,19 @@ def test_cancel_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.cancel_rollout() + client.delete_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelRolloutRequest() + assert args[0] == cloud_deploy.DeleteDeployPolicyRequest() -def test_cancel_rollout_non_empty_request_with_auto_populated_field(): +def test_delete_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudDeployClient( @@ -10492,24 +10627,30 @@ def test_cancel_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.CancelRolloutRequest( + request = cloud_deploy.DeleteDeployPolicyRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.cancel_rollout(request=request) + client.delete_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelRolloutRequest( + assert args[0] == cloud_deploy.DeleteDeployPolicyRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) -def test_cancel_rollout_use_cached_wrapped_rpc(): +def test_delete_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10523,21 +10664,30 @@ def test_cancel_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.cancel_rollout in client._transport._wrapped_methods + assert ( + client._transport.delete_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.cancel_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_deploy_policy + ] = mock_rpc request = {} - client.cancel_rollout(request) + client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.cancel_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10545,7 +10695,7 @@ def test_cancel_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_cancel_rollout_empty_call_async(): +async def test_delete_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -10554,19 +10704,21 @@ async def test_cancel_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_rollout() + response = await client.delete_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelRolloutRequest() + assert args[0] == cloud_deploy.DeleteDeployPolicyRequest() @pytest.mark.asyncio -async def test_cancel_rollout_async_use_cached_wrapped_rpc( +async def test_delete_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10583,32 +10735,38 @@ async def test_cancel_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.cancel_rollout + client._client._transport.delete_deploy_policy in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.cancel_rollout - ] = mock_object + client._client._transport.delete_deploy_policy + ] = mock_rpc request = {} - await client.cancel_rollout(request) + await client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.cancel_rollout(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_cancel_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.CancelRolloutRequest +async def test_delete_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10620,43 +10778,47 @@ async def test_cancel_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_rollout(request) + response = await client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.CancelRolloutResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_cancel_rollout_async_from_dict(): - await test_cancel_rollout_async(request_type=dict) +async def test_delete_deploy_policy_async_from_dict(): + await test_delete_deploy_policy_async(request_type=dict) -def test_cancel_rollout_field_headers(): +def test_delete_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: - call.return_value = cloud_deploy.CancelRolloutResponse() - client.cancel_rollout(request) + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10672,23 +10834,25 @@ def test_cancel_rollout_field_headers(): @pytest.mark.asyncio -async def test_cancel_rollout_field_headers_async(): +async def test_delete_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/op") ) - await client.cancel_rollout(request) + await client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10703,18 +10867,20 @@ async def test_cancel_rollout_field_headers_async(): ) in kw["metadata"] -def test_cancel_rollout_flattened(): +def test_delete_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_rollout( + client.delete_deploy_policy( name="name_value", ) @@ -10727,7 +10893,7 @@ def test_cancel_rollout_flattened(): assert arg == mock_val -def test_cancel_rollout_flattened_error(): +def test_delete_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10735,29 +10901,31 @@ def test_cancel_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.cancel_rollout( - cloud_deploy.CancelRolloutRequest(), + client.delete_deploy_policy( + cloud_deploy.DeleteDeployPolicyRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_cancel_rollout_flattened_async(): +async def test_delete_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_rollout( + response = await client.delete_deploy_policy( name="name_value", ) @@ -10771,7 +10939,7 @@ async def test_cancel_rollout_flattened_async(): @pytest.mark.asyncio -async def test_cancel_rollout_flattened_error_async(): +async def test_delete_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10779,8 +10947,8 @@ async def test_cancel_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.cancel_rollout( - cloud_deploy.CancelRolloutRequest(), + await client.delete_deploy_policy( + cloud_deploy.DeleteDeployPolicyRequest(), name="name_value", ) @@ -10788,11 +10956,11 @@ async def test_cancel_rollout_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListRolloutsRequest, + cloud_deploy.ListDeployPoliciesRequest, dict, ], ) -def test_list_rollouts(request_type, transport: str = "grpc"): +def test_list_deploy_policies(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10803,27 +10971,29 @@ def test_list_rollouts(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListRolloutsResponse( + call.return_value = cloud_deploy.ListDeployPoliciesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_rollouts(request) + response = client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRolloutsPager) + assert isinstance(response, pagers.ListDeployPoliciesPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_rollouts_empty_call(): +def test_list_deploy_policies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -10832,17 +11002,19 @@ def test_list_rollouts_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_rollouts() + client.list_deploy_policies() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListRolloutsRequest() + assert args[0] == cloud_deploy.ListDeployPoliciesRequest() -def test_list_rollouts_non_empty_request_with_auto_populated_field(): +def test_list_deploy_policies_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -10853,7 +11025,7 @@ def test_list_rollouts_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.ListRolloutsRequest( + request = cloud_deploy.ListDeployPoliciesRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -10861,14 +11033,16 @@ def test_list_rollouts_non_empty_request_with_auto_populated_field(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_rollouts(request=request) + client.list_deploy_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListRolloutsRequest( + assert args[0] == cloud_deploy.ListDeployPoliciesRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -10876,7 +11050,7 @@ def test_list_rollouts_non_empty_request_with_auto_populated_field(): ) -def test_list_rollouts_use_cached_wrapped_rpc(): +def test_list_deploy_policies_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10890,21 +11064,25 @@ def test_list_rollouts_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_rollouts in client._transport._wrapped_methods + assert ( + client._transport.list_deploy_policies in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_rollouts] = mock_rpc - request = {} - client.list_rollouts(request) + client._transport._wrapped_methods[ + client._transport.list_deploy_policies + ] = mock_rpc + request = {} + client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_rollouts(request) + client.list_deploy_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10912,7 +11090,7 @@ def test_list_rollouts_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_rollouts_empty_call_async(): +async def test_list_deploy_policies_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -10921,22 +11099,24 @@ async def test_list_rollouts_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse( + cloud_deploy.ListDeployPoliciesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_rollouts() + response = await client.list_deploy_policies() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListRolloutsRequest() + assert args[0] == cloud_deploy.ListDeployPoliciesRequest() @pytest.mark.asyncio -async def test_list_rollouts_async_use_cached_wrapped_rpc( +async def test_list_deploy_policies_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10953,32 +11133,33 @@ async def test_list_rollouts_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_rollouts + client._client._transport.list_deploy_policies in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_rollouts - ] = mock_object + client._client._transport.list_deploy_policies + ] = mock_rpc request = {} - await client.list_rollouts(request) + await client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_rollouts(request) + await client.list_deploy_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_rollouts_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListRolloutsRequest +async def test_list_deploy_policies_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListDeployPoliciesRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10990,48 +11171,52 @@ async def test_list_rollouts_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse( + cloud_deploy.ListDeployPoliciesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_rollouts(request) + response = await client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRolloutsAsyncPager) + assert isinstance(response, pagers.ListDeployPoliciesAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_rollouts_async_from_dict(): - await test_list_rollouts_async(request_type=dict) +async def test_list_deploy_policies_async_from_dict(): + await test_list_deploy_policies_async(request_type=dict) -def test_list_rollouts_field_headers(): +def test_list_deploy_policies_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: - call.return_value = cloud_deploy.ListRolloutsResponse() - client.list_rollouts(request) + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: + call.return_value = cloud_deploy.ListDeployPoliciesResponse() + client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11047,23 +11232,25 @@ def test_list_rollouts_field_headers(): @pytest.mark.asyncio -async def test_list_rollouts_field_headers_async(): +async def test_list_deploy_policies_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse() + cloud_deploy.ListDeployPoliciesResponse() ) - await client.list_rollouts(request) + await client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11078,18 +11265,20 @@ async def test_list_rollouts_field_headers_async(): ) in kw["metadata"] -def test_list_rollouts_flattened(): +def test_list_deploy_policies_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListRolloutsResponse() + call.return_value = cloud_deploy.ListDeployPoliciesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_rollouts( + client.list_deploy_policies( parent="parent_value", ) @@ -11102,7 +11291,7 @@ def test_list_rollouts_flattened(): assert arg == mock_val -def test_list_rollouts_flattened_error(): +def test_list_deploy_policies_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11110,29 +11299,31 @@ def test_list_rollouts_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_rollouts( - cloud_deploy.ListRolloutsRequest(), + client.list_deploy_policies( + cloud_deploy.ListDeployPoliciesRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_rollouts_flattened_async(): +async def test_list_deploy_policies_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListRolloutsResponse() + call.return_value = cloud_deploy.ListDeployPoliciesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse() + cloud_deploy.ListDeployPoliciesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_rollouts( + response = await client.list_deploy_policies( parent="parent_value", ) @@ -11146,7 +11337,7 @@ async def test_list_rollouts_flattened_async(): @pytest.mark.asyncio -async def test_list_rollouts_flattened_error_async(): +async def test_list_deploy_policies_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11154,44 +11345,46 @@ async def test_list_rollouts_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_rollouts( - cloud_deploy.ListRolloutsRequest(), + await client.list_deploy_policies( + cloud_deploy.ListDeployPoliciesRequest(), parent="parent_value", ) -def test_list_rollouts_pager(transport_name: str = "grpc"): +def test_list_deploy_policies_pager(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, @@ -11203,7 +11396,7 @@ def test_list_rollouts_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_rollouts(request={}, retry=retry, timeout=timeout) + pager = client.list_deploy_policies(request={}, retry=retry, 
timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -11211,89 +11404,93 @@ def test_list_rollouts_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.Rollout) for i in results) + assert all(isinstance(i, cloud_deploy.DeployPolicy) for i in results) -def test_list_rollouts_pages(transport_name: str = "grpc"): +def test_list_deploy_policies_pages(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, ) - pages = list(client.list_rollouts(request={}).pages) + pages = list(client.list_deploy_policies(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert 
page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_rollouts_async_pager(): +async def test_list_deploy_policies_async_pager(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_deploy_policies), + "__call__", + new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, ) - async_pager = await client.list_rollouts( + async_pager = await client.list_deploy_policies( request={}, ) assert async_pager.next_page_token == "abc" @@ -11302,43 +11499,45 @@ async def test_list_rollouts_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.Rollout) for i in responses) + assert all(isinstance(i, cloud_deploy.DeployPolicy) for i in responses) @pytest.mark.asyncio -async def test_list_rollouts_async_pages(): +async def 
test_list_deploy_policies_async_pages(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_deploy_policies), + "__call__", + new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, @@ -11347,7 +11546,7 @@ async def test_list_rollouts_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_rollouts(request={}) + await client.list_deploy_policies(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -11357,11 +11556,11 @@ async def test_list_rollouts_async_pages(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetRolloutRequest, + 
cloud_deploy.GetDeployPolicyRequest, dict, ], ) -def test_get_rollout(request_type, transport: str = "grpc"): +def test_get_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11372,52 +11571,35 @@ def test_get_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Rollout( + call.return_value = cloud_deploy.DeployPolicy( name="name_value", uid="uid_value", description="description_value", - target_id="target_id_value", - approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, - state=cloud_deploy.Rollout.State.SUCCEEDED, - failure_reason="failure_reason_value", - deploying_build="deploying_build_value", + suspended=True, etag="etag_value", - deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, - controller_rollout="controller_rollout_value", - rollback_of_rollout="rollback_of_rollout_value", - rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) - response = client.get_rollout(request) + response = client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Rollout) + assert isinstance(response, cloud_deploy.DeployPolicy) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" - assert response.target_id == "target_id_value" - assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL - assert response.state == cloud_deploy.Rollout.State.SUCCEEDED - assert response.failure_reason == "failure_reason_value" - assert response.deploying_build == "deploying_build_value" + assert response.suspended is True assert response.etag == "etag_value" - assert ( - response.deploy_failure_cause - == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE - ) - assert response.controller_rollout == "controller_rollout_value" - assert response.rollback_of_rollout == "rollback_of_rollout_value" - assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] -def test_get_rollout_empty_call(): +def test_get_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -11426,17 +11608,19 @@ def test_get_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_rollout() + client.get_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetRolloutRequest() + assert args[0] == cloud_deploy.GetDeployPolicyRequest() -def test_get_rollout_non_empty_request_with_auto_populated_field(): +def test_get_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -11447,24 +11631,26 @@ def test_get_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetRolloutRequest( + request = cloud_deploy.GetDeployPolicyRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_rollout(request=request) + client.get_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetRolloutRequest( + assert args[0] == cloud_deploy.GetDeployPolicyRequest( name="name_value", ) -def test_get_rollout_use_cached_wrapped_rpc(): +def test_get_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11478,21 +11664,23 @@ def test_get_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_rollout in client._transport._wrapped_methods + assert client._transport.get_deploy_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_deploy_policy + ] = mock_rpc request = {} - client.get_rollout(request) + client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_rollout(request) + client.get_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11500,7 +11688,7 @@ def test_get_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_rollout_empty_call_async(): +async def test_get_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -11509,33 +11697,27 @@ async def test_get_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout( + cloud_deploy.DeployPolicy( name="name_value", uid="uid_value", description="description_value", - target_id="target_id_value", - approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, - state=cloud_deploy.Rollout.State.SUCCEEDED, - failure_reason="failure_reason_value", - deploying_build="deploying_build_value", + suspended=True, etag="etag_value", - deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, - controller_rollout="controller_rollout_value", - rollback_of_rollout="rollback_of_rollout_value", - rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_rollout() + response = await client.get_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetRolloutRequest() + assert args[0] == cloud_deploy.GetDeployPolicyRequest() @pytest.mark.asyncio -async def test_get_rollout_async_use_cached_wrapped_rpc( +async def test_get_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11552,32 +11734,33 @@ async def test_get_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_rollout + client._client._transport.get_deploy_policy in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = 
mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_rollout - ] = mock_object + client._client._transport.get_deploy_policy + ] = mock_rpc request = {} - await client.get_rollout(request) + await client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_rollout(request) + await client.get_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetRolloutRequest +async def test_get_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11589,73 +11772,58 @@ async def test_get_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout( + cloud_deploy.DeployPolicy( name="name_value", uid="uid_value", description="description_value", - target_id="target_id_value", - approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, - state=cloud_deploy.Rollout.State.SUCCEEDED, - failure_reason="failure_reason_value", - deploying_build="deploying_build_value", + suspended=True, etag="etag_value", - deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, - controller_rollout="controller_rollout_value", - rollback_of_rollout="rollback_of_rollout_value", - rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_rollout(request) + response = await client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Rollout) + assert isinstance(response, cloud_deploy.DeployPolicy) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" - assert response.target_id == "target_id_value" - assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL - assert response.state == cloud_deploy.Rollout.State.SUCCEEDED - assert response.failure_reason == "failure_reason_value" - assert response.deploying_build == "deploying_build_value" + assert response.suspended is True assert response.etag == "etag_value" - assert ( - response.deploy_failure_cause - == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE - ) - assert response.controller_rollout == "controller_rollout_value" - assert response.rollback_of_rollout == "rollback_of_rollout_value" - assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] @pytest.mark.asyncio -async def test_get_rollout_async_from_dict(): - await test_get_rollout_async(request_type=dict) +async def test_get_deploy_policy_async_from_dict(): + await test_get_deploy_policy_async(request_type=dict) -def test_get_rollout_field_headers(): +def test_get_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: - call.return_value = cloud_deploy.Rollout() - client.get_rollout(request) + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: + call.return_value = cloud_deploy.DeployPolicy() + client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11671,23 +11839,25 @@ def test_get_rollout_field_headers(): @pytest.mark.asyncio -async def test_get_rollout_field_headers_async(): +async def test_get_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout() + cloud_deploy.DeployPolicy() ) - await client.get_rollout(request) + await client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11702,18 +11872,20 @@ async def test_get_rollout_field_headers_async(): ) in kw["metadata"] -def test_get_rollout_flattened(): +def test_get_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Rollout() + call.return_value = cloud_deploy.DeployPolicy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_rollout( + client.get_deploy_policy( name="name_value", ) @@ -11726,7 +11898,7 @@ def test_get_rollout_flattened(): assert arg == mock_val -def test_get_rollout_flattened_error(): +def test_get_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11734,29 +11906,31 @@ def test_get_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_rollout( - cloud_deploy.GetRolloutRequest(), + client.get_deploy_policy( + cloud_deploy.GetDeployPolicyRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_rollout_flattened_async(): +async def test_get_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Rollout() + call.return_value = cloud_deploy.DeployPolicy() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout() + cloud_deploy.DeployPolicy() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_rollout( + response = await client.get_deploy_policy( name="name_value", ) @@ -11770,7 +11944,7 @@ async def test_get_rollout_flattened_async(): @pytest.mark.asyncio -async def test_get_rollout_flattened_error_async(): +async def test_get_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11778,8 +11952,8 @@ async def test_get_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_rollout( - cloud_deploy.GetRolloutRequest(), + await client.get_deploy_policy( + cloud_deploy.GetDeployPolicyRequest(), name="name_value", ) @@ -11787,11 +11961,11 @@ async def test_get_rollout_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateRolloutRequest, + cloud_deploy.ApproveRolloutRequest, dict, ], ) -def test_create_rollout(request_type, transport: str = "grpc"): +def test_approve_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11802,22 +11976,22 @@ def test_create_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_rollout(request) + call.return_value = cloud_deploy.ApproveRolloutResponse() + response = client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.ApproveRolloutResponse) -def test_create_rollout_empty_call(): +def test_approve_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -11826,17 +12000,17 @@ def test_create_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_rollout() + client.approve_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateRolloutRequest() + assert args[0] == cloud_deploy.ApproveRolloutRequest() -def test_create_rollout_non_empty_request_with_auto_populated_field(): +def test_approve_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -11847,30 +12021,24 @@ def test_create_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.CreateRolloutRequest( - parent="parent_value", - rollout_id="rollout_id_value", - request_id="request_id_value", - starting_phase_id="starting_phase_id_value", + request = cloud_deploy.ApproveRolloutRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_rollout(request=request) + client.approve_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateRolloutRequest( - parent="parent_value", - rollout_id="rollout_id_value", - request_id="request_id_value", - starting_phase_id="starting_phase_id_value", + assert args[0] == cloud_deploy.ApproveRolloutRequest( + name="name_value", ) -def test_create_rollout_use_cached_wrapped_rpc(): +def test_approve_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11884,25 +12052,21 @@ def test_create_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_rollout in client._transport._wrapped_methods + assert client._transport.approve_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_rollout] = mock_rpc + client._transport._wrapped_methods[client._transport.approve_rollout] = mock_rpc request = {} - client.create_rollout(request) + client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_rollout(request) + client.approve_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11910,7 +12074,7 @@ def test_create_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_rollout_empty_call_async(): +async def test_approve_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -11919,19 +12083,19 @@ async def test_create_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ApproveRolloutResponse() ) - response = await client.create_rollout() + response = await client.approve_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateRolloutRequest() + assert args[0] == cloud_deploy.ApproveRolloutRequest() @pytest.mark.asyncio -async def test_create_rollout_async_use_cached_wrapped_rpc( +async def test_approve_rollout_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11948,36 +12112,33 @@ async def test_create_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_rollout + client._client._transport.approve_rollout in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_rollout - ] = mock_object + client._client._transport.approve_rollout + ] = mock_rpc request = {} - await client.create_rollout(request) + await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + assert mock_rpc.call_count == 1 - await client.create_rollout(request) + await client.approve_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateRolloutRequest +async def test_approve_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ApproveRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11989,43 +12150,43 @@ async def test_create_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ApproveRolloutResponse() ) - response = await client.create_rollout(request) + response = await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.ApproveRolloutResponse) @pytest.mark.asyncio -async def test_create_rollout_async_from_dict(): - await test_create_rollout_async(request_type=dict) +async def test_approve_rollout_async_from_dict(): + await test_approve_rollout_async(request_type=dict) -def test_create_rollout_field_headers(): +def test_approve_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_rollout(request) + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + call.return_value = cloud_deploy.ApproveRolloutResponse() + client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12036,28 +12197,28 @@ def test_create_rollout_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_rollout_field_headers_async(): +async def test_approve_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + cloud_deploy.ApproveRolloutResponse() ) - await client.create_rollout(request) + await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12068,43 +12229,35 @@ async def test_create_rollout_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_rollout_flattened(): +def test_approve_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ApproveRolloutResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_rollout( - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + client.approve_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].rollout - mock_val = cloud_deploy.Rollout(name="name_value") - assert arg == mock_val - arg = args[0].rollout_id - mock_val = "rollout_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_rollout_flattened_error(): +def test_approve_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12112,53 +12265,43 @@ def test_create_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_rollout( - cloud_deploy.CreateRolloutRequest(), - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + client.approve_rollout( + cloud_deploy.ApproveRolloutRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_rollout_flattened_async(): +async def test_approve_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ApproveRolloutResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ApproveRolloutResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_rollout( - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + response = await client.approve_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].rollout - mock_val = cloud_deploy.Rollout(name="name_value") - assert arg == mock_val - arg = args[0].rollout_id - mock_val = "rollout_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_rollout_flattened_error_async(): +async def test_approve_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12166,22 +12309,20 @@ async def test_create_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_rollout( - cloud_deploy.CreateRolloutRequest(), - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + await client.approve_rollout( + cloud_deploy.ApproveRolloutRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.IgnoreJobRequest, + cloud_deploy.AdvanceRolloutRequest, dict, ], ) -def test_ignore_job(request_type, transport: str = "grpc"): +def test_advance_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12192,22 +12333,22 @@ def test_ignore_job(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.IgnoreJobResponse() - response = client.ignore_job(request) + call.return_value = cloud_deploy.AdvanceRolloutResponse() + response = client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.IgnoreJobResponse) + assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) -def test_ignore_job_empty_call(): +def test_advance_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -12216,17 +12357,17 @@ def test_ignore_job_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.ignore_job() + client.advance_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.IgnoreJobRequest() + assert args[0] == cloud_deploy.AdvanceRolloutRequest() -def test_ignore_job_non_empty_request_with_auto_populated_field(): +def test_advance_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudDeployClient( @@ -12237,28 +12378,26 @@ def test_ignore_job_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.IgnoreJobRequest( - rollout="rollout_value", + request = cloud_deploy.AdvanceRolloutRequest( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.ignore_job(request=request) + client.advance_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.IgnoreJobRequest( - rollout="rollout_value", + assert args[0] == cloud_deploy.AdvanceRolloutRequest( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) -def test_ignore_job_use_cached_wrapped_rpc(): +def test_advance_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12272,21 +12411,21 @@ def test_ignore_job_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.ignore_job in client._transport._wrapped_methods + assert client._transport.advance_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.ignore_job] = mock_rpc + client._transport._wrapped_methods[client._transport.advance_rollout] = mock_rpc request = {} - client.ignore_job(request) + client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.ignore_job(request) + client.advance_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12294,7 +12433,7 @@ def test_ignore_job_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_ignore_job_empty_call_async(): +async def test_advance_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -12303,19 +12442,21 @@ async def test_ignore_job_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) - response = await client.ignore_job() + response = await client.advance_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.IgnoreJobRequest() + assert args[0] == cloud_deploy.AdvanceRolloutRequest() @pytest.mark.asyncio -async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_advance_rollout_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12330,32 +12471,33 @@ async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.ignore_job + client._client._transport.advance_rollout in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.ignore_job - ] = mock_object + client._client._transport.advance_rollout + ] = mock_rpc request = {} - await client.ignore_job(request) + await client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.ignore_job(request) + await client.advance_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_ignore_job_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.IgnoreJobRequest +async def test_advance_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.AdvanceRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12367,43 +12509,43 @@ async def test_ignore_job_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) - response = await client.ignore_job(request) + response = await client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.IgnoreJobResponse) + assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) @pytest.mark.asyncio -async def test_ignore_job_async_from_dict(): - await test_ignore_job_async(request_type=dict) +async def test_advance_rollout_async_from_dict(): + await test_advance_rollout_async(request_type=dict) -def test_ignore_job_field_headers(): +def test_advance_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: - call.return_value = cloud_deploy.IgnoreJobResponse() - client.ignore_job(request) + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + call.return_value = cloud_deploy.AdvanceRolloutResponse() + client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12414,28 +12556,28 @@ def test_ignore_job_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_ignore_job_field_headers_async(): +async def test_advance_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) - await client.ignore_job(request) + await client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12446,43 +12588,39 @@ async def test_ignore_job_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] -def test_ignore_job_flattened(): +def test_advance_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.IgnoreJobResponse() + call.return_value = cloud_deploy.AdvanceRolloutResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.ignore_job( - rollout="rollout_value", + client.advance_rollout( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val arg = args[0].phase_id mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" - assert arg == mock_val -def test_ignore_job_flattened_error(): +def test_advance_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12490,53 +12628,48 @@ def test_ignore_job_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.ignore_job( - cloud_deploy.IgnoreJobRequest(), - rollout="rollout_value", + client.advance_rollout( + cloud_deploy.AdvanceRolloutRequest(), + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) @pytest.mark.asyncio -async def test_ignore_job_flattened_async(): +async def test_advance_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.IgnoreJobResponse() + call.return_value = cloud_deploy.AdvanceRolloutResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.ignore_job( - rollout="rollout_value", + response = await client.advance_rollout( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val arg = args[0].phase_id mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" - assert arg == mock_val @pytest.mark.asyncio -async def test_ignore_job_flattened_error_async(): +async def test_advance_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12544,22 +12677,21 @@ async def test_ignore_job_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.ignore_job( - cloud_deploy.IgnoreJobRequest(), - rollout="rollout_value", + await client.advance_rollout( + cloud_deploy.AdvanceRolloutRequest(), + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.RetryJobRequest, + cloud_deploy.CancelRolloutRequest, dict, ], ) -def test_retry_job(request_type, transport: str = "grpc"): +def test_cancel_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12570,22 +12702,22 @@ def test_retry_job(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.RetryJobResponse() - response = client.retry_job(request) + call.return_value = cloud_deploy.CancelRolloutResponse() + response = client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.RetryJobResponse) + assert isinstance(response, cloud_deploy.CancelRolloutResponse) -def test_retry_job_empty_call(): +def test_cancel_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -12594,17 +12726,17 @@ def test_retry_job_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.retry_job() + client.cancel_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.RetryJobRequest() + assert args[0] == cloud_deploy.CancelRolloutRequest() -def test_retry_job_non_empty_request_with_auto_populated_field(): +def test_cancel_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudDeployClient( @@ -12615,28 +12747,24 @@ def test_retry_job_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.RetryJobRequest( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + request = cloud_deploy.CancelRolloutRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.retry_job(request=request) + client.cancel_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.RetryJobRequest( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + assert args[0] == cloud_deploy.CancelRolloutRequest( + name="name_value", ) -def test_retry_job_use_cached_wrapped_rpc(): +def test_cancel_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12650,21 +12778,21 @@ def test_retry_job_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.retry_job in client._transport._wrapped_methods + assert client._transport.cancel_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.retry_job] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_rollout] = mock_rpc request = {} - client.retry_job(request) + client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.retry_job(request) + client.cancel_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12672,7 +12800,7 @@ def test_retry_job_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_retry_job_empty_call_async(): +async def test_cancel_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -12681,19 +12809,21 @@ async def test_retry_job_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) - response = await client.retry_job() + response = await client.cancel_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.RetryJobRequest() + assert args[0] == cloud_deploy.CancelRolloutRequest() @pytest.mark.asyncio -async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_cancel_rollout_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12708,32 +12838,33 @@ async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # Ensure method has been cached assert ( - client._client._transport.retry_job + client._client._transport.cancel_rollout in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.retry_job - ] = mock_object + client._client._transport.cancel_rollout + ] = mock_rpc request = {} - await client.retry_job(request) + await client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.retry_job(request) + await client.cancel_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_retry_job_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.RetryJobRequest +async def test_cancel_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CancelRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12745,43 +12876,43 @@ async def test_retry_job_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) - response = await client.retry_job(request) + response = await client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.RetryJobResponse) + assert isinstance(response, cloud_deploy.CancelRolloutResponse) @pytest.mark.asyncio -async def test_retry_job_async_from_dict(): - await test_retry_job_async(request_type=dict) +async def test_cancel_rollout_async_from_dict(): + await test_cancel_rollout_async(request_type=dict) -def test_retry_job_field_headers(): +def test_cancel_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: - call.return_value = cloud_deploy.RetryJobResponse() - client.retry_job(request) + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + call.return_value = cloud_deploy.CancelRolloutResponse() + client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12792,28 +12923,28 @@ def test_retry_job_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_retry_job_field_headers_async(): +async def test_cancel_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) - await client.retry_job(request) + await client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12824,43 +12955,35 @@ async def test_retry_job_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] -def test_retry_job_flattened(): +def test_cancel_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.RetryJobResponse() + call.return_value = cloud_deploy.CancelRolloutResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.retry_job( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + client.cancel_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" - assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" - assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_retry_job_flattened_error(): +def test_cancel_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12868,53 +12991,43 @@ def test_retry_job_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.retry_job( - cloud_deploy.RetryJobRequest(), - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + client.cancel_rollout( + cloud_deploy.CancelRolloutRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_retry_job_flattened_async(): +async def test_cancel_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.RetryJobResponse() + call.return_value = cloud_deploy.CancelRolloutResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.retry_job( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + response = await client.cancel_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" - assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" - assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_retry_job_flattened_error_async(): +async def test_cancel_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12922,22 +13035,20 @@ async def test_retry_job_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.retry_job( - cloud_deploy.RetryJobRequest(), - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + await client.cancel_rollout( + cloud_deploy.CancelRolloutRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListJobRunsRequest, + cloud_deploy.ListRolloutsRequest, dict, ], ) -def test_list_job_runs(request_type, transport: str = "grpc"): +def test_list_rollouts(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12948,27 +13059,27 @@ def test_list_job_runs(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListJobRunsResponse( + call.return_value = cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_job_runs(request) + response = client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobRunsPager) + assert isinstance(response, pagers.ListRolloutsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_job_runs_empty_call(): +def test_list_rollouts_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -12977,17 +13088,17 @@ def test_list_job_runs_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_job_runs() + client.list_rollouts() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListJobRunsRequest() + assert args[0] == cloud_deploy.ListRolloutsRequest() -def test_list_job_runs_non_empty_request_with_auto_populated_field(): +def test_list_rollouts_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -12998,7 +13109,7 @@ def test_list_job_runs_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.ListJobRunsRequest( + request = cloud_deploy.ListRolloutsRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -13006,14 +13117,14 @@ def test_list_job_runs_non_empty_request_with_auto_populated_field(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_job_runs(request=request) + client.list_rollouts(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListJobRunsRequest( + assert args[0] == cloud_deploy.ListRolloutsRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -13021,7 +13132,7 @@ def test_list_job_runs_non_empty_request_with_auto_populated_field(): ) -def test_list_job_runs_use_cached_wrapped_rpc(): +def test_list_rollouts_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13035,21 +13146,21 @@ def test_list_job_runs_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_job_runs in client._transport._wrapped_methods + assert client._transport.list_rollouts in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_job_runs] = mock_rpc + client._transport._wrapped_methods[client._transport.list_rollouts] = mock_rpc request = {} - client.list_job_runs(request) + client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_job_runs(request) + client.list_rollouts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13057,7 +13168,7 @@ def test_list_job_runs_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_job_runs_empty_call_async(): +async def test_list_rollouts_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -13066,22 +13177,22 @@ async def test_list_job_runs_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse( + cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_job_runs() + response = await client.list_rollouts() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListJobRunsRequest() + assert args[0] == cloud_deploy.ListRolloutsRequest() @pytest.mark.asyncio -async def test_list_job_runs_async_use_cached_wrapped_rpc( +async def test_list_rollouts_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13098,32 +13209,33 @@ async def test_list_job_runs_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_job_runs + client._client._transport.list_rollouts in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_job_runs - ] = mock_object + client._client._transport.list_rollouts + ] = mock_rpc request = {} - await client.list_job_runs(request) + await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_job_runs(request) + await client.list_rollouts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_job_runs_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListJobRunsRequest +async def test_list_rollouts_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListRolloutsRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13135,48 +13247,48 @@ async def test_list_job_runs_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse( + cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_job_runs(request) + response = await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListJobRunsAsyncPager) + assert isinstance(response, pagers.ListRolloutsAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_job_runs_async_from_dict(): - await test_list_job_runs_async(request_type=dict) +async def test_list_rollouts_async_from_dict(): + await test_list_rollouts_async(request_type=dict) -def test_list_job_runs_field_headers(): +def test_list_rollouts_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: - call.return_value = cloud_deploy.ListJobRunsResponse() - client.list_job_runs(request) + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + call.return_value = cloud_deploy.ListRolloutsResponse() + client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13192,23 +13304,23 @@ def test_list_job_runs_field_headers(): @pytest.mark.asyncio -async def test_list_job_runs_field_headers_async(): +async def test_list_rollouts_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse() + cloud_deploy.ListRolloutsResponse() ) - await client.list_job_runs(request) + await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13223,18 +13335,18 @@ async def test_list_job_runs_field_headers_async(): ) in kw["metadata"] -def test_list_job_runs_flattened(): +def test_list_rollouts_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListJobRunsResponse() + call.return_value = cloud_deploy.ListRolloutsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_job_runs( + client.list_rollouts( parent="parent_value", ) @@ -13247,7 +13359,7 @@ def test_list_job_runs_flattened(): assert arg == mock_val -def test_list_job_runs_flattened_error(): +def test_list_rollouts_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13255,29 +13367,29 @@ def test_list_job_runs_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_job_runs( - cloud_deploy.ListJobRunsRequest(), + client.list_rollouts( + cloud_deploy.ListRolloutsRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_job_runs_flattened_async(): +async def test_list_rollouts_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListJobRunsResponse() + call.return_value = cloud_deploy.ListRolloutsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse() + cloud_deploy.ListRolloutsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_job_runs( + response = await client.list_rollouts( parent="parent_value", ) @@ -13291,7 +13403,7 @@ async def test_list_job_runs_flattened_async(): @pytest.mark.asyncio -async def test_list_job_runs_flattened_error_async(): +async def test_list_rollouts_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13299,44 +13411,44 @@ async def test_list_job_runs_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_job_runs( - cloud_deploy.ListJobRunsRequest(), + await client.list_rollouts( + cloud_deploy.ListRolloutsRequest(), parent="parent_value", ) -def test_list_job_runs_pager(transport_name: str = "grpc"): +def test_list_rollouts_pager(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, @@ -13348,7 +13460,7 @@ def test_list_job_runs_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_job_runs(request={}, retry=retry, timeout=timeout) + pager = client.list_rollouts(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -13356,89 +13468,89 @@ def 
test_list_job_runs_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.JobRun) for i in results) + assert all(isinstance(i, cloud_deploy.Rollout) for i in results) -def test_list_job_runs_pages(transport_name: str = "grpc"): +def test_list_rollouts_pages(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + ], + next_page_token="abc", + ), + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, ) - pages = list(client.list_job_runs(request={}).pages) + pages = list(client.list_rollouts(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_job_runs_async_pager(): +async def test_list_rollouts_async_pager(): client = CloudDeployAsyncClient( 
credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, ) - async_pager = await client.list_job_runs( + async_pager = await client.list_rollouts( request={}, ) assert async_pager.next_page_token == "abc" @@ -13447,43 +13559,43 @@ async def test_list_job_runs_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.JobRun) for i in responses) + assert all(isinstance(i, cloud_deploy.Rollout) for i in responses) @pytest.mark.asyncio -async def test_list_job_runs_async_pages(): +async def test_list_rollouts_async_pages(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, @@ -13492,7 +13604,7 @@ async def test_list_job_runs_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_job_runs(request={}) + await client.list_rollouts(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -13502,11 +13614,11 @@ async def test_list_job_runs_async_pages(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetJobRunRequest, + cloud_deploy.GetRolloutRequest, dict, ], ) -def test_get_job_run(request_type, transport: str = "grpc"): +def test_get_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13517,35 +13629,52 @@ def 
test_get_job_run(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.JobRun( + call.return_value = cloud_deploy.Rollout( name="name_value", uid="uid_value", - phase_id="phase_id_value", - job_id="job_id_value", - state=cloud_deploy.JobRun.State.IN_PROGRESS, + description="description_value", + target_id="target_id_value", + approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, + state=cloud_deploy.Rollout.State.SUCCEEDED, + failure_reason="failure_reason_value", + deploying_build="deploying_build_value", etag="etag_value", + deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, + controller_rollout="controller_rollout_value", + rollback_of_rollout="rollback_of_rollout_value", + rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) - response = client.get_job_run(request) + response = client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.JobRun) + assert isinstance(response, cloud_deploy.Rollout) assert response.name == "name_value" assert response.uid == "uid_value" - assert response.phase_id == "phase_id_value" - assert response.job_id == "job_id_value" - assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.description == "description_value" + assert response.target_id == "target_id_value" + assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL + assert response.state == cloud_deploy.Rollout.State.SUCCEEDED + assert response.failure_reason == "failure_reason_value" + assert response.deploying_build == "deploying_build_value" assert response.etag == "etag_value" + assert ( + response.deploy_failure_cause + == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE + ) + assert response.controller_rollout == "controller_rollout_value" + assert response.rollback_of_rollout == "rollback_of_rollout_value" + assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] -def test_get_job_run_empty_call(): +def test_get_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -13554,17 +13683,17 @@ def test_get_job_run_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_job_run() + client.get_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetJobRunRequest() + assert args[0] == cloud_deploy.GetRolloutRequest() -def test_get_job_run_non_empty_request_with_auto_populated_field(): +def test_get_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -13575,24 +13704,24 @@ def test_get_job_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetJobRunRequest( + request = cloud_deploy.GetRolloutRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_job_run(request=request) + client.get_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetJobRunRequest( + assert args[0] == cloud_deploy.GetRolloutRequest( name="name_value", ) -def test_get_job_run_use_cached_wrapped_rpc(): +def test_get_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13606,21 +13735,21 @@ def test_get_job_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_job_run in client._transport._wrapped_methods + assert client._transport.get_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_job_run] = mock_rpc + client._transport._wrapped_methods[client._transport.get_rollout] = mock_rpc request = {} - client.get_job_run(request) + client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_job_run(request) + client.get_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13628,7 +13757,7 @@ def test_get_job_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_job_run_empty_call_async(): +async def test_get_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -13637,26 +13766,33 @@ async def test_get_job_run_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.JobRun( + cloud_deploy.Rollout( name="name_value", uid="uid_value", - phase_id="phase_id_value", - job_id="job_id_value", - state=cloud_deploy.JobRun.State.IN_PROGRESS, + description="description_value", + target_id="target_id_value", + approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, + state=cloud_deploy.Rollout.State.SUCCEEDED, + failure_reason="failure_reason_value", + deploying_build="deploying_build_value", etag="etag_value", + deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, + controller_rollout="controller_rollout_value", + rollback_of_rollout="rollback_of_rollout_value", + rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_job_run() + response = await client.get_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetJobRunRequest() + assert args[0] == cloud_deploy.GetRolloutRequest() @pytest.mark.asyncio -async def test_get_job_run_async_use_cached_wrapped_rpc( +async def test_get_rollout_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13673,32 +13809,33 @@ async def test_get_job_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_job_run + client._client._transport.get_rollout in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_job_run - ] = mock_object + 
client._client._transport.get_rollout + ] = mock_rpc request = {} - await client.get_job_run(request) + await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_job_run(request) + await client.get_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_job_run_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetJobRunRequest +async def test_get_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13710,56 +13847,73 @@ async def test_get_job_run_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.JobRun( + cloud_deploy.Rollout( name="name_value", uid="uid_value", - phase_id="phase_id_value", - job_id="job_id_value", - state=cloud_deploy.JobRun.State.IN_PROGRESS, + description="description_value", + target_id="target_id_value", + approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, + state=cloud_deploy.Rollout.State.SUCCEEDED, + failure_reason="failure_reason_value", + deploying_build="deploying_build_value", etag="etag_value", + deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, + controller_rollout="controller_rollout_value", + rollback_of_rollout="rollback_of_rollout_value", + rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_job_run(request) + response = await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.JobRun) + assert isinstance(response, cloud_deploy.Rollout) assert response.name == "name_value" assert response.uid == "uid_value" - assert response.phase_id == "phase_id_value" - assert response.job_id == "job_id_value" - assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.description == "description_value" + assert response.target_id == "target_id_value" + assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL + assert response.state == cloud_deploy.Rollout.State.SUCCEEDED + assert response.failure_reason == "failure_reason_value" + assert response.deploying_build == "deploying_build_value" assert response.etag == "etag_value" + assert ( + response.deploy_failure_cause + == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE + ) + assert response.controller_rollout == "controller_rollout_value" + assert response.rollback_of_rollout == "rollback_of_rollout_value" + assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] @pytest.mark.asyncio -async def test_get_job_run_async_from_dict(): - await test_get_job_run_async(request_type=dict) +async def test_get_rollout_async_from_dict(): + await test_get_rollout_async(request_type=dict) -def test_get_job_run_field_headers(): +def test_get_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: - call.return_value = cloud_deploy.JobRun() - client.get_job_run(request) + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + call.return_value = cloud_deploy.Rollout() + client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13775,21 +13929,23 @@ def test_get_job_run_field_headers(): @pytest.mark.asyncio -async def test_get_job_run_field_headers_async(): +async def test_get_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) - await client.get_job_run(request) + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Rollout() + ) + await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13804,18 +13960,18 @@ async def test_get_job_run_field_headers_async(): ) in kw["metadata"] -def test_get_job_run_flattened(): +def test_get_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.JobRun() + call.return_value = cloud_deploy.Rollout() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_job_run( + client.get_rollout( name="name_value", ) @@ -13828,7 +13984,7 @@ def test_get_job_run_flattened(): assert arg == mock_val -def test_get_job_run_flattened_error(): +def test_get_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13836,27 +13992,29 @@ def test_get_job_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_job_run( - cloud_deploy.GetJobRunRequest(), + client.get_rollout( + cloud_deploy.GetRolloutRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_job_run_flattened_async(): +async def test_get_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.JobRun() + call.return_value = cloud_deploy.Rollout() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Rollout() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_job_run( + response = await client.get_rollout( name="name_value", ) @@ -13870,7 +14028,7 @@ async def test_get_job_run_flattened_async(): @pytest.mark.asyncio -async def test_get_job_run_flattened_error_async(): +async def test_get_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13878,8 +14036,8 @@ async def test_get_job_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_job_run( - cloud_deploy.GetJobRunRequest(), + await client.get_rollout( + cloud_deploy.GetRolloutRequest(), name="name_value", ) @@ -13887,11 +14045,11 @@ async def test_get_job_run_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.TerminateJobRunRequest, + cloud_deploy.CreateRolloutRequest, dict, ], ) -def test_terminate_job_run(request_type, transport: str = "grpc"): +def test_create_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13902,24 +14060,22 @@ def test_terminate_job_run(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.TerminateJobRunResponse() - response = client.terminate_job_run(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_rollout(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.TerminateJobRunResponse) + assert isinstance(response, future.Future) -def test_terminate_job_run_empty_call(): +def test_create_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -13928,19 +14084,17 @@ def test_terminate_job_run_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.terminate_job_run() + client.create_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.TerminateJobRunRequest() + assert args[0] == cloud_deploy.CreateRolloutRequest() -def test_terminate_job_run_non_empty_request_with_auto_populated_field(): +def test_create_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -13951,26 +14105,30 @@ def test_terminate_job_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.TerminateJobRunRequest( - name="name_value", + request = cloud_deploy.CreateRolloutRequest( + parent="parent_value", + rollout_id="rollout_id_value", + request_id="request_id_value", + starting_phase_id="starting_phase_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.terminate_job_run(request=request) + client.create_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.TerminateJobRunRequest( - name="name_value", + assert args[0] == cloud_deploy.CreateRolloutRequest( + parent="parent_value", + rollout_id="rollout_id_value", + request_id="request_id_value", + starting_phase_id="starting_phase_id_value", ) -def test_terminate_job_run_use_cached_wrapped_rpc(): +def test_create_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13984,23 +14142,26 @@ def test_terminate_job_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.terminate_job_run in client._transport._wrapped_methods + assert client._transport.create_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.terminate_job_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_rollout] = mock_rpc request = {} - client.terminate_job_run(request) + client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.terminate_job_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14008,7 +14169,7 @@ def test_terminate_job_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_terminate_job_run_empty_call_async(): +async def test_create_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -14017,21 +14178,19 @@ async def test_terminate_job_run_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.terminate_job_run() + response = await client.create_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.TerminateJobRunRequest() + assert args[0] == cloud_deploy.CreateRolloutRequest() @pytest.mark.asyncio -async def test_terminate_job_run_async_use_cached_wrapped_rpc( +async def test_create_rollout_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14048,32 +14207,38 @@ async def test_terminate_job_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.terminate_job_run + client._client._transport.create_rollout in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.terminate_job_run - ] = mock_object + client._client._transport.create_rollout + ] = mock_rpc request = {} - await client.terminate_job_run(request) + await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.terminate_job_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_terminate_job_run_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.TerminateJobRunRequest +async def test_create_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14085,47 +14250,43 @@ async def test_terminate_job_run_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.terminate_job_run(request) + response = await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.TerminateJobRunResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_terminate_job_run_async_from_dict(): - await test_terminate_job_run_async(request_type=dict) +async def test_create_rollout_async_from_dict(): + await test_create_rollout_async(request_type=dict) -def test_terminate_job_run_field_headers(): +def test_create_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: - call.return_value = cloud_deploy.TerminateJobRunResponse() - client.terminate_job_run(request) + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14136,30 +14297,28 @@ def test_terminate_job_run_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_terminate_job_run_field_headers_async(): +async def test_create_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/op") ) - await client.terminate_job_run(request) + await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14170,37 +14329,43 @@ async def test_terminate_job_run_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_terminate_job_run_flattened(): +def test_create_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.TerminateJobRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.terminate_job_run( - name="name_value", + client.create_rollout( + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].rollout + mock_val = cloud_deploy.Rollout(name="name_value") + assert arg == mock_val + arg = args[0].rollout_id + mock_val = "rollout_id_value" assert arg == mock_val -def test_terminate_job_run_flattened_error(): +def test_create_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14208,45 +14373,53 @@ def test_terminate_job_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.terminate_job_run( - cloud_deploy.TerminateJobRunRequest(), - name="name_value", + client.create_rollout( + cloud_deploy.CreateRolloutRequest(), + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) @pytest.mark.asyncio -async def test_terminate_job_run_flattened_async(): +async def test_create_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.TerminateJobRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.terminate_job_run( - name="name_value", + response = await client.create_rollout( + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].rollout + mock_val = cloud_deploy.Rollout(name="name_value") + assert arg == mock_val + arg = args[0].rollout_id + mock_val = "rollout_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_terminate_job_run_flattened_error_async(): +async def test_create_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14254,20 +14427,22 @@ async def test_terminate_job_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.terminate_job_run( - cloud_deploy.TerminateJobRunRequest(), - name="name_value", + await client.create_rollout( + cloud_deploy.CreateRolloutRequest(), + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetConfigRequest, + cloud_deploy.IgnoreJobRequest, dict, ], ) -def test_get_config(request_type, transport: str = "grpc"): +def test_ignore_job(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14278,27 +14453,22 @@ def test_get_config(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Config( - name="name_value", - default_skaffold_version="default_skaffold_version_value", - ) - response = client.get_config(request) + call.return_value = cloud_deploy.IgnoreJobResponse() + response = client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.Config) - assert response.name == "name_value" - assert response.default_skaffold_version == "default_skaffold_version_value" + assert isinstance(response, cloud_deploy.IgnoreJobResponse) -def test_get_config_empty_call(): +def test_ignore_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -14307,17 +14477,17 @@ def test_get_config_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_config() + client.ignore_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetConfigRequest() + assert args[0] == cloud_deploy.IgnoreJobRequest() -def test_get_config_non_empty_request_with_auto_populated_field(): +def test_ignore_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -14328,24 +14498,28 @@ def test_get_config_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetConfigRequest( - name="name_value", + request = cloud_deploy.IgnoreJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_config(request=request) + client.ignore_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetConfigRequest( - name="name_value", + assert args[0] == cloud_deploy.IgnoreJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) -def test_get_config_use_cached_wrapped_rpc(): +def test_ignore_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14359,21 +14533,21 @@ def test_get_config_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_config in client._transport._wrapped_methods + assert client._transport.ignore_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_config] = mock_rpc + client._transport._wrapped_methods[client._transport.ignore_job] = mock_rpc request = {} - client.get_config(request) + client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_config(request) + client.ignore_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14381,7 +14555,7 @@ def test_get_config_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_config_empty_call_async(): +async def test_ignore_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -14390,22 +14564,19 @@ async def test_get_config_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Config( - name="name_value", - default_skaffold_version="default_skaffold_version_value", - ) + cloud_deploy.IgnoreJobResponse() ) - response = await client.get_config() + response = await client.ignore_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetConfigRequest() + assert args[0] == cloud_deploy.IgnoreJobRequest() @pytest.mark.asyncio -async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -14420,32 +14591,33 @@ async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.get_config + client._client._transport.ignore_job in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_config - ] = mock_object + client._client._transport.ignore_job + ] = mock_rpc request = {} - await client.get_config(request) + await client.ignore_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_config(request) + await client.ignore_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_config_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetConfigRequest +async def test_ignore_job_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.IgnoreJobRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14457,48 +14629,43 @@ async def test_get_config_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Config( - name="name_value", - default_skaffold_version="default_skaffold_version_value", - ) + cloud_deploy.IgnoreJobResponse() ) - response = await client.get_config(request) + response = await client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Config) - assert response.name == "name_value" - assert response.default_skaffold_version == "default_skaffold_version_value" + assert isinstance(response, cloud_deploy.IgnoreJobResponse) @pytest.mark.asyncio -async def test_get_config_async_from_dict(): - await test_get_config_async(request_type=dict) +async def test_ignore_job_async_from_dict(): + await test_ignore_job_async(request_type=dict) -def test_get_config_field_headers(): +def test_ignore_job_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() - request.name = "name_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: - call.return_value = cloud_deploy.Config() - client.get_config(request) + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + call.return_value = cloud_deploy.IgnoreJobResponse() + client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14509,26 +14676,28 @@ def test_get_config_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "rollout=rollout_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_config_field_headers_async(): +async def test_ignore_job_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() - request.name = "name_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) - await client.get_config(request) + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.IgnoreJobResponse() + ) + await client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14539,35 +14708,43 @@ async def test_get_config_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "rollout=rollout_value", ) in kw["metadata"] -def test_get_config_flattened(): +def test_ignore_job_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Config() + call.return_value = cloud_deploy.IgnoreJobResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_config( - name="name_value", + client.ignore_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].rollout + mock_val = "rollout_value" + assert arg == mock_val + arg = args[0].phase_id + mock_val = "phase_id_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val -def test_get_config_flattened_error(): +def test_ignore_job_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14575,41 +14752,53 @@ def test_get_config_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_config( - cloud_deploy.GetConfigRequest(), - name="name_value", + client.ignore_job( + cloud_deploy.IgnoreJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.asyncio -async def test_get_config_flattened_async(): +async def test_ignore_job_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Config() + call.return_value = cloud_deploy.IgnoreJobResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.IgnoreJobResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_config( - name="name_value", + response = await client.ignore_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].rollout + mock_val = "rollout_value" + assert arg == mock_val + arg = args[0].phase_id + mock_val = "phase_id_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_config_flattened_error_async(): +async def test_ignore_job_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14617,20 +14806,22 @@ async def test_get_config_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_config( - cloud_deploy.GetConfigRequest(), - name="name_value", + await client.ignore_job( + cloud_deploy.IgnoreJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateAutomationRequest, + cloud_deploy.RetryJobRequest, dict, ], ) -def test_create_automation(request_type, transport: str = "grpc"): +def test_retry_job(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14641,24 +14832,22 @@ def test_create_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_automation(request) + call.return_value = cloud_deploy.RetryJobResponse() + response = client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.RetryJobResponse) -def test_create_automation_empty_call(): +def test_retry_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -14667,19 +14856,17 @@ def test_create_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_automation() + client.retry_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateAutomationRequest() + assert args[0] == cloud_deploy.RetryJobRequest() -def test_create_automation_non_empty_request_with_auto_populated_field(): +def test_retry_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -14690,30 +14877,28 @@ def test_create_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.CreateAutomationRequest( - parent="parent_value", - automation_id="automation_id_value", - request_id="request_id_value", + request = cloud_deploy.RetryJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_automation(request=request) + client.retry_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateAutomationRequest( - parent="parent_value", - automation_id="automation_id_value", - request_id="request_id_value", + assert args[0] == cloud_deploy.RetryJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) -def test_create_automation_use_cached_wrapped_rpc(): +def test_retry_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14727,27 +14912,21 @@ def test_create_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_automation in client._transport._wrapped_methods + assert client._transport.retry_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_automation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.retry_job] = mock_rpc request = {} - client.create_automation(request) + client.retry_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_automation(request) + client.retry_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14755,7 +14934,7 @@ def test_create_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_automation_empty_call_async(): +async def test_retry_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -14764,23 +14943,19 @@ async def test_create_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.RetryJobResponse() ) - response = await client.create_automation() + response = await client.retry_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateAutomationRequest() + assert args[0] == cloud_deploy.RetryJobRequest() @pytest.mark.asyncio -async def test_create_automation_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -14795,36 +14970,33 @@ async def test_create_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_automation + client._client._transport.retry_job in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_automation - ] = mock_object + client._client._transport.retry_job + ] = mock_rpc request = {} - await client.create_automation(request) + await client.retry_job(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + assert mock_rpc.call_count == 1 - await client.create_automation(request) + await client.retry_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateAutomationRequest +async def test_retry_job_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.RetryJobRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14836,47 +15008,43 @@ async def test_create_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.RetryJobResponse() ) - response = await client.create_automation(request) + response = await client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.RetryJobResponse) @pytest.mark.asyncio -async def test_create_automation_async_from_dict(): - await test_create_automation_async(request_type=dict) +async def test_retry_job_async_from_dict(): + await test_retry_job_async(request_type=dict) -def test_create_automation_field_headers(): +def test_retry_job_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() - request.parent = "parent_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_automation(request) + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + call.return_value = cloud_deploy.RetryJobResponse() + client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14887,30 +15055,28 @@ def test_create_automation_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "rollout=rollout_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_automation_field_headers_async(): +async def test_retry_job_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() - request.parent = "parent_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + cloud_deploy.RetryJobResponse() ) - await client.create_automation(request) + await client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14921,45 +15087,43 @@ async def test_create_automation_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "rollout=rollout_value", ) in kw["metadata"] -def test_create_automation_flattened(): +def test_retry_job_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.RetryJobResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_automation( - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + client.retry_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].rollout + mock_val = "rollout_value" assert arg == mock_val - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") + arg = args[0].phase_id + mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].automation_id - mock_val = "automation_id_value" + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val -def test_create_automation_flattened_error(): +def test_retry_job_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14967,55 +15131,53 @@ def test_create_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_automation( - cloud_deploy.CreateAutomationRequest(), - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + client.retry_job( + cloud_deploy.RetryJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.asyncio -async def test_create_automation_flattened_async(): +async def test_retry_job_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.RetryJobResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.RetryJobResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_automation( - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + response = await client.retry_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].rollout + mock_val = "rollout_value" assert arg == mock_val - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") + arg = args[0].phase_id + mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].automation_id - mock_val = "automation_id_value" + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_automation_flattened_error_async(): +async def test_retry_job_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15023,22 +15185,22 @@ async def test_create_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_automation( - cloud_deploy.CreateAutomationRequest(), - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + await client.retry_job( + cloud_deploy.RetryJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.UpdateAutomationRequest, + cloud_deploy.ListJobRunsRequest, dict, ], ) -def test_update_automation(request_type, transport: str = "grpc"): +def test_list_job_runs(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15049,24 +15211,27 @@ def test_update_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_automation(request) + call.return_value = cloud_deploy.ListJobRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListJobRunsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_update_automation_empty_call(): +def test_list_job_runs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -15075,19 +15240,17 @@ def test_update_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_automation() + client.list_job_runs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.UpdateAutomationRequest() + assert args[0] == cloud_deploy.ListJobRunsRequest() -def test_update_automation_non_empty_request_with_auto_populated_field(): +def test_list_job_runs_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -15098,26 +15261,30 @@ def test_update_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.UpdateAutomationRequest( - request_id="request_id_value", + request = cloud_deploy.ListJobRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_automation(request=request) + client.list_job_runs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.UpdateAutomationRequest( - request_id="request_id_value", + assert args[0] == cloud_deploy.ListJobRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) -def test_update_automation_use_cached_wrapped_rpc(): +def test_list_job_runs_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15131,27 +15298,21 @@ def test_update_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_automation in client._transport._wrapped_methods + assert client._transport.list_job_runs in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_automation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_job_runs] = mock_rpc request = {} - client.update_automation(request) + client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_automation(request) + client.list_job_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15159,7 +15320,7 @@ def test_update_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_automation_empty_call_async(): +async def test_list_job_runs_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -15168,21 +15329,22 @@ async def test_update_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ListJobRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.update_automation() + response = await client.list_job_runs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.UpdateAutomationRequest() + assert args[0] == cloud_deploy.ListJobRunsRequest() @pytest.mark.asyncio -async def test_update_automation_async_use_cached_wrapped_rpc( +async def test_list_job_runs_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15199,36 +15361,33 @@ async def test_update_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_automation + client._client._transport.list_job_runs in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_automation - ] = mock_object + client._client._transport.list_job_runs + ] = mock_rpc request = {} - await client.update_automation(request) + await client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + assert mock_rpc.call_count == 1 - await client.update_automation(request) + await client.list_job_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateAutomationRequest +async def test_list_job_runs_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListJobRunsRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15240,47 +15399,48 @@ async def test_update_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ListJobRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.update_automation(request) + response = await client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListJobRunsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_update_automation_async_from_dict(): - await test_update_automation_async(request_type=dict) +async def test_list_job_runs_async_from_dict(): + await test_list_job_runs_async(request_type=dict) -def test_update_automation_field_headers(): +def test_list_job_runs_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() - request.automation.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_automation(request) + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + call.return_value = cloud_deploy.ListJobRunsResponse() + client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15291,30 +15451,28 @@ def test_update_automation_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "automation.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_automation_field_headers_async(): +async def test_list_job_runs_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() - request.automation.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + cloud_deploy.ListJobRunsResponse() ) - await client.update_automation(request) + await client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15325,41 +15483,35 @@ async def test_update_automation_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "automation.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_automation_flattened(): +def test_list_job_runs_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ListJobRunsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_automation( - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_job_runs( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_automation_flattened_error(): +def test_list_job_runs_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15367,50 +15519,43 @@ def test_update_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_automation( - cloud_deploy.UpdateAutomationRequest(), - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_job_runs( + cloud_deploy.ListJobRunsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_automation_flattened_async(): +async def test_list_job_runs_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ListJobRunsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ListJobRunsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.update_automation( - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_job_runs( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_automation_flattened_error_async(): +async def test_list_job_runs_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15418,21 +15563,214 @@ async def test_update_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_automation( - cloud_deploy.UpdateAutomationRequest(), - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_job_runs( + cloud_deploy.ListJobRunsRequest(), + parent="parent_value", + ) + + +def test_list_job_runs_pager(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_job_runs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.JobRun) for i in results) + + +def test_list_job_runs_pages(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, + ) + pages = list(client.list_job_runs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_job_runs_async_pager(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_job_runs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_deploy.JobRun) for i in responses) + + +@pytest.mark.asyncio +async def test_list_job_runs_async_pages(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_job_runs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteAutomationRequest, + cloud_deploy.GetJobRunRequest, dict, ], ) -def test_delete_automation(request_type, transport: str = "grpc"): +def test_get_job_run(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15443,24 +15781,35 @@ def test_delete_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_automation(request) + call.return_value = cloud_deploy.JobRun( + name="name_value", + uid="uid_value", + phase_id="phase_id_value", + job_id="job_id_value", + state=cloud_deploy.JobRun.State.IN_PROGRESS, + etag="etag_value", + ) + response = client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.JobRun) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.phase_id == "phase_id_value" + assert response.job_id == "job_id_value" + assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.etag == "etag_value" -def test_delete_automation_empty_call(): +def test_get_job_run_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -15469,19 +15818,17 @@ def test_delete_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_automation() + client.get_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.DeleteAutomationRequest() + assert args[0] == cloud_deploy.GetJobRunRequest() -def test_delete_automation_non_empty_request_with_auto_populated_field(): +def test_get_job_run_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -15492,30 +15839,24 @@ def test_delete_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.DeleteAutomationRequest( + request = cloud_deploy.GetJobRunRequest( name="name_value", - request_id="request_id_value", - etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_automation(request=request) + client.get_job_run(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.DeleteAutomationRequest( + assert args[0] == cloud_deploy.GetJobRunRequest( name="name_value", - request_id="request_id_value", - etag="etag_value", ) -def test_delete_automation_use_cached_wrapped_rpc(): +def test_get_job_run_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15529,27 +15870,21 @@ def test_delete_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_automation in client._transport._wrapped_methods + assert client._transport.get_job_run in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_automation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_job_run] = mock_rpc request = {} - client.delete_automation(request) + client.get_job_run(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_automation(request) + client.get_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15557,7 +15892,7 @@ def test_delete_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_automation_empty_call_async(): +async def test_get_job_run_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -15566,21 +15901,26 @@ async def test_delete_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.JobRun( + name="name_value", + uid="uid_value", + phase_id="phase_id_value", + job_id="job_id_value", + state=cloud_deploy.JobRun.State.IN_PROGRESS, + etag="etag_value", + ) ) - response = await client.delete_automation() + response = await client.get_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.DeleteAutomationRequest() + assert args[0] == cloud_deploy.GetJobRunRequest() @pytest.mark.asyncio -async def test_delete_automation_async_use_cached_wrapped_rpc( +async def test_get_job_run_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15597,36 +15937,33 @@ async def test_delete_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_automation + client._client._transport.get_job_run in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_automation - ] = mock_object + client._client._transport.get_job_run + ] = mock_rpc request = {} - await client.delete_automation(request) + await client.get_job_run(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + assert mock_rpc.call_count == 1 - await client.delete_automation(request) + await client.get_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteAutomationRequest +async def test_get_job_run_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetJobRunRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15638,47 +15975,56 @@ async def test_delete_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.JobRun( + name="name_value", + uid="uid_value", + phase_id="phase_id_value", + job_id="job_id_value", + state=cloud_deploy.JobRun.State.IN_PROGRESS, + etag="etag_value", + ) ) - response = await client.delete_automation(request) + response = await client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.JobRun) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.phase_id == "phase_id_value" + assert response.job_id == "job_id_value" + assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.etag == "etag_value" @pytest.mark.asyncio -async def test_delete_automation_async_from_dict(): - await test_delete_automation_async(request_type=dict) +async def test_get_job_run_async_from_dict(): + await test_get_job_run_async(request_type=dict) -def test_delete_automation_field_headers(): +def test_get_job_run_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_automation(request) + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + call.return_value = cloud_deploy.JobRun() + client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15694,25 +16040,21 @@ def test_delete_automation_field_headers(): @pytest.mark.asyncio -async def test_delete_automation_field_headers_async(): +async def test_get_job_run_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.delete_automation(request) + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) + await client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15727,20 +16069,18 @@ async def test_delete_automation_field_headers_async(): ) in kw["metadata"] -def test_delete_automation_flattened(): +def test_get_job_run_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.JobRun() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_automation( + client.get_job_run( name="name_value", ) @@ -15753,7 +16093,7 @@ def test_delete_automation_flattened(): assert arg == mock_val -def test_delete_automation_flattened_error(): +def test_get_job_run_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15761,31 +16101,27 @@ def test_delete_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_automation( - cloud_deploy.DeleteAutomationRequest(), + client.get_job_run( + cloud_deploy.GetJobRunRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_automation_flattened_async(): +async def test_get_job_run_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.JobRun() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_automation( + response = await client.get_job_run( name="name_value", ) @@ -15799,7 +16135,7 @@ async def test_delete_automation_flattened_async(): @pytest.mark.asyncio -async def test_delete_automation_flattened_error_async(): +async def test_get_job_run_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15807,8 +16143,8 @@ async def test_delete_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_automation( - cloud_deploy.DeleteAutomationRequest(), + await client.get_job_run( + cloud_deploy.GetJobRunRequest(), name="name_value", ) @@ -15816,11 +16152,11 @@ async def test_delete_automation_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetAutomationRequest, + cloud_deploy.TerminateJobRunRequest, dict, ], ) -def test_get_automation(request_type, transport: str = "grpc"): +def test_terminate_job_run(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15831,35 +16167,24 @@ def test_get_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Automation( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, - service_account="service_account_value", - ) - response = client.get_automation(request) + call.return_value = cloud_deploy.TerminateJobRunResponse() + response = client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Automation) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.suspended is True - assert response.service_account == "service_account_value" + assert isinstance(response, cloud_deploy.TerminateJobRunResponse) -def test_get_automation_empty_call(): +def test_terminate_job_run_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -15868,17 +16193,19 @@ def test_get_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_automation() + client.terminate_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRequest() + assert args[0] == cloud_deploy.TerminateJobRunRequest() -def test_get_automation_non_empty_request_with_auto_populated_field(): +def test_terminate_job_run_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -15889,24 +16216,26 @@ def test_get_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.GetAutomationRequest( + request = cloud_deploy.TerminateJobRunRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_automation(request=request) + client.terminate_job_run(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRequest( + assert args[0] == cloud_deploy.TerminateJobRunRequest( name="name_value", ) -def test_get_automation_use_cached_wrapped_rpc(): +def test_terminate_job_run_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15920,21 +16249,23 @@ def test_get_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_automation in client._transport._wrapped_methods + assert client._transport.terminate_job_run in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_automation] = mock_rpc + client._transport._wrapped_methods[ + client._transport.terminate_job_run + ] = mock_rpc request = {} - client.get_automation(request) + client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_automation(request) + client.terminate_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15942,7 +16273,7 @@ def test_get_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_automation_empty_call_async(): +async def test_terminate_job_run_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -15951,26 +16282,21 @@ async def test_get_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, - service_account="service_account_value", - ) + cloud_deploy.TerminateJobRunResponse() ) - response = await client.get_automation() + response = await client.terminate_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRequest() + assert args[0] == cloud_deploy.TerminateJobRunRequest() @pytest.mark.asyncio -async def test_get_automation_async_use_cached_wrapped_rpc( +async def test_terminate_job_run_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15987,32 +16313,33 @@ async def test_get_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_automation + client._client._transport.terminate_job_run in 
client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_automation - ] = mock_object + client._client._transport.terminate_job_run + ] = mock_rpc request = {} - await client.get_automation(request) + await client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_automation(request) + await client.terminate_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRequest +async def test_terminate_job_run_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.TerminateJobRunRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16024,56 +16351,47 @@ async def test_get_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, - service_account="service_account_value", - ) + cloud_deploy.TerminateJobRunResponse() ) - response = await client.get_automation(request) + response = await client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.Automation) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.suspended is True - assert response.service_account == "service_account_value" + assert isinstance(response, cloud_deploy.TerminateJobRunResponse) @pytest.mark.asyncio -async def test_get_automation_async_from_dict(): - await test_get_automation_async(request_type=dict) +async def test_terminate_job_run_async_from_dict(): + await test_terminate_job_run_async(request_type=dict) -def test_get_automation_field_headers(): +def test_terminate_job_run_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_automation), "__call__") as call: - call.return_value = cloud_deploy.Automation() - client.get_automation(request) + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: + call.return_value = cloud_deploy.TerminateJobRunResponse() + client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16089,23 +16407,25 @@ def test_get_automation_field_headers(): @pytest.mark.asyncio -async def test_get_automation_field_headers_async(): +async def test_terminate_job_run_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation() + cloud_deploy.TerminateJobRunResponse() ) - await client.get_automation(request) + await client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -16120,18 +16440,20 @@ async def test_get_automation_field_headers_async(): ) in kw["metadata"] -def test_get_automation_flattened(): +def test_terminate_job_run_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Automation() + call.return_value = cloud_deploy.TerminateJobRunResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_automation( + client.terminate_job_run( name="name_value", ) @@ -16144,7 +16466,7 @@ def test_get_automation_flattened(): assert arg == mock_val -def test_get_automation_flattened_error(): +def test_terminate_job_run_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16152,29 +16474,31 @@ def test_get_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_automation( - cloud_deploy.GetAutomationRequest(), + client.terminate_job_run( + cloud_deploy.TerminateJobRunRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_automation_flattened_async(): +async def test_terminate_job_run_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Automation() + call.return_value = cloud_deploy.TerminateJobRunResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation() + cloud_deploy.TerminateJobRunResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_automation( + response = await client.terminate_job_run( name="name_value", ) @@ -16188,7 +16512,7 @@ async def test_get_automation_flattened_async(): @pytest.mark.asyncio -async def test_get_automation_flattened_error_async(): +async def test_terminate_job_run_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16196,8 +16520,8 @@ async def test_get_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_automation( - cloud_deploy.GetAutomationRequest(), + await client.terminate_job_run( + cloud_deploy.TerminateJobRunRequest(), name="name_value", ) @@ -16205,11 +16529,11 @@ async def test_get_automation_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListAutomationsRequest, + cloud_deploy.GetConfigRequest, dict, ], ) -def test_list_automations(request_type, transport: str = "grpc"): +def test_get_config(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16220,27 +16544,27 @@ def test_list_automations(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.ListAutomationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = cloud_deploy.Config( + name="name_value", + default_skaffold_version="default_skaffold_version_value", ) - response = client.list_automations(request) + response = client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutomationsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_deploy.Config) + assert response.name == "name_value" + assert response.default_skaffold_version == "default_skaffold_version_value" -def test_list_automations_empty_call(): +def test_get_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -16249,17 +16573,17 @@ def test_list_automations_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_automations() + client.get_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationsRequest() + assert args[0] == cloud_deploy.GetConfigRequest() -def test_list_automations_non_empty_request_with_auto_populated_field(): +def test_get_config_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -16270,30 +16594,24 @@ def test_list_automations_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.ListAutomationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = cloud_deploy.GetConfigRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_automations(request=request) + client.get_config(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == cloud_deploy.GetConfigRequest( + name="name_value", ) -def test_list_automations_use_cached_wrapped_rpc(): +def test_get_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16307,23 +16625,21 @@ def test_list_automations_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_automations in client._transport._wrapped_methods + assert client._transport.get_config in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_automations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_config] = mock_rpc request = {} - client.list_automations(request) + client.get_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_automations(request) + client.get_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16331,7 +16647,7 @@ def test_list_automations_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_automations_empty_call_async(): +async def test_get_config_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -16340,24 +16656,22 @@ async def test_list_automations_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + cloud_deploy.Config( + name="name_value", + default_skaffold_version="default_skaffold_version_value", ) ) - response = await client.list_automations() + response = await client.get_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationsRequest() + assert args[0] == cloud_deploy.GetConfigRequest() @pytest.mark.asyncio -async def test_list_automations_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -16372,32 +16686,33 @@ async def test_list_automations_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_automations + client._client._transport.get_config in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_automations - ] = mock_object + client._client._transport.get_config + ] = mock_rpc request = {} - await client.list_automations(request) + await 
client.get_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_automations(request) + await client.get_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_automations_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationsRequest +async def test_get_config_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetConfigRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16409,48 +16724,48 @@ async def test_list_automations_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + cloud_deploy.Config( + name="name_value", + default_skaffold_version="default_skaffold_version_value", ) ) - response = await client.list_automations(request) + response = await client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAutomationsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_deploy.Config) + assert response.name == "name_value" + assert response.default_skaffold_version == "default_skaffold_version_value" @pytest.mark.asyncio -async def test_list_automations_async_from_dict(): - await test_list_automations_async(request_type=dict) +async def test_get_config_async_from_dict(): + await test_get_config_async(request_type=dict) -def test_list_automations_field_headers(): +def test_get_config_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - call.return_value = cloud_deploy.ListAutomationsResponse() - client.list_automations(request) + with mock.patch.object(type(client.transport.get_config), "__call__") as call: + call.return_value = cloud_deploy.Config() + client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16461,28 +16776,26 @@ def test_list_automations_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_automations_field_headers_async(): +async def test_get_config_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse() - ) - await client.list_automations(request) + with mock.patch.object(type(client.transport.get_config), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) + await client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -16493,35 +16806,35 @@ async def test_list_automations_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_automations_flattened(): +def test_get_config_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationsResponse() + call.return_value = cloud_deploy.Config() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_automations( - parent="parent_value", + client.get_config( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_automations_flattened_error(): +def test_get_config_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16529,43 +16842,41 @@ def test_list_automations_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_automations( - cloud_deploy.ListAutomationsRequest(), - parent="parent_value", + client.get_config( + cloud_deploy.GetConfigRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_automations_flattened_async(): +async def test_get_config_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationsResponse() + call.return_value = cloud_deploy.Config() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_automations( - parent="parent_value", + response = await client.get_config( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_automations_flattened_error_async(): +async def test_get_config_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16573,280 +16884,69 @@ async def test_list_automations_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_automations( - cloud_deploy.ListAutomationsRequest(), - parent="parent_value", + await client.get_config( + cloud_deploy.GetConfigRequest(), + name="name_value", ) -def test_list_automations_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.CreateAutomationRequest, + dict, + ], +) +def test_create_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_automations(request={}, retry=retry, timeout=timeout) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_automation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_automation(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.CreateAutomationRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.Automation) for i in results) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) -def test_list_automations_pages(transport_name: str = "grpc"): +def test_create_automation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_automations(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_automations_async_pager(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_automations( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.Automation) for i in responses) - - -@pytest.mark.asyncio -async def test_list_automations_async_pages(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_automations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_deploy.GetAutomationRunRequest, - dict, - ], -) -def test_get_automation_run(request_type, transport: str = "grpc"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_automation_run), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.AutomationRun( - name="name_value", - etag="etag_value", - service_account="service_account_value", - target_id="target_id_value", - state=cloud_deploy.AutomationRun.State.SUCCEEDED, - state_description="state_description_value", - rule_id="rule_id_value", - automation_id="automation_id_value", - ) - response = client.get_automation_run(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRunRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AutomationRun) - assert response.name == "name_value" - assert response.etag == "etag_value" - assert response.service_account == "service_account_value" - assert response.target_id == "target_id_value" - assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED - assert response.state_description == "state_description_value" - assert response.rule_id == "rule_id_value" - assert response.automation_id == "automation_id_value" - - -def test_get_automation_run_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_automation_run() + client.create_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRunRequest() + assert args[0] == cloud_deploy.CreateAutomationRequest() -def test_get_automation_run_non_empty_request_with_auto_populated_field(): +def test_create_automation_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -16857,26 +16957,30 @@ def test_get_automation_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetAutomationRunRequest( - name="name_value", + request = cloud_deploy.CreateAutomationRequest( + parent="parent_value", + automation_id="automation_id_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_automation_run(request=request) + client.create_automation(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRunRequest( - name="name_value", + assert args[0] == cloud_deploy.CreateAutomationRequest( + parent="parent_value", + automation_id="automation_id_value", + request_id="request_id_value", ) -def test_get_automation_run_use_cached_wrapped_rpc(): +def test_create_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16890,9 +16994,7 @@ def test_get_automation_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_automation_run in client._transport._wrapped_methods - ) + assert client._transport.create_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -16900,15 +17002,20 @@ def test_get_automation_run_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_automation_run + client._transport.create_automation ] = mock_rpc request = {} - client.get_automation_run(request) + client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_automation_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16916,7 +17023,7 @@ def test_get_automation_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_automation_run_empty_call_async(): +async def test_create_automation_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -16926,29 +17033,20 @@ async def test_get_automation_run_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun( - name="name_value", - etag="etag_value", - service_account="service_account_value", - target_id="target_id_value", - state=cloud_deploy.AutomationRun.State.SUCCEEDED, - state_description="state_description_value", - rule_id="rule_id_value", - automation_id="automation_id_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_automation_run() + response = await client.create_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRunRequest() + assert args[0] == cloud_deploy.CreateAutomationRequest() @pytest.mark.asyncio -async def test_get_automation_run_async_use_cached_wrapped_rpc( +async def test_create_automation_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -16965,32 +17063,38 @@ async def test_get_automation_run_async_use_cached_wrapped_rpc( # Ensure method has been 
cached assert ( - client._client._transport.get_automation_run + client._client._transport.create_automation in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_automation_run - ] = mock_object + client._client._transport.create_automation + ] = mock_rpc request = {} - await client.get_automation_run(request) + await client.create_automation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.get_automation_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_automation_run_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRunRequest +async def test_create_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateAutomationRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17003,63 +17107,46 @@ async def test_get_automation_run_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun( - name="name_value", - etag="etag_value", - service_account="service_account_value", - target_id="target_id_value", - state=cloud_deploy.AutomationRun.State.SUCCEEDED, - state_description="state_description_value", - rule_id="rule_id_value", - automation_id="automation_id_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_automation_run(request) + response = await client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AutomationRun) - assert response.name == "name_value" - assert response.etag == "etag_value" - assert response.service_account == "service_account_value" - assert response.target_id == "target_id_value" - assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED - assert response.state_description == "state_description_value" - assert response.rule_id == "rule_id_value" - assert response.automation_id == "automation_id_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_automation_run_async_from_dict(): - await test_get_automation_run_async(request_type=dict) +async def test_create_automation_async_from_dict(): + await test_create_automation_async(request_type=dict) -def test_get_automation_run_field_headers(): +def test_create_automation_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: - call.return_value = cloud_deploy.AutomationRun() - client.get_automation_run(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -17070,30 +17157,30 @@ def test_get_automation_run_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_automation_run_field_headers_async(): +async def test_create_automation_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun() + operations_pb2.Operation(name="operations/op") ) - await client.get_automation_run(request) + await client.create_automation(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -17104,37 +17191,45 @@ async def test_get_automation_run_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_automation_run_flattened(): +def test_create_automation_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AutomationRun() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_automation_run( - name="name_value", + client.create_automation( + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].automation_id + mock_val = "automation_id_value" assert arg == mock_val -def test_get_automation_run_flattened_error(): +def test_create_automation_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17142,45 +17237,55 @@ def test_get_automation_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_automation_run( - cloud_deploy.GetAutomationRunRequest(), - name="name_value", + client.create_automation( + cloud_deploy.CreateAutomationRequest(), + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) @pytest.mark.asyncio -async def test_get_automation_run_flattened_async(): +async def test_create_automation_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AutomationRun() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_automation_run( - name="name_value", + response = await client.create_automation( + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].automation_id + mock_val = "automation_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_automation_run_flattened_error_async(): +async def test_create_automation_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17188,20 +17293,22 @@ async def test_get_automation_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_automation_run( - cloud_deploy.GetAutomationRunRequest(), - name="name_value", + await client.create_automation( + cloud_deploy.CreateAutomationRequest(), + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListAutomationRunsRequest, + cloud_deploy.UpdateAutomationRequest, dict, ], ) -def test_list_automation_runs(request_type, transport: str = "grpc"): +def test_update_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17213,28 +17320,23 @@ def test_list_automation_runs(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.ListAutomationRunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_automation_runs(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutomationRunsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) -def test_list_automation_runs_empty_call(): +def test_update_automation_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -17244,18 +17346,18 @@ def test_list_automation_runs_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_automation_runs() + client.update_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationRunsRequest() + assert args[0] == cloud_deploy.UpdateAutomationRequest() -def test_list_automation_runs_non_empty_request_with_auto_populated_field(): +def test_update_automation_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -17266,32 +17368,26 @@ def test_list_automation_runs_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.ListAutomationRunsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = cloud_deploy.UpdateAutomationRequest( + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_automation_runs(request=request) + client.update_automation(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationRunsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == cloud_deploy.UpdateAutomationRequest( + request_id="request_id_value", ) -def test_list_automation_runs_use_cached_wrapped_rpc(): +def test_update_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17305,9 +17401,7 @@ def test_list_automation_runs_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_automation_runs in client._transport._wrapped_methods - ) + assert client._transport.update_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -17315,15 +17409,20 @@ def test_list_automation_runs_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_automation_runs + client._transport.update_automation ] = mock_rpc request = {} - client.list_automation_runs(request) + client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_automation_runs(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17331,7 +17430,7 @@ def test_list_automation_runs_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_automation_runs_empty_call_async(): +async def test_update_automation_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -17341,23 +17440,20 @@ async def test_list_automation_runs_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_automation_runs() + response = await client.update_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationRunsRequest() + assert args[0] == cloud_deploy.UpdateAutomationRequest() @pytest.mark.asyncio -async def test_list_automation_runs_async_use_cached_wrapped_rpc( +async def test_update_automation_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -17374,32 +17470,38 @@ async def test_list_automation_runs_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_automation_runs + client._client._transport.update_automation in client._client._transport._wrapped_methods ) # Replace 
cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_automation_runs - ] = mock_object + client._client._transport.update_automation + ] = mock_rpc request = {} - await client.list_automation_runs(request) + await client.update_automation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.list_automation_runs(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_automation_runs_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationRunsRequest +async def test_update_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateAutomationRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17412,51 +17514,46 @@ async def test_list_automation_runs_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_automation_runs(request) + response = await client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutomationRunsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_automation_runs_async_from_dict(): - await test_list_automation_runs_async(request_type=dict) +async def test_update_automation_async_from_dict(): + await test_update_automation_async(request_type=dict) -def test_list_automation_runs_field_headers(): +def test_update_automation_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() - request.parent = "parent_value" + request.automation.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: - call.return_value = cloud_deploy.ListAutomationRunsResponse() - client.list_automation_runs(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -17467,30 +17564,30 @@ def test_list_automation_runs_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "automation.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_automation_runs_field_headers_async(): +async def test_update_automation_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() - request.parent = "parent_value" + request.automation.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_automation_runs(request) + await client.update_automation(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -17501,37 +17598,41 @@ async def test_list_automation_runs_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "automation.name=name_value", ) in kw["metadata"] -def test_list_automation_runs_flattened(): +def test_update_automation_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationRunsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_automation_runs( - parent="parent_value", + client.update_automation( + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_list_automation_runs_flattened_error(): +def test_update_automation_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17539,45 +17640,50 @@ def test_list_automation_runs_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_automation_runs( - cloud_deploy.ListAutomationRunsRequest(), - parent="parent_value", + client.update_automation( + cloud_deploy.UpdateAutomationRequest(), + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_list_automation_runs_flattened_async(): +async def test_update_automation_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationRunsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_automation_runs( - parent="parent_value", + response = await client.update_automation( + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_list_automation_runs_flattened_error_async(): +async def test_update_automation_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17585,222 +17691,21 @@ async def test_list_automation_runs_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_automation_runs( - cloud_deploy.ListAutomationRunsRequest(), - parent="parent_value", - ) - - -def test_list_automation_runs_pager(transport_name: str = "grpc"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_automation_runs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.AutomationRun) for i in results) - - -def test_list_automation_runs_pages(transport_name: str = "grpc"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, - ) - pages = list(client.list_automation_runs(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_automation_runs_async_pager(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automation_runs), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_automation_runs( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.AutomationRun) for i in responses) - - -@pytest.mark.asyncio -async def test_list_automation_runs_async_pages(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automation_runs), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, + await client.update_automation( + cloud_deploy.UpdateAutomationRequest(), + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_automation_runs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CancelAutomationRunRequest, + cloud_deploy.DeleteAutomationRequest, dict, ], ) -def test_cancel_automation_run(request_type, transport: str = "grpc"): +def test_delete_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17812,23 +17717,23 @@ def test_cancel_automation_run(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.CancelAutomationRunResponse() - response = client.cancel_automation_run(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelAutomationRunRequest() + request = cloud_deploy.DeleteAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) + assert isinstance(response, future.Future) -def test_cancel_automation_run_empty_call(): +def test_delete_automation_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -17838,18 +17743,18 @@ def test_cancel_automation_run_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.cancel_automation_run() + client.delete_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelAutomationRunRequest() + assert args[0] == cloud_deploy.DeleteAutomationRequest() -def test_cancel_automation_run_non_empty_request_with_auto_populated_field(): +def test_delete_automation_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudDeployClient( @@ -17860,26 +17765,30 @@ def test_cancel_automation_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.CancelAutomationRunRequest( + request = cloud_deploy.DeleteAutomationRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.cancel_automation_run(request=request) + client.delete_automation(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelAutomationRunRequest( + assert args[0] == cloud_deploy.DeleteAutomationRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) -def test_cancel_automation_run_use_cached_wrapped_rpc(): +def test_delete_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17893,10 +17802,7 @@ def test_cancel_automation_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.cancel_automation_run - in client._transport._wrapped_methods - ) + assert client._transport.delete_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -17904,15 +17810,20 @@ def test_cancel_automation_run_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.cancel_automation_run + client._transport.delete_automation ] = mock_rpc request = {} - client.cancel_automation_run(request) + client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.cancel_automation_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17920,7 +17831,7 @@ def test_cancel_automation_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_cancel_automation_run_empty_call_async(): +async def test_delete_automation_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -17930,20 +17841,20 @@ async def test_cancel_automation_run_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_automation_run() + response = await client.delete_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelAutomationRunRequest() + assert args[0] == cloud_deploy.DeleteAutomationRequest() @pytest.mark.asyncio -async def test_cancel_automation_run_async_use_cached_wrapped_rpc( +async def test_delete_automation_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -17960,33 +17871,38 @@ async def test_cancel_automation_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.cancel_automation_run + client._client._transport.delete_automation in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.cancel_automation_run - ] = mock_object + client._client._transport.delete_automation + ] = mock_rpc request = {} - await client.cancel_automation_run(request) + await client.delete_automation(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - await client.cancel_automation_run(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_cancel_automation_run_async( - transport: str = "grpc_asyncio", - request_type=cloud_deploy.CancelAutomationRunRequest, +async def test_delete_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteAutomationRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17999,46 +17915,46 @@ async def test_cancel_automation_run_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_automation_run(request) + response = await client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelAutomationRunRequest() + request = cloud_deploy.DeleteAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_cancel_automation_run_async_from_dict(): - await test_cancel_automation_run_async(request_type=dict) +async def test_delete_automation_async_from_dict(): + await test_delete_automation_async(request_type=dict) -def test_cancel_automation_run_field_headers(): +def test_delete_automation_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelAutomationRunRequest() + request = cloud_deploy.DeleteAutomationRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: - call.return_value = cloud_deploy.CancelAutomationRunResponse() - client.cancel_automation_run(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -18054,25 +17970,25 @@ def test_cancel_automation_run_field_headers(): @pytest.mark.asyncio -async def test_cancel_automation_run_field_headers_async(): +async def test_delete_automation_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelAutomationRunRequest() + request = cloud_deploy.DeleteAutomationRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/op") ) - await client.cancel_automation_run(request) + await client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -18087,20 +18003,20 @@ async def test_cancel_automation_run_field_headers_async(): ) in kw["metadata"] -def test_cancel_automation_run_flattened(): +def test_delete_automation_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelAutomationRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_automation_run( + client.delete_automation( name="name_value", ) @@ -18113,7 +18029,7 @@ def test_cancel_automation_run_flattened(): assert arg == mock_val -def test_cancel_automation_run_flattened_error(): +def test_delete_automation_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -18121,31 +18037,31 @@ def test_cancel_automation_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.cancel_automation_run( - cloud_deploy.CancelAutomationRunRequest(), + client.delete_automation( + cloud_deploy.DeleteAutomationRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_cancel_automation_run_flattened_async(): +async def test_delete_automation_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelAutomationRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_automation_run( + response = await client.delete_automation( name="name_value", ) @@ -18159,7 +18075,7 @@ async def test_cancel_automation_run_flattened_async(): @pytest.mark.asyncio -async def test_cancel_automation_run_flattened_error_async(): +async def test_delete_automation_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -18167,8 +18083,8 @@ async def test_cancel_automation_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.cancel_automation_run( - cloud_deploy.CancelAutomationRunRequest(), + await client.delete_automation( + cloud_deploy.DeleteAutomationRequest(), name="name_value", ) @@ -18176,52 +18092,103 @@ async def test_cancel_automation_run_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListDeliveryPipelinesRequest, + cloud_deploy.GetAutomationRequest, dict, ], ) -def test_list_delivery_pipelines_rest(request_type): +def test_get_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListDeliveryPipelinesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_deploy.Automation( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + service_account="service_account_value", ) + response = client.get_automation(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_delivery_pipelines(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeliveryPipelinesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_deploy.Automation) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.suspended is True + assert response.service_account == "service_account_value" -def test_list_delivery_pipelines_rest_use_cached_wrapped_rpc(): +def test_get_automation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_automation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRequest() + + +def test_get_automation_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.GetAutomationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_automation(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRequest( + name="name_value", + ) + + +def test_get_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -18229,40 +18196,4088 @@ def test_list_delivery_pipelines_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_delivery_pipelines - in client._transport._wrapped_methods - ) + assert client._transport.get_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_delivery_pipelines + client._transport._wrapped_methods[client._transport.get_automation] = mock_rpc + request = {} + client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_automation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_automation_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + service_account="service_account_value", + ) + ) + response = await client.get_automation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRequest() + + +@pytest.mark.asyncio +async def test_get_automation_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_automation + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_automation + ] = mock_rpc + + request = {} + await client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_automation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + service_account="service_account_value", + ) + ) + response = await client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_deploy.Automation) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.suspended is True + assert response.service_account == "service_account_value" + + +@pytest.mark.asyncio +async def test_get_automation_async_from_dict(): + await test_get_automation_async(request_type=dict) + + +def test_get_automation_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value = cloud_deploy.Automation() + client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_automation_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation() + ) + await client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_automation_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.Automation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_automation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_automation_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_automation( + cloud_deploy.GetAutomationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_automation_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.Automation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_automation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_automation_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_automation( + cloud_deploy.GetAutomationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListAutomationsRequest, + dict, + ], +) +def test_list_automations(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_deploy.ListAutomationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutomationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_automations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_automations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationsRequest() + + +def test_list_automations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = cloud_deploy.ListAutomationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_automations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_automations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_automations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_automations + ] = mock_rpc + request = {} + client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_automations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automations_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationsRequest() + + +@pytest.mark.asyncio +async def test_list_automations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_automations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + 
client._client._transport.list_automations + ] = mock_rpc + + request = {} + await client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_automations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automations_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationsRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAutomationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_automations_async_from_dict(): + await test_list_automations_async(request_type=dict) + + +def test_list_automations_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.ListAutomationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value = cloud_deploy.ListAutomationsResponse() + client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_automations_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.ListAutomationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse() + ) + await client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_automations_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_automations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_automations_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_automations( + cloud_deploy.ListAutomationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_automations_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_deploy.ListAutomationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_automations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_automations_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_automations( + cloud_deploy.ListAutomationsRequest(), + parent="parent_value", + ) + + +def test_list_automations_pager(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_automations(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Automation) for i in results) + + +def test_list_automations_pages(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_automations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_automations_async_pager(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_automations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_deploy.Automation) for i in responses) + + +@pytest.mark.asyncio +async def test_list_automations_async_pages(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_automations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetAutomationRunRequest, + dict, + ], +) +def test_get_automation_run(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_deploy.AutomationRun( + name="name_value", + etag="etag_value", + service_account="service_account_value", + target_id="target_id_value", + state=cloud_deploy.AutomationRun.State.SUCCEEDED, + state_description="state_description_value", + rule_id="rule_id_value", + automation_id="automation_id_value", + ) + response = client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.AutomationRun) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service_account == "service_account_value" + assert response.target_id == "target_id_value" + assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED + assert response.state_description == "state_description_value" + assert response.rule_id == "rule_id_value" + assert response.automation_id == "automation_id_value" + + +def test_get_automation_run_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRunRequest() + + +def test_get_automation_run_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.GetAutomationRunRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_automation_run(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRunRequest( + name="name_value", + ) + + +def test_get_automation_run_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_automation_run in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_automation_run + ] = mock_rpc + request = {} + client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_automation_run_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun( + name="name_value", + etag="etag_value", + service_account="service_account_value", + target_id="target_id_value", + state=cloud_deploy.AutomationRun.State.SUCCEEDED, + state_description="state_description_value", + rule_id="rule_id_value", + automation_id="automation_id_value", + ) + ) + response = await client.get_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRunRequest() + + +@pytest.mark.asyncio +async def test_get_automation_run_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap 
all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_automation_run + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_automation_run + ] = mock_rpc + + request = {} + await client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_automation_run_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRunRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun( + name="name_value", + etag="etag_value", + service_account="service_account_value", + target_id="target_id_value", + state=cloud_deploy.AutomationRun.State.SUCCEEDED, + state_description="state_description_value", + rule_id="rule_id_value", + automation_id="automation_id_value", + ) + ) + response = await client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.AutomationRun) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service_account == "service_account_value" + assert response.target_id == "target_id_value" + assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED + assert response.state_description == "state_description_value" + assert response.rule_id == "rule_id_value" + assert response.automation_id == "automation_id_value" + + +@pytest.mark.asyncio +async def test_get_automation_run_async_from_dict(): + await test_get_automation_run_async(request_type=dict) + + +def test_get_automation_run_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value = cloud_deploy.AutomationRun() + client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_automation_run_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun() + ) + await client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_automation_run_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.AutomationRun() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_automation_run_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_automation_run( + cloud_deploy.GetAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_automation_run_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.AutomationRun() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_automation_run_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_automation_run( + cloud_deploy.GetAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListAutomationRunsRequest, + dict, + ], +) +def test_list_automation_runs(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationRunsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutomationRunsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_automation_runs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_automation_runs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationRunsRequest() + + +def test_list_automation_runs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.ListAutomationRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_automation_runs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_automation_runs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_automation_runs in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_automation_runs + ] = mock_rpc + request = {} + client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_automation_runs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automation_runs_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automation_runs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationRunsRequest() + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_automation_runs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_automation_runs + ] = mock_rpc + + request = {} + await client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_automation_runs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automation_runs_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationRunsRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationRunsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutomationRunsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_from_dict(): + await test_list_automation_runs_async(request_type=dict) + + +def test_list_automation_runs_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloud_deploy.ListAutomationRunsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value = cloud_deploy.ListAutomationRunsResponse() + client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_automation_runs_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.ListAutomationRunsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse() + ) + await client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_automation_runs_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationRunsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_automation_runs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_automation_runs_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_automation_runs( + cloud_deploy.ListAutomationRunsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_automation_runs_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationRunsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_automation_runs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_automation_runs_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_automation_runs( + cloud_deploy.ListAutomationRunsRequest(), + parent="parent_value", + ) + + +def test_list_automation_runs_pager(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_automation_runs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + 
assert all(isinstance(i, cloud_deploy.AutomationRun) for i in results) + + +def test_list_automation_runs_pages(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + pages = list(client.list_automation_runs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_pager(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_automation_runs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_deploy.AutomationRun) for i in responses) + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_pages(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_automation_runs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.CancelAutomationRunRequest, + dict, + ], +) +def test_cancel_automation_run(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.CancelAutomationRunResponse() + response = client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.CancelAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) + + +def test_cancel_automation_run_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.cancel_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.CancelAutomationRunRequest() + + +def test_cancel_automation_run_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.CancelAutomationRunRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.cancel_automation_run(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.CancelAutomationRunRequest( + name="name_value", + ) + + +def test_cancel_automation_run_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.cancel_automation_run + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.cancel_automation_run + ] = mock_rpc + request = {} + client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.cancel_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_automation_run_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + response = await client.cancel_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.CancelAutomationRunRequest() + + +@pytest.mark.asyncio +async def test_cancel_automation_run_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.cancel_automation_run + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.cancel_automation_run + ] = mock_rpc + + request = {} + await client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.cancel_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_automation_run_async( + transport: str = "grpc_asyncio", + request_type=cloud_deploy.CancelAutomationRunRequest, +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + response = await client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.CancelAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) + + +@pytest.mark.asyncio +async def test_cancel_automation_run_async_from_dict(): + await test_cancel_automation_run_async(request_type=dict) + + +def test_cancel_automation_run_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.CancelAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value = cloud_deploy.CancelAutomationRunResponse() + client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_automation_run_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.CancelAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + await client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_cancel_automation_run_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.CancelAutomationRunResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_cancel_automation_run_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_automation_run( + cloud_deploy.CancelAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_cancel_automation_run_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.CancelAutomationRunResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.cancel_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_cancel_automation_run_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.cancel_automation_run( + cloud_deploy.CancelAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListDeliveryPipelinesRequest, + dict, + ], +) +def test_list_delivery_pipelines_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_delivery_pipelines(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeliveryPipelinesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_delivery_pipelines_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_delivery_pipelines + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_delivery_pipelines + ] = mock_rpc + + request = {} + client.list_delivery_pipelines(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_delivery_pipelines(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_delivery_pipelines_rest_required_fields( + request_type=cloud_deploy.ListDeliveryPipelinesRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_delivery_pipelines(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_delivery_pipelines_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_delivery_pipelines._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_delivery_pipelines_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + 
) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_delivery_pipelines" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_list_delivery_pipelines" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.ListDeliveryPipelinesRequest.pb( + cloud_deploy.ListDeliveryPipelinesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.ListDeliveryPipelinesResponse.to_json( + cloud_deploy.ListDeliveryPipelinesResponse() + ) + + request = cloud_deploy.ListDeliveryPipelinesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.ListDeliveryPipelinesResponse() + + client.list_delivery_pipelines( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_delivery_pipelines_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListDeliveryPipelinesRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_delivery_pipelines(request) + + +def test_list_delivery_pipelines_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_delivery_pipelines(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + % client.transport._host, + args[1], + ) + + +def test_list_delivery_pipelines_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_delivery_pipelines( + cloud_deploy.ListDeliveryPipelinesRequest(), + parent="parent_value", + ) + + +def test_list_delivery_pipelines_rest_pager(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + ], + next_page_token="abc", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[], + next_page_token="def", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_deploy.ListDeliveryPipelinesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_delivery_pipelines(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in results) + + pages = list(client.list_delivery_pipelines(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetDeliveryPipelineRequest, + dict, + ], +) +def test_get_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": 
"projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_delivery_pipeline(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.DeliveryPipeline) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.suspended is True + + +def test_get_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # 
operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_delivery_pipeline + ] = mock_rpc + + request = {} + client.get_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.GetDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_delivery_pipeline(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( 
+ transports.CloudDeployRestInterceptor, "post_get_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_get_delivery_pipeline" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.GetDeliveryPipelineRequest.pb( + cloud_deploy.GetDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.DeliveryPipeline.to_json( + cloud_deploy.DeliveryPipeline() + ) + + request = cloud_deploy.GetDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.DeliveryPipeline() + + client.get_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetDeliveryPipelineRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_delivery_pipeline(request) + + +def test_get_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" + % client.transport._host, + args[1], + ) + + +def test_get_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_delivery_pipeline( + cloud_deploy.GetDeliveryPipelineRequest(), + name="name_value", + ) + + +def test_get_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.CreateDeliveryPipelineRequest, + dict, + ], +) +def test_create_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["delivery_pipeline"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": { + "standard": { + "verify": True, + "predeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + "postdeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + }, + "canary": { + "runtime_config": { + "kubernetes": { + "gateway_service_mesh": { + "http_route": "http_route_value", + "service": "service_value", + "deployment": "deployment_value", + "route_update_wait_time": { + "seconds": 751, + "nanos": 543, + }, + 
"stable_cutback_duration": {}, + "pod_selector_label": "pod_selector_label_value", + }, + "service_networking": { + "service": "service_value", + "deployment": "deployment_value", + "disable_pod_overprovisioning": True, + "pod_selector_label": "pod_selector_label_value", + }, + }, + "cloud_run": { + "automatic_traffic_control": True, + "canary_revision_tags": [ + "canary_revision_tags_value1", + "canary_revision_tags_value2", + ], + "prior_revision_tags": [ + "prior_revision_tags_value1", + "prior_revision_tags_value2", + ], + "stable_revision_tags": [ + "stable_revision_tags_value1", + "stable_revision_tags_value2", + ], + }, + }, + "canary_deployment": { + "percentages": [1170, 1171], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + }, + "custom_canary_deployment": { + "phase_configs": [ + { + "phase_id": "phase_id_value", + "percentage": 1054, + "profiles": [ + "profiles_value1", + "profiles_value2", + ], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + } + ] + }, + }, + }, + "deploy_parameters": [{"values": {}, "match_target_labels": {}}], + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": ["missing_targets_value1", "missing_targets_value2"], + "update_time": {}, + }, + "targets_type_condition": { + "status": True, + "error_details": "error_details_value", + }, + }, + "etag": "etag_value", + "suspended": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.CreateDeliveryPipelineRequest.meta.fields[ + "delivery_pipeline" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the 
runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["delivery_pipeline"][field])): + del request_init["delivery_pipeline"][field][i][subfield] + else: + del request_init["delivery_pipeline"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_delivery_pipeline(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_delivery_pipeline + ] = mock_rpc + + request = {} + client.create_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.CreateDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["delivery_pipeline_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "deliveryPipelineId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "deliveryPipelineId" in jsonified_request + assert ( + jsonified_request["deliveryPipelineId"] == request_init["delivery_pipeline_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["deliveryPipelineId"] = "delivery_pipeline_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "delivery_pipeline_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "deliveryPipelineId" in jsonified_request + assert jsonified_request["deliveryPipelineId"] == "delivery_pipeline_id_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_delivery_pipeline(request) + + expected_params = [ + ( + "deliveryPipelineId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deliveryPipelineId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "deliveryPipelineId", + "deliveryPipeline", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_create_delivery_pipeline" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.CreateDeliveryPipelineRequest.pb( + cloud_deploy.CreateDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.CreateDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateDeliveryPipelineRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_delivery_pipeline(request) + + +def test_create_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + delivery_pipeline_id="delivery_pipeline_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + % client.transport._host, + args[1], + ) + + +def test_create_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_delivery_pipeline( + cloud_deploy.CreateDeliveryPipelineRequest(), + parent="parent_value", + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + delivery_pipeline_id="delivery_pipeline_id_value", + ) + + +def test_create_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.UpdateDeliveryPipelineRequest, + dict, + ], +) +def test_update_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } + request_init["delivery_pipeline"] = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": { + "standard": { + "verify": True, + "predeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + "postdeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + }, + "canary": { + "runtime_config": { + "kubernetes": { + "gateway_service_mesh": { + "http_route": "http_route_value", + "service": "service_value", + "deployment": "deployment_value", + "route_update_wait_time": { + "seconds": 751, + "nanos": 543, + }, + "stable_cutback_duration": {}, + "pod_selector_label": "pod_selector_label_value", + }, + "service_networking": { + "service": "service_value", + "deployment": "deployment_value", + "disable_pod_overprovisioning": True, + "pod_selector_label": 
"pod_selector_label_value", + }, + }, + "cloud_run": { + "automatic_traffic_control": True, + "canary_revision_tags": [ + "canary_revision_tags_value1", + "canary_revision_tags_value2", + ], + "prior_revision_tags": [ + "prior_revision_tags_value1", + "prior_revision_tags_value2", + ], + "stable_revision_tags": [ + "stable_revision_tags_value1", + "stable_revision_tags_value2", + ], + }, + }, + "canary_deployment": { + "percentages": [1170, 1171], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + }, + "custom_canary_deployment": { + "phase_configs": [ + { + "phase_id": "phase_id_value", + "percentage": 1054, + "profiles": [ + "profiles_value1", + "profiles_value2", + ], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + } + ] + }, + }, + }, + "deploy_parameters": [{"values": {}, "match_target_labels": {}}], + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": ["missing_targets_value1", "missing_targets_value2"], + "update_time": {}, + }, + "targets_type_condition": { + "status": True, + "error_details": "error_details_value", + }, + }, + "etag": "etag_value", + "suspended": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateDeliveryPipelineRequest.meta.fields[ + "delivery_pipeline" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["delivery_pipeline"][field])): + del request_init["delivery_pipeline"][field][i][subfield] + 
else: + del request_init["delivery_pipeline"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_delivery_pipeline(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_delivery_pipeline + ] = mock_rpc + + request = {} + client.update_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.UpdateDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_delivery_pipeline(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "deliveryPipeline", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_update_delivery_pipeline" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.UpdateDeliveryPipelineRequest.pb( + cloud_deploy.UpdateDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.UpdateDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateDeliveryPipelineRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_delivery_pipeline(request) + + +def test_update_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{delivery_pipeline.name=projects/*/locations/*/deliveryPipelines/*}" + % client.transport._host, + args[1], + ) + + +def test_update_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_delivery_pipeline( + cloud_deploy.UpdateDeliveryPipelineRequest(), + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.DeleteDeliveryPipelineRequest, + dict, + ], +) +def test_delete_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_delivery_pipeline(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_delivery_pipeline ] = mock_rpc request = {} - client.list_delivery_pipelines(request) + client.delete_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_delivery_pipelines(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_delivery_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_delivery_pipelines_rest_required_fields( - request_type=cloud_deploy.ListDeliveryPipelinesRequest, +def test_delete_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.DeleteDeliveryPipelineRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18273,30 +22288,31 @@ def test_list_delivery_pipelines_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) + ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) + ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "allow_missing", + "etag", + "force", + "request_id", + "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18305,7 +22321,7 @@ def test_list_delivery_pipelines_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListDeliveryPipelinesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18317,49 +22333,47 @@ def test_list_delivery_pipelines_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_delivery_pipelines(request) + response = client.delete_delivery_pipeline(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_delivery_pipelines_rest_unset_required_fields(): +def 
test_delete_delivery_pipeline_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_delivery_pipelines._get_unset_required_fields({}) + unset_fields = transport.delete_delivery_pipeline._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "allowMissing", + "etag", + "force", + "requestId", + "validateOnly", ) ) - & set(("parent",)) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_delivery_pipelines_rest_interceptors(null_interceptor): +def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18372,14 +22386,16 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_delivery_pipelines" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_delivery_pipeline" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_delivery_pipelines" + transports.CloudDeployRestInterceptor, "pre_delete_delivery_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListDeliveryPipelinesRequest.pb( - cloud_deploy.ListDeliveryPipelinesRequest() + pb_message = cloud_deploy.DeleteDeliveryPipelineRequest.pb( + cloud_deploy.DeleteDeliveryPipelineRequest() ) transcode.return_value = { "method": "post", @@ -18391,19 +22407,19 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = 
cloud_deploy.ListDeliveryPipelinesResponse.to_json( - cloud_deploy.ListDeliveryPipelinesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListDeliveryPipelinesRequest() + request = cloud_deploy.DeleteDeliveryPipelineRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListDeliveryPipelinesResponse() + post.return_value = operations_pb2.Operation() - client.list_delivery_pipelines( + client.delete_delivery_pipeline( request, metadata=[ ("key", "val"), @@ -18415,8 +22431,8 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_delivery_pipelines_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListDeliveryPipelinesRequest +def test_delete_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteDeliveryPipelineRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18424,7 +22440,9 @@ def test_list_delivery_pipelines_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -18436,10 +22454,10 @@ def test_list_delivery_pipelines_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_delivery_pipelines(request) + client.delete_delivery_pipeline(request) -def test_list_delivery_pipelines_rest_flattened(): +def test_delete_delivery_pipeline_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18448,40 +22466,40 @@ def test_list_delivery_pipelines_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListDeliveryPipelinesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_delivery_pipelines(**mock_args) + client.delete_delivery_pipeline(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" % client.transport._host, args[1], ) -def test_list_delivery_pipelines_rest_flattened_error(transport: str = "rest"): +def test_delete_delivery_pipeline_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18490,126 +22508,61 @@ def test_list_delivery_pipelines_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_delivery_pipelines( - cloud_deploy.ListDeliveryPipelinesRequest(), - parent="parent_value", + client.delete_delivery_pipeline( + cloud_deploy.DeleteDeliveryPipelineRequest(), + name="name_value", ) -def test_list_delivery_pipelines_rest_pager(transport: str = "rest"): +def test_delete_delivery_pipeline_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[ - cloud_deploy.DeliveryPipeline(), - cloud_deploy.DeliveryPipeline(), - cloud_deploy.DeliveryPipeline(), - ], - next_page_token="abc", - ), - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[], - next_page_token="def", - ), - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[ - cloud_deploy.DeliveryPipeline(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[ - cloud_deploy.DeliveryPipeline(), - cloud_deploy.DeliveryPipeline(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - cloud_deploy.ListDeliveryPipelinesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_delivery_pipelines(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in results) - - pages = list(client.list_delivery_pipelines(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetDeliveryPipelineRequest, + cloud_deploy.ListTargetsRequest, dict, ], ) -def test_get_delivery_pipeline_rest(request_type): +def test_list_targets_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy 
transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.DeliveryPipeline( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, + return_value = cloud_deploy.ListTargetsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_delivery_pipeline(request) + response = client.list_targets(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.DeliveryPipeline) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.suspended is True + assert isinstance(response, pagers.ListTargetsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_list_targets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18623,40 +22576,35 @@ def test_get_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.list_targets in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_targets] = mock_rpc request = {} - client.get_delivery_pipeline(request) + client.list_targets(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_delivery_pipeline(request) + client.list_targets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.GetDeliveryPipelineRequest, +def test_list_targets_rest_required_fields( + request_type=cloud_deploy.ListTargetsRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18667,21 +22615,30 @@ def test_get_delivery_pipeline_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).list_targets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).list_targets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18690,7 +22647,7 @@ def test_get_delivery_pipeline_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.DeliveryPipeline() + return_value = cloud_deploy.ListTargetsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18711,30 +22668,40 @@ def test_get_delivery_pipeline_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_delivery_pipeline(request) + response = client.list_targets(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_delivery_pipeline_rest_unset_required_fields(): +def test_list_targets_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_delivery_pipeline._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.list_targets._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_delivery_pipeline_rest_interceptors(null_interceptor): +def test_list_targets_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18747,14 +22714,14 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_list_targets" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_list_targets" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetDeliveryPipelineRequest.pb( - cloud_deploy.GetDeliveryPipelineRequest() + pb_message = cloud_deploy.ListTargetsRequest.pb( + cloud_deploy.ListTargetsRequest() ) transcode.return_value = { "method": "post", @@ -18766,19 +22733,19 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.DeliveryPipeline.to_json( - cloud_deploy.DeliveryPipeline() + req.return_value._content = cloud_deploy.ListTargetsResponse.to_json( + cloud_deploy.ListTargetsResponse() ) - request = cloud_deploy.GetDeliveryPipelineRequest() + request = cloud_deploy.ListTargetsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.DeliveryPipeline() + post.return_value = cloud_deploy.ListTargetsResponse() - 
client.get_delivery_pipeline( + client.list_targets( request, metadata=[ ("key", "val"), @@ -18790,8 +22757,8 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetDeliveryPipelineRequest +def test_list_targets_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListTargetsRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18799,9 +22766,7 @@ def test_get_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18813,10 +22778,10 @@ def test_get_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_delivery_pipeline(request) + client.list_targets(request) -def test_get_delivery_pipeline_rest_flattened(): +def test_list_targets_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18825,16 +22790,14 @@ def test_get_delivery_pipeline_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.DeliveryPipeline() + return_value = cloud_deploy.ListTargetsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -18842,25 +22805,24 @@ def test_get_delivery_pipeline_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_delivery_pipeline(**mock_args) + client.list_targets(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, args[1], ) -def test_get_delivery_pipeline_rest_flattened_error(transport: str = "rest"): +def test_list_targets_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18869,225 +22831,113 @@ def test_get_delivery_pipeline_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_delivery_pipeline( - cloud_deploy.GetDeliveryPipelineRequest(), - name="name_value", + client.list_targets( + cloud_deploy.ListTargetsRequest(), + parent="parent_value", + ) + + +def test_list_targets_rest_pager(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + cloud_deploy.Target(), + cloud_deploy.Target(), + ], + next_page_token="abc", + ), + cloud_deploy.ListTargetsResponse( + targets=[], + next_page_token="def", + ), + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + cloud_deploy.Target(), + ], + ), ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloud_deploy.ListTargetsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"parent": "projects/sample1/locations/sample2"} -def test_get_delivery_pipeline_rest_error(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pager = client.list_targets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Target) for i in results) + + 
pages = list(client.list_targets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateDeliveryPipelineRequest, + cloud_deploy.RollbackTargetRequest, dict, ], ) -def test_create_delivery_pipeline_rest(request_type): +def test_rollback_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["delivery_pipeline"] = { - "name": "name_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "serial_pipeline": { - "stages": [ - { - "target_id": "target_id_value", - "profiles": ["profiles_value1", "profiles_value2"], - "strategy": { - "standard": { - "verify": True, - "predeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - "postdeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - }, - "canary": { - "runtime_config": { - "kubernetes": { - "gateway_service_mesh": { - "http_route": "http_route_value", - "service": "service_value", - "deployment": "deployment_value", - "route_update_wait_time": { - "seconds": 751, - "nanos": 543, - }, - "stable_cutback_duration": {}, - "pod_selector_label": "pod_selector_label_value", - }, - "service_networking": { - "service": "service_value", - "deployment": "deployment_value", - "disable_pod_overprovisioning": True, - "pod_selector_label": "pod_selector_label_value", - }, - }, - "cloud_run": { - "automatic_traffic_control": True, - "canary_revision_tags": [ - "canary_revision_tags_value1", - "canary_revision_tags_value2", - ], - "prior_revision_tags": [ - "prior_revision_tags_value1", - "prior_revision_tags_value2", - ], - "stable_revision_tags": [ 
- "stable_revision_tags_value1", - "stable_revision_tags_value2", - ], - }, - }, - "canary_deployment": { - "percentages": [1170, 1171], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - }, - "custom_canary_deployment": { - "phase_configs": [ - { - "phase_id": "phase_id_value", - "percentage": 1054, - "profiles": [ - "profiles_value1", - "profiles_value2", - ], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - } - ] - }, - }, - }, - "deploy_parameters": [{"values": {}, "match_target_labels": {}}], - } - ] - }, - "condition": { - "pipeline_ready_condition": {"status": True, "update_time": {}}, - "targets_present_condition": { - "status": True, - "missing_targets": ["missing_targets_value1", "missing_targets_value2"], - "update_time": {}, - }, - "targets_type_condition": { - "status": True, - "error_details": "error_details_value", - }, - }, - "etag": "etag_value", - "suspended": True, + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateDeliveryPipelineRequest.meta.fields[ - "delivery_pipeline" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["delivery_pipeline"][field])): - del request_init["delivery_pipeline"][field][i][subfield] - 
else: - del request_init["delivery_pipeline"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.RollbackTargetResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_delivery_pipeline(request) + response = client.rollback_target(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.RollbackTargetResponse) -def test_create_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_rollback_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19101,45 +22951,37 @@ def test_create_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.rollback_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.rollback_target] = mock_rpc request = {} - client.create_delivery_pipeline(request) + client.rollback_target(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_delivery_pipeline(request) + client.rollback_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.CreateDeliveryPipelineRequest, +def test_rollback_target_rest_required_fields( + request_type=cloud_deploy.RollbackTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" - request_init["delivery_pipeline_id"] = "" + request_init["name"] = "" + request_init["target_id"] = "" + request_init["rollout_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19147,40 +22989,30 @@ def test_create_delivery_pipeline_rest_required_fields( ) # verify fields with default values are dropped - assert "deliveryPipelineId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).rollback_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "deliveryPipelineId" in jsonified_request - assert ( - jsonified_request["deliveryPipelineId"] == request_init["delivery_pipeline_id"] - ) - jsonified_request["parent"] = "parent_value" - 
jsonified_request["deliveryPipelineId"] = "delivery_pipeline_id_value" + jsonified_request["name"] = "name_value" + jsonified_request["targetId"] = "target_id_value" + jsonified_request["rolloutId"] = "rollout_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "delivery_pipeline_id", - "request_id", - "validate_only", - ) - ) + ).rollback_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "deliveryPipelineId" in jsonified_request - assert jsonified_request["deliveryPipelineId"] == "delivery_pipeline_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == "target_id_value" + assert "rolloutId" in jsonified_request + assert jsonified_request["rolloutId"] == "rollout_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19189,7 +23021,7 @@ def test_create_delivery_pipeline_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.RollbackTargetResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19209,50 +23041,41 @@ def test_create_delivery_pipeline_rest_required_fields( response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_delivery_pipeline(request) + response = client.rollback_target(request) - expected_params = [ - ( - "deliveryPipelineId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_delivery_pipeline_rest_unset_required_fields(): +def test_rollback_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_delivery_pipeline._get_unset_required_fields({}) + unset_fields = transport.rollback_target._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "deliveryPipelineId", - "requestId", - "validateOnly", - ) - ) + set(()) & set( ( - "parent", - "deliveryPipelineId", - "deliveryPipeline", + "name", + "targetId", + "rolloutId", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_delivery_pipeline_rest_interceptors(null_interceptor): +def test_rollback_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19265,16 +23088,14 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, 
"_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_rollback_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_rollback_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateDeliveryPipelineRequest.pb( - cloud_deploy.CreateDeliveryPipelineRequest() + pb_message = cloud_deploy.RollbackTargetRequest.pb( + cloud_deploy.RollbackTargetRequest() ) transcode.return_value = { "method": "post", @@ -19286,19 +23107,19 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.RollbackTargetResponse.to_json( + cloud_deploy.RollbackTargetResponse() ) - request = cloud_deploy.CreateDeliveryPipelineRequest() + request = cloud_deploy.RollbackTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.RollbackTargetResponse() - client.create_delivery_pipeline( + client.rollback_target( request, metadata=[ ("key", "val"), @@ -19310,8 +23131,8 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateDeliveryPipelineRequest +def test_rollback_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.RollbackTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19319,7 +23140,9 @@ def 
test_create_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19331,285 +23154,129 @@ def test_create_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_delivery_pipeline(request) - - -def test_create_delivery_pipeline_rest_flattened(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - delivery_pipeline_id="delivery_pipeline_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_delivery_pipeline(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" - % client.transport._host, - args[1], - ) - - -def test_create_delivery_pipeline_rest_flattened_error(transport: str = "rest"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_delivery_pipeline( - cloud_deploy.CreateDeliveryPipelineRequest(), - parent="parent_value", - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - delivery_pipeline_id="delivery_pipeline_id_value", - ) - - -def test_create_delivery_pipeline_rest_error(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + client.rollback_target(request) -@pytest.mark.parametrize( - "request_type", - [ - cloud_deploy.UpdateDeliveryPipelineRequest, - dict, - ], -) -def test_update_delivery_pipeline_rest(request_type): +def test_rollback_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "delivery_pipeline": { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - } - request_init["delivery_pipeline"] = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "serial_pipeline": { - "stages": [ - { - "target_id": "target_id_value", - "profiles": ["profiles_value1", "profiles_value2"], - "strategy": { - "standard": { - "verify": True, - "predeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - "postdeploy": { - 
"actions": ["actions_value1", "actions_value2"] - }, - }, - "canary": { - "runtime_config": { - "kubernetes": { - "gateway_service_mesh": { - "http_route": "http_route_value", - "service": "service_value", - "deployment": "deployment_value", - "route_update_wait_time": { - "seconds": 751, - "nanos": 543, - }, - "stable_cutback_duration": {}, - "pod_selector_label": "pod_selector_label_value", - }, - "service_networking": { - "service": "service_value", - "deployment": "deployment_value", - "disable_pod_overprovisioning": True, - "pod_selector_label": "pod_selector_label_value", - }, - }, - "cloud_run": { - "automatic_traffic_control": True, - "canary_revision_tags": [ - "canary_revision_tags_value1", - "canary_revision_tags_value2", - ], - "prior_revision_tags": [ - "prior_revision_tags_value1", - "prior_revision_tags_value2", - ], - "stable_revision_tags": [ - "stable_revision_tags_value1", - "stable_revision_tags_value2", - ], - }, - }, - "canary_deployment": { - "percentages": [1170, 1171], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - }, - "custom_canary_deployment": { - "phase_configs": [ - { - "phase_id": "phase_id_value", - "percentage": 1054, - "profiles": [ - "profiles_value1", - "profiles_value2", - ], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - } - ] - }, - }, - }, - "deploy_parameters": [{"values": {}, "match_target_labels": {}}], - } - ] - }, - "condition": { - "pipeline_ready_condition": {"status": True, "update_time": {}}, - "targets_present_condition": { - "status": True, - "missing_targets": ["missing_targets_value1", "missing_targets_value2"], - "update_time": {}, - }, - "targets_type_condition": { - "status": True, - "error_details": "error_details_value", - }, - }, - "etag": "etag_value", - "suspended": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + ) - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.UpdateDeliveryPipelineRequest.meta.fields[ - "delivery_pipeline" - ] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.RollbackTargetResponse() - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + target_id="target_id_value", + rollout_id="rollout_id_value", + ) + mock_args.update(sample_request) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - runtime_nested_fields = [ - (field.name, nested_field.name) - for 
field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + client.rollback_target(**mock_args) - subfields_not_in_runtime = [] + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}:rollbackTarget" + % client.transport._host, + args[1], + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +def test_rollback_target_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["delivery_pipeline"][field])): - del 
request_init["delivery_pipeline"][field][i][subfield] - else: - del request_init["delivery_pipeline"][field][subfield] + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rollback_target( + cloud_deploy.RollbackTargetRequest(), + name="name_value", + target_id="target_id_value", + rollout_id="rollout_id_value", + ) + + +def test_rollback_target_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetTargetRequest, + dict, + ], +) +def test_get_target_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Target( + name="name_value", + target_id="target_id_value", + uid="uid_value", + description="description_value", + require_approval=True, + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Target.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_delivery_pipeline(request) + response = client.get_target(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.Target) + assert response.name == "name_value" + assert response.target_id == "target_id_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.require_approval is True + assert response.etag == "etag_value" -def test_update_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_get_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19623,43 +23290,33 @@ def test_update_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.get_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_target] = mock_rpc request = {} - client.update_delivery_pipeline(request) + client.get_target(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_delivery_pipeline(request) + client.get_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.UpdateDeliveryPipelineRequest, -): +def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequest): transport_class = transports.CloudDeployRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19670,26 +23327,21 @@ def test_update_delivery_pipeline_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).get_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "allow_missing", - "request_id", - "update_mask", - "validate_only", - ) - ) + ).get_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19698,7 +23350,7 @@ def test_update_delivery_pipeline_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Target() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19710,52 +23362,39 @@ def test_update_delivery_pipeline_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.Target.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_delivery_pipeline(request) + response = client.get_target(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_delivery_pipeline_rest_unset_required_fields(): +def test_get_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials 
) - unset_fields = transport.update_delivery_pipeline._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "requestId", - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "updateMask", - "deliveryPipeline", - ) - ) - ) + unset_fields = transport.get_target._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_delivery_pipeline_rest_interceptors(null_interceptor): +def test_get_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19768,17 +23407,13 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_update_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_get_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_update_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_get_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.UpdateDeliveryPipelineRequest.pb( - cloud_deploy.UpdateDeliveryPipelineRequest() - ) + pb_message = cloud_deploy.GetTargetRequest.pb(cloud_deploy.GetTargetRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19789,19 +23424,17 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = cloud_deploy.Target.to_json(cloud_deploy.Target()) - request = 
cloud_deploy.UpdateDeliveryPipelineRequest() + request = cloud_deploy.GetTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.Target() - client.update_delivery_pipeline( + client.get_target( request, metadata=[ ("key", "val"), @@ -19813,8 +23446,8 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.UpdateDeliveryPipelineRequest +def test_get_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19822,11 +23455,7 @@ def test_update_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "delivery_pipeline": { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - } + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19838,10 +23467,10 @@ def test_update_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_delivery_pipeline(request) + client.get_target(request) -def test_update_delivery_pipeline_rest_flattened(): +def test_get_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19850,43 +23479,39 @@ def test_update_delivery_pipeline_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Target() # get arguments that satisfy an http rule for this method - sample_request = { - "delivery_pipeline": { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - } + sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} # get truthy value for each flattened field mock_args = dict( - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Target.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_delivery_pipeline(**mock_args) + client.get_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{delivery_pipeline.name=projects/*/locations/*/deliveryPipelines/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, args[1], ) -def test_update_delivery_pipeline_rest_flattened_error(transport: str = "rest"): +def test_get_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19895,14 +23520,13 @@ def test_update_delivery_pipeline_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_delivery_pipeline( - cloud_deploy.UpdateDeliveryPipelineRequest(), - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_target( + cloud_deploy.GetTargetRequest(), + name="name_value", ) -def test_update_delivery_pipeline_rest_error(): +def test_get_target_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19911,20 +23535,126 @@ def test_update_delivery_pipeline_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteDeliveryPipelineRequest, + cloud_deploy.CreateTargetRequest, dict, ], ) -def test_delete_delivery_pipeline_rest(request_type): +def test_create_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["target"] = { + "name": "name_value", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "gke": { + "cluster": "cluster_value", + "internal_ip": True, + "proxy_url": "proxy_url_value", + }, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, + "custom_target": {"custom_target_type": "custom_target_type_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": 
"worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + "verbose": True, + } + ], + "deploy_parameters": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.CreateTargetRequest.meta.fields["target"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + 
result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target"][field])): + del request_init["target"][field][i][subfield] + else: + del request_init["target"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -19939,13 +23669,13 @@ def test_delete_delivery_pipeline_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_delivery_pipeline(request) + response = client.create_target(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_create_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19959,22 +23689,17 @@ def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.create_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_target] = mock_rpc request = {} - client.delete_delivery_pipeline(request) + client.create_target(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -19983,20 +23708,21 @@ def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_delivery_pipeline(request) + client.create_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.DeleteDeliveryPipelineRequest, +def test_create_target_rest_required_fields( + request_type=cloud_deploy.CreateTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["target_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20004,34 +23730,38 @@ def test_delete_delivery_pipeline_rest_required_fields( ) # verify fields with default values are dropped + assert "targetId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).create_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == request_init["target_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["targetId"] = "target_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).create_target._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", - "etag", - "force", "request_id", + "target_id", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == "target_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20052,9 +23782,10 @@ def test_delete_delivery_pipeline_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -20064,35 +23795,45 @@ def test_delete_delivery_pipeline_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_delivery_pipeline(request) + response = client.create_target(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "targetId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_delivery_pipeline_rest_unset_required_fields(): +def test_create_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_delivery_pipeline._get_unset_required_fields({}) + unset_fields = transport.create_target._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", - "etag", - "force", "requestId", + "targetId", "validateOnly", ) ) - & set(("name",)) + & set( + ( + "parent", + 
"targetId", + "target", + ) + ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): +def test_create_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20107,14 +23848,14 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_delete_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_create_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_delete_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_create_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.DeleteDeliveryPipelineRequest.pb( - cloud_deploy.DeleteDeliveryPipelineRequest() + pb_message = cloud_deploy.CreateTargetRequest.pb( + cloud_deploy.CreateTargetRequest() ) transcode.return_value = { "method": "post", @@ -20130,7 +23871,7 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.DeleteDeliveryPipelineRequest() + request = cloud_deploy.CreateTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20138,7 +23879,7 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_delivery_pipeline( + client.create_target( request, metadata=[ ("key", "val"), @@ -20150,8 +23891,8 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.DeleteDeliveryPipelineRequest +def 
test_create_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20159,9 +23900,7 @@ def test_delete_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20173,10 +23912,10 @@ def test_delete_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_delivery_pipeline(request) + client.create_target(request) -def test_delete_delivery_pipeline_rest_flattened(): +def test_create_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20188,13 +23927,13 @@ def test_delete_delivery_pipeline_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + target_id="target_id_value", ) mock_args.update(sample_request) @@ -20205,20 +23944,19 @@ def test_delete_delivery_pipeline_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_delivery_pipeline(**mock_args) + client.create_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" - % client.transport._host, + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, args[1], ) -def test_delete_delivery_pipeline_rest_flattened_error(transport: str = "rest"): +def test_create_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20227,13 +23965,15 @@ def test_delete_delivery_pipeline_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_delivery_pipeline( - cloud_deploy.DeleteDeliveryPipelineRequest(), - name="name_value", + client.create_target( + cloud_deploy.CreateTargetRequest(), + parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + target_id="target_id_value", ) -def test_delete_delivery_pipeline_rest_error(): +def test_create_target_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20242,46 +23982,149 @@ def test_delete_delivery_pipeline_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListTargetsRequest, + cloud_deploy.UpdateTargetRequest, dict, ], ) -def test_list_targets_rest(request_type): +def test_update_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } + request_init["target"] = { + "name": "projects/sample1/locations/sample2/targets/sample3", + "target_id": "target_id_value", + "uid": "uid_value", + 
"description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "gke": { + "cluster": "cluster_value", + "internal_ip": True, + "proxy_url": "proxy_url_value", + }, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, + "custom_target": {"custom_target_type": "custom_target_type_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + "verbose": True, + } + ], + "deploy_parameters": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateTargetRequest.meta.fields["target"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target"][field])): + del request_init["target"][field][i][subfield] + else: + del 
request_init["target"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListTargetsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_targets(request) + response = client.update_target(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTargetsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_targets_rest_use_cached_wrapped_rpc(): +def test_update_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20295,35 +24138,38 @@ def test_list_targets_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_targets in client._transport._wrapped_methods + assert client._transport.update_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_targets] = mock_rpc + client._transport._wrapped_methods[client._transport.update_target] = mock_rpc request = {} - client.list_targets(request) + client.update_target(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_targets(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_targets_rest_required_fields( - request_type=cloud_deploy.ListTargetsRequest, +def test_update_target_rest_required_fields( + request_type=cloud_deploy.UpdateTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20334,30 +24180,26 @@ def test_list_targets_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_targets._get_unset_required_fields(jsonified_request) + ).update_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_targets._get_unset_required_fields(jsonified_request) + ).update_target._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "allow_missing", + "request_id", + "update_mask", + "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20366,7 +24208,7 @@ def test_list_targets_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListTargetsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20378,49 +24220,52 @@ def test_list_targets_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_targets(request) + response = client.update_target(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_targets_rest_unset_required_fields(): +def test_update_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.list_targets._get_unset_required_fields({}) + unset_fields = transport.update_target._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "target", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_targets_rest_interceptors(null_interceptor): +def test_update_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20433,14 +24278,16 @@ def test_list_targets_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_targets" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_targets" + transports.CloudDeployRestInterceptor, "pre_update_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListTargetsRequest.pb( - cloud_deploy.ListTargetsRequest() + pb_message = cloud_deploy.UpdateTargetRequest.pb( + cloud_deploy.UpdateTargetRequest() ) transcode.return_value = { "method": "post", @@ -20452,19 +24299,19 @@ def test_list_targets_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.ListTargetsResponse.to_json( - cloud_deploy.ListTargetsResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListTargetsRequest() + request = cloud_deploy.UpdateTargetRequest() metadata = [ ("key", "val"), ("cephalopod", 
"squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListTargetsResponse() + post.return_value = operations_pb2.Operation() - client.list_targets( + client.update_target( request, metadata=[ ("key", "val"), @@ -20476,8 +24323,8 @@ def test_list_targets_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_targets_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListTargetsRequest +def test_update_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20485,7 +24332,9 @@ def test_list_targets_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20497,10 +24346,10 @@ def test_list_targets_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_targets(request) + client.update_target(request) -def test_list_targets_rest_flattened(): +def test_update_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20509,39 +24358,41 @@ def test_list_targets_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.ListTargetsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_targets(**mock_args) + client.update_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, + "%s/v1/{target.name=projects/*/locations/*/targets/*}" + % client.transport._host, args[1], ) -def test_list_targets_rest_flattened_error(transport: str = "rest"): +def test_update_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20550,113 +24401,55 @@ def test_list_targets_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_targets( - cloud_deploy.ListTargetsRequest(), - parent="parent_value", + client.update_target( + cloud_deploy.UpdateTargetRequest(), + target=cloud_deploy.Target(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_targets_rest_pager(transport: str = "rest"): +def test_update_target_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListTargetsResponse( - targets=[ - cloud_deploy.Target(), - cloud_deploy.Target(), - cloud_deploy.Target(), - ], - next_page_token="abc", - ), - cloud_deploy.ListTargetsResponse( - targets=[], - next_page_token="def", - ), - cloud_deploy.ListTargetsResponse( - targets=[ - cloud_deploy.Target(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListTargetsResponse( - targets=[ - cloud_deploy.Target(), - cloud_deploy.Target(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloud_deploy.ListTargetsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_targets(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, 
cloud_deploy.Target) for i in results) - - pages = list(client.list_targets(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.RollbackTargetRequest, + cloud_deploy.DeleteTargetRequest, dict, ], ) -def test_rollback_target_rest(request_type): +def test_delete_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.RollbackTargetResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.rollback_target(request) + response = client.delete_target(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.RollbackTargetResponse) + assert response.operation.name == "operations/spam" -def test_rollback_target_rest_use_cached_wrapped_rpc(): +def test_delete_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20670,37 +24463,39 @@ def test_rollback_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.rollback_target in client._transport._wrapped_methods + assert client._transport.delete_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.rollback_target] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_target] = mock_rpc request = {} - client.rollback_target(request) + client.delete_target(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.rollback_target(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_rollback_target_rest_required_fields( - request_type=cloud_deploy.RollbackTargetRequest, +def test_delete_target_rest_required_fields( + request_type=cloud_deploy.DeleteTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} request_init["name"] = "" - request_init["target_id"] = "" - request_init["rollout_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20711,27 +24506,30 @@ def test_rollback_target_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).rollback_target._get_unset_required_fields(jsonified_request) + ).delete_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" - jsonified_request["targetId"] = "target_id_value" - jsonified_request["rolloutId"] = "rollout_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).rollback_target._get_unset_required_fields(jsonified_request) + ).delete_target._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "request_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - assert "targetId" in jsonified_request - assert jsonified_request["targetId"] == "target_id_value" - assert "rolloutId" in jsonified_request - assert jsonified_request["rolloutId"] == "rollout_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20740,7 +24538,7 @@ def test_rollback_target_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.RollbackTargetResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20752,49 +24550,46 @@ def test_rollback_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.rollback_target(request) + response = client.delete_target(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_rollback_target_rest_unset_required_fields(): +def 
test_delete_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.rollback_target._get_unset_required_fields({}) + unset_fields = transport.delete_target._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "name", - "targetId", - "rolloutId", + "allowMissing", + "etag", + "requestId", + "validateOnly", ) ) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rollback_target_rest_interceptors(null_interceptor): +def test_delete_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20807,14 +24602,16 @@ def test_rollback_target_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_rollback_target" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_rollback_target" + transports.CloudDeployRestInterceptor, "pre_delete_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.RollbackTargetRequest.pb( - cloud_deploy.RollbackTargetRequest() + pb_message = cloud_deploy.DeleteTargetRequest.pb( + cloud_deploy.DeleteTargetRequest() ) transcode.return_value = { "method": "post", @@ -20826,19 +24623,19 @@ def test_rollback_target_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.RollbackTargetResponse.to_json( - cloud_deploy.RollbackTargetResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() 
) - request = cloud_deploy.RollbackTargetRequest() + request = cloud_deploy.DeleteTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.RollbackTargetResponse() + post.return_value = operations_pb2.Operation() - client.rollback_target( + client.delete_target( request, metadata=[ ("key", "val"), @@ -20850,8 +24647,8 @@ def test_rollback_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_rollback_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.RollbackTargetRequest +def test_delete_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20859,9 +24656,7 @@ def test_rollback_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20873,10 +24668,10 @@ def test_rollback_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.rollback_target(request) + client.delete_target(request) -def test_rollback_target_rest_flattened(): +def test_delete_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20885,44 +24680,37 @@ def test_rollback_target_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.RollbackTargetResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} # get truthy value for each flattened field mock_args = dict( name="name_value", - target_id="target_id_value", - rollout_id="rollout_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.rollback_target(**mock_args) + client.delete_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}:rollbackTarget" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, args[1], ) -def test_rollback_target_rest_flattened_error(transport: str = "rest"): +def test_delete_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20931,15 +24719,13 @@ def test_rollback_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.rollback_target( - cloud_deploy.RollbackTargetRequest(), + client.delete_target( + cloud_deploy.DeleteTargetRequest(), name="name_value", - target_id="target_id_value", - rollout_id="rollout_id_value", ) -def test_rollback_target_rest_error(): +def test_delete_target_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20948,54 +24734,46 @@ def test_rollback_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetTargetRequest, + cloud_deploy.ListCustomTargetTypesRequest, dict, ], ) -def test_get_target_rest(request_type): +def test_list_custom_target_types_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.Target( - name="name_value", - target_id="target_id_value", - uid="uid_value", - description="description_value", - require_approval=True, - etag="etag_value", + return_value = cloud_deploy.ListCustomTargetTypesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.Target.pb(return_value) + return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_target(request) + response = client.list_custom_target_types(request) # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.Target) - assert response.name == "name_value" - assert response.target_id == "target_id_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.require_approval is True - assert response.etag == "etag_value" + assert isinstance(response, pagers.ListCustomTargetTypesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_target_rest_use_cached_wrapped_rpc(): +def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21009,33 +24787,40 @@ def test_get_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_target in client._transport._wrapped_methods + assert ( + client._transport.list_custom_target_types + in 
client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_custom_target_types + ] = mock_rpc request = {} - client.get_target(request) + client.list_custom_target_types(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_target(request) + client.list_custom_target_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequest): +def test_list_custom_target_types_rest_required_fields( + request_type=cloud_deploy.ListCustomTargetTypesRequest, +): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21046,21 +24831,30 @@ def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_target._get_unset_required_fields(jsonified_request) + ).list_custom_target_types._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_target._get_unset_required_fields(jsonified_request) + ).list_custom_target_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are 
not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21069,7 +24863,7 @@ def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Target() + return_value = cloud_deploy.ListCustomTargetTypesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21090,30 +24884,40 @@ def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequ response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.Target.pb(return_value) + return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_target(request) + response = client.list_custom_target_types(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_target_rest_unset_required_fields(): +def test_list_custom_target_types_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_target._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & 
set(("name",))) + unset_fields = transport.list_custom_target_types._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_target_rest_interceptors(null_interceptor): +def test_list_custom_target_types_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21126,13 +24930,15 @@ def test_get_target_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_target" + transports.CloudDeployRestInterceptor, "post_list_custom_target_types" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_target" + transports.CloudDeployRestInterceptor, "pre_list_custom_target_types" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetTargetRequest.pb(cloud_deploy.GetTargetRequest()) + pb_message = cloud_deploy.ListCustomTargetTypesRequest.pb( + cloud_deploy.ListCustomTargetTypesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21143,17 +24949,19 @@ def test_get_target_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.Target.to_json(cloud_deploy.Target()) + req.return_value._content = cloud_deploy.ListCustomTargetTypesResponse.to_json( + cloud_deploy.ListCustomTargetTypesResponse() + ) - request = cloud_deploy.GetTargetRequest() + request = cloud_deploy.ListCustomTargetTypesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.Target() + post.return_value = 
cloud_deploy.ListCustomTargetTypesResponse() - client.get_target( + client.list_custom_target_types( request, metadata=[ ("key", "val"), @@ -21165,8 +24973,8 @@ def test_get_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetTargetRequest +def test_list_custom_target_types_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListCustomTargetTypesRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21174,7 +24982,7 @@ def test_get_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21186,10 +24994,10 @@ def test_get_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_target(request) + client.list_custom_target_types(request) -def test_get_target_rest_flattened(): +def test_list_custom_target_types_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21198,14 +25006,14 @@ def test_get_target_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.Target() + return_value = cloud_deploy.ListCustomTargetTypesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -21213,24 +25021,25 @@ def test_get_target_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.Target.pb(return_value) + return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_target(**mock_args) + client.list_custom_target_types(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" + % client.transport._host, args[1], ) -def test_get_target_rest_flattened_error(transport: str = "rest"): +def test_list_custom_target_types_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21239,162 +25048,126 @@ def test_get_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_target( - cloud_deploy.GetTargetRequest(), - name="name_value", + client.list_custom_target_types( + cloud_deploy.ListCustomTargetTypesRequest(), + parent="parent_value", ) -def test_get_target_rest_error(): +def test_list_custom_target_types_rest_pager(transport: str = "rest"): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[ + cloud_deploy.CustomTargetType(), + cloud_deploy.CustomTargetType(), + cloud_deploy.CustomTargetType(), + ], + next_page_token="abc", + ), + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[], + next_page_token="def", + ), + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[ + cloud_deploy.CustomTargetType(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[ + cloud_deploy.CustomTargetType(), + cloud_deploy.CustomTargetType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_deploy.ListCustomTargetTypesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = 
client.list_custom_target_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.CustomTargetType) for i in results) + + pages = list(client.list_custom_target_types(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateTargetRequest, + cloud_deploy.GetCustomTargetTypeRequest, dict, ], ) -def test_create_target_rest(request_type): +def test_get_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["target"] = { - "name": "name_value", - "target_id": "target_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "require_approval": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "gke": { - "cluster": "cluster_value", - "internal_ip": True, - "proxy_url": "proxy_url_value", - }, - "anthos_cluster": {"membership": "membership_value"}, - "run": {"location": "location_value"}, - "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, - "custom_target": {"custom_target_type": "custom_target_type_value"}, - "etag": "etag_value", - "execution_configs": [ - { - "usages": [1], - "default_pool": { - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "private_pool": { - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - "execution_timeout": {"seconds": 751, "nanos": 543}, - "verbose": 
True, - } - ], - "deploy_parameters": {}, + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateTargetRequest.meta.fields["target"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["target"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in 
runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["target"][field])): - del request_init["target"][field][i][subfield] - else: - del request_init["target"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.CustomTargetType( + name="name_value", + custom_target_type_id="custom_target_type_id_value", + uid="uid_value", + description="description_value", + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.CustomTargetType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_target(request) + response = client.get_custom_target_type(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.CustomTargetType) + assert response.name == "name_value" + assert response.custom_target_type_id == "custom_target_type_id_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" -def test_create_target_rest_use_cached_wrapped_rpc(): +def test_get_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21408,40 +25181,40 @@ def test_create_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_target in client._transport._wrapped_methods + assert ( + client._transport.get_custom_target_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_custom_target_type + ] = mock_rpc request = {} - client.create_target(request) + client.get_custom_target_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_target(request) + client.get_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_target_rest_required_fields( - request_type=cloud_deploy.CreateTargetRequest, +def test_get_custom_target_type_rest_required_fields( + request_type=cloud_deploy.GetCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" - request_init["target_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21449,38 +25222,24 @@ def test_create_target_rest_required_fields( ) # verify fields with default values are dropped - assert "targetId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_target._get_unset_required_fields(jsonified_request) + ).get_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "targetId" in jsonified_request - assert jsonified_request["targetId"] == request_init["target_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["targetId"] = "target_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_target._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "request_id", - "target_id", - "validate_only", - ) - ) + ).get_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "targetId" in jsonified_request - assert jsonified_request["targetId"] == "target_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21489,7 +25248,7 @@ def test_create_target_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.CustomTargetType() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21501,58 +25260,39 @@ def test_create_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.CustomTargetType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_target(request) + response = client.get_custom_target_type(request) - expected_params = [ - ( - "targetId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = 
req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_target_rest_unset_required_fields(): +def test_get_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_target._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "targetId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "targetId", - "target", - ) - ) - ) + unset_fields = transport.get_custom_target_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_target_rest_interceptors(null_interceptor): +def test_get_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21565,16 +25305,14 @@ def test_create_target_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_target" + transports.CloudDeployRestInterceptor, "post_get_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_target" + transports.CloudDeployRestInterceptor, "pre_get_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateTargetRequest.pb( - cloud_deploy.CreateTargetRequest() + pb_message = cloud_deploy.GetCustomTargetTypeRequest.pb( + cloud_deploy.GetCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -21586,19 +25324,19 @@ def test_create_target_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = 
PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.CustomTargetType.to_json( + cloud_deploy.CustomTargetType() ) - request = cloud_deploy.CreateTargetRequest() + request = cloud_deploy.GetCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.CustomTargetType() - client.create_target( + client.get_custom_target_type( request, metadata=[ ("key", "val"), @@ -21610,8 +25348,8 @@ def test_create_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateTargetRequest +def test_get_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetCustomTargetTypeRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21619,7 +25357,9 @@ def test_create_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21631,10 +25371,10 @@ def test_create_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_target(request) + client.get_custom_target_type(request) -def test_create_target_rest_flattened(): +def test_get_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21643,39 +25383,42 @@ def test_create_target_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.CustomTargetType() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - target=cloud_deploy.Target(name="name_value"), - target_id="target_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.CustomTargetType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_target(**mock_args) + client.get_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" + % client.transport._host, args[1], ) -def test_create_target_rest_flattened_error(transport: str = "rest"): +def test_get_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21684,15 +25427,13 @@ def test_create_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_target( - cloud_deploy.CreateTargetRequest(), - parent="parent_value", - target=cloud_deploy.Target(name="name_value"), - target_id="target_id_value", + client.get_custom_target_type( + cloud_deploy.GetCustomTargetTypeRequest(), + name="name_value", ) -def test_create_target_rest_error(): +def test_get_custom_target_type_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21701,67 +25442,60 @@ def test_create_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.UpdateTargetRequest, + cloud_deploy.CreateCustomTargetTypeRequest, dict, ], ) -def test_update_target_rest(request_type): +def test_create_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} - } - request_init["target"] = { - "name": "projects/sample1/locations/sample2/targets/sample3", - "target_id": "target_id_value", + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["custom_target_type"] = { + "name": "name_value", + "custom_target_type_id": "custom_target_type_id_value", "uid": "uid_value", "description": "description_value", "annotations": {}, "labels": {}, - "require_approval": True, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "gke": { - "cluster": "cluster_value", - "internal_ip": True, - "proxy_url": "proxy_url_value", - }, - "anthos_cluster": {"membership": "membership_value"}, - "run": {"location": "location_value"}, - "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, - "custom_target": {"custom_target_type": "custom_target_type_value"}, "etag": "etag_value", - "execution_configs": [ - { - "usages": [1], - "default_pool": { - "service_account": 
"service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "private_pool": { - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - "execution_timeout": {"seconds": 751, "nanos": 543}, - "verbose": True, - } - ], - "deploy_parameters": {}, + "custom_actions": { + "render_action": "render_action_value", + "deploy_action": "deploy_action_value", + "include_skaffold_modules": [ + { + "configs": ["configs_value1", "configs_value2"], + "git": { + "repo": "repo_value", + "path": "path_value", + "ref": "ref_value", + }, + "google_cloud_storage": { + "source": "source_value", + "path": "path_value", + }, + "google_cloud_build_repo": { + "repository": "repository_value", + "path": "path_value", + "ref": "ref_value", + }, + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.UpdateTargetRequest.meta.fields["target"] + test_field = cloud_deploy.CreateCustomTargetTypeRequest.meta.fields[ + "custom_target_type" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -21789,7 +25523,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["target"].items(): # pragma: NO COVER + for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -21819,10 +25553,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["target"][field])): - del request_init["target"][field][i][subfield] + for i in range(0, len(request_init["custom_target_type"][field])): + del request_init["custom_target_type"][field][i][subfield] else: - del request_init["target"][field][subfield] + del request_init["custom_target_type"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -21837,13 +25571,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_target(request) + response = client.create_custom_target_type(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_update_target_rest_use_cached_wrapped_rpc(): +def test_create_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21857,17 +25591,22 @@ def test_update_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_target in client._transport._wrapped_methods + assert ( + client._transport.create_custom_target_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_custom_target_type + ] = mock_rpc request = {} - client.update_target(request) + client.create_custom_target_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -21876,19 +25615,21 @@ def test_update_target_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_target(request) + client.create_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_target_rest_required_fields( - request_type=cloud_deploy.UpdateTargetRequest, +def test_create_custom_target_type_rest_required_fields( + request_type=cloud_deploy.CreateCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} + request_init["parent"] = "" + request_init["custom_target_type_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21896,29 +25637,40 @@ def test_update_target_rest_required_fields( ) # verify fields with default values are dropped + assert "customTargetTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_target._get_unset_required_fields(jsonified_request) + ).create_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "customTargetTypeId" in jsonified_request + assert ( + jsonified_request["customTargetTypeId"] == request_init["custom_target_type_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["customTargetTypeId"] = "custom_target_type_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_target._get_unset_required_fields(jsonified_request) + ).create_custom_target_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", + "custom_target_type_id", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "customTargetTypeId" in jsonified_request + assert jsonified_request["customTargetTypeId"] == "custom_target_type_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21939,7 +25691,7 @@ def test_update_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -21952,39 +25704,45 @@ def test_update_target_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_target(request) + response = client.create_custom_target_type(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "customTargetTypeId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_target_rest_unset_required_fields(): +def test_create_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_target._get_unset_required_fields({}) + unset_fields = transport.create_custom_target_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", + "customTargetTypeId", "requestId", - "updateMask", "validateOnly", ) ) & set( ( - "updateMask", - "target", + "parent", + "customTargetTypeId", + "customTargetType", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_update_target_rest_interceptors(null_interceptor): +def test_create_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21999,14 +25757,14 @@ def test_update_target_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_update_target" + transports.CloudDeployRestInterceptor, "post_create_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_update_target" + transports.CloudDeployRestInterceptor, "pre_create_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.UpdateTargetRequest.pb( - cloud_deploy.UpdateTargetRequest() + pb_message = cloud_deploy.CreateCustomTargetTypeRequest.pb( + cloud_deploy.CreateCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -22022,7 +25780,7 @@ def test_update_target_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.UpdateTargetRequest() + request = cloud_deploy.CreateCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22030,7 +25788,7 @@ def test_update_target_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_target( + client.create_custom_target_type( request, metadata=[ ("key", "val"), @@ -22042,8 +25800,8 @@ def test_update_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.UpdateTargetRequest +def test_create_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateCustomTargetTypeRequest ): client = CloudDeployClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -22051,9 +25809,7 @@ def test_update_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22065,10 +25821,10 @@ def test_update_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_target(request) + client.create_custom_target_type(request) -def test_update_target_rest_flattened(): +def test_create_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22080,14 +25836,13 @@ def test_update_target_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - target=cloud_deploy.Target(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + custom_target_type_id="custom_target_type_id_value", ) mock_args.update(sample_request) @@ -22098,20 +25853,20 @@ def test_update_target_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_target(**mock_args) + client.create_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{target.name=projects/*/locations/*/targets/*}" + "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" % client.transport._host, args[1], ) -def test_update_target_rest_flattened_error(transport: str = "rest"): +def test_create_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22120,14 +25875,15 @@ def test_update_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_target( - cloud_deploy.UpdateTargetRequest(), - target=cloud_deploy.Target(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_custom_target_type( + cloud_deploy.CreateCustomTargetTypeRequest(), + parent="parent_value", + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + custom_target_type_id="custom_target_type_id_value", ) -def test_update_target_rest_error(): +def test_create_custom_target_type_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22136,18 +25892,125 @@ def test_update_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteTargetRequest, + cloud_deploy.UpdateCustomTargetTypeRequest, dict, ], ) -def test_delete_target_rest(request_type): +def test_update_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request_init = { + "custom_target_type": { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } + } + 
request_init["custom_target_type"] = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3", + "custom_target_type_id": "custom_target_type_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "custom_actions": { + "render_action": "render_action_value", + "deploy_action": "deploy_action_value", + "include_skaffold_modules": [ + { + "configs": ["configs_value1", "configs_value2"], + "git": { + "repo": "repo_value", + "path": "path_value", + "ref": "ref_value", + }, + "google_cloud_storage": { + "source": "source_value", + "path": "path_value", + }, + "google_cloud_build_repo": { + "repository": "repository_value", + "path": "path_value", + "ref": "ref_value", + }, + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateCustomTargetTypeRequest.meta.fields[ + "custom_target_type" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["custom_target_type"][field])): + del 
request_init["custom_target_type"][field][i][subfield] + else: + del request_init["custom_target_type"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22162,13 +26025,13 @@ def test_delete_target_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_target(request) + response = client.update_custom_target_type(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_target_rest_use_cached_wrapped_rpc(): +def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22182,17 +26045,22 @@ def test_delete_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_target in client._transport._wrapped_methods + assert ( + client._transport.update_custom_target_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_custom_target_type + ] = mock_rpc request = {} - client.delete_target(request) + client.update_custom_target_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -22201,20 +26069,19 @@ def test_delete_target_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_target(request) + client.update_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_target_rest_required_fields( - request_type=cloud_deploy.DeleteTargetRequest, +def test_update_custom_target_type_rest_required_fields( + request_type=cloud_deploy.UpdateCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22225,30 +26092,26 @@ def test_delete_target_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_target._get_unset_required_fields(jsonified_request) + ).update_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_target._get_unset_required_fields(jsonified_request) + ).update_custom_target_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( "allow_missing", - "etag", "request_id", + "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22269,9 +26132,10 @@ def test_delete_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -22281,34 +26145,39 @@ def test_delete_target_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_target(request) + response = client.update_custom_target_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_target_rest_unset_required_fields(): +def test_update_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_target._get_unset_required_fields({}) + unset_fields = transport.update_custom_target_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( "allowMissing", - "etag", "requestId", + "updateMask", "validateOnly", ) ) - & set(("name",)) + & set( + ( + "updateMask", + "customTargetType", + ) + ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_target_rest_interceptors(null_interceptor): +def test_update_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None @@ -22323,14 +26192,14 @@ def test_delete_target_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_delete_target" + transports.CloudDeployRestInterceptor, "post_update_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_delete_target" + transports.CloudDeployRestInterceptor, "pre_update_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.DeleteTargetRequest.pb( - cloud_deploy.DeleteTargetRequest() + pb_message = cloud_deploy.UpdateCustomTargetTypeRequest.pb( + cloud_deploy.UpdateCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -22346,7 +26215,7 @@ def test_delete_target_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.DeleteTargetRequest() + request = cloud_deploy.UpdateCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22354,7 +26223,7 @@ def test_delete_target_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_target( + client.update_custom_target_type( request, metadata=[ ("key", "val"), @@ -22366,8 +26235,8 @@ def test_delete_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.DeleteTargetRequest +def test_update_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateCustomTargetTypeRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22375,7 +26244,11 @@ def test_delete_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + 
request_init = { + "custom_target_type": { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22387,10 +26260,10 @@ def test_delete_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_target(request) + client.update_custom_target_type(request) -def test_delete_target_rest_flattened(): +def test_update_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22402,11 +26275,16 @@ def test_delete_target_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} + sample_request = { + "custom_target_type": { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -22417,19 +26295,20 @@ def test_delete_target_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_target(**mock_args) + client.update_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, + "%s/v1/{custom_target_type.name=projects/*/locations/*/customTargetTypes/*}" + % client.transport._host, args[1], ) -def test_delete_target_rest_flattened_error(transport: str = "rest"): +def test_update_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22438,13 +26317,14 @@ def test_delete_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_target( - cloud_deploy.DeleteTargetRequest(), - name="name_value", + client.update_custom_target_type( + cloud_deploy.UpdateCustomTargetTypeRequest(), + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_target_rest_error(): +def test_update_custom_target_type_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22453,46 +26333,41 @@ def test_delete_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListCustomTargetTypesRequest, + cloud_deploy.DeleteCustomTargetTypeRequest, dict, ], ) -def test_list_custom_target_types_rest(request_type): +def test_delete_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListCustomTargetTypesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_custom_target_types(request) + response = client.delete_custom_target_type(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCustomTargetTypesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): +def test_delete_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22507,7 +26382,7 @@ def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_custom_target_types + client._transport.delete_custom_target_type in client._transport._wrapped_methods ) @@ -22517,29 +26392,33 @@ def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.list_custom_target_types + client._transport.delete_custom_target_type ] = mock_rpc request = {} - client.list_custom_target_types(request) + client.delete_custom_target_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_custom_target_types(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_custom_target_types_rest_required_fields( - request_type=cloud_deploy.ListCustomTargetTypesRequest, +def test_delete_custom_target_type_rest_required_fields( + request_type=cloud_deploy.DeleteCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22550,30 +26429,30 @@ def test_list_custom_target_types_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_custom_target_types._get_unset_required_fields(jsonified_request) + ).delete_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_custom_target_types._get_unset_required_fields(jsonified_request) + ).delete_custom_target_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "allow_missing", + "etag", + "request_id", + "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22582,7 +26461,7 @@ def test_list_custom_target_types_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListCustomTargetTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22594,49 +26473,46 @@ def test_list_custom_target_types_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_custom_target_types(request) + response = client.delete_custom_target_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_custom_target_types_rest_unset_required_fields(): +def 
test_delete_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_custom_target_types._get_unset_required_fields({}) + unset_fields = transport.delete_custom_target_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "allowMissing", + "etag", + "requestId", + "validateOnly", ) ) - & set(("parent",)) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_custom_target_types_rest_interceptors(null_interceptor): +def test_delete_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22649,14 +26525,16 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_custom_target_types" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_custom_target_types" + transports.CloudDeployRestInterceptor, "pre_delete_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListCustomTargetTypesRequest.pb( - cloud_deploy.ListCustomTargetTypesRequest() + pb_message = cloud_deploy.DeleteCustomTargetTypeRequest.pb( + cloud_deploy.DeleteCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -22668,19 +26546,19 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = 
cloud_deploy.ListCustomTargetTypesResponse.to_json( - cloud_deploy.ListCustomTargetTypesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListCustomTargetTypesRequest() + request = cloud_deploy.DeleteCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListCustomTargetTypesResponse() + post.return_value = operations_pb2.Operation() - client.list_custom_target_types( + client.delete_custom_target_type( request, metadata=[ ("key", "val"), @@ -22692,8 +26570,8 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_custom_target_types_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListCustomTargetTypesRequest +def test_delete_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteCustomTargetTypeRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22701,7 +26579,9 @@ def test_list_custom_target_types_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22713,10 +26593,10 @@ def test_list_custom_target_types_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_custom_target_types(request) + client.delete_custom_target_type(request) -def test_list_custom_target_types_rest_flattened(): +def test_delete_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22725,40 +26605,40 @@ def test_list_custom_target_types_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListCustomTargetTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_custom_target_types(**mock_args) + client.delete_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" + "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" % client.transport._host, args[1], ) -def test_list_custom_target_types_rest_flattened_error(transport: str = "rest"): +def test_delete_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22767,83 +26647,26 @@ def test_list_custom_target_types_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_custom_target_types( - cloud_deploy.ListCustomTargetTypesRequest(), - parent="parent_value", + client.delete_custom_target_type( + cloud_deploy.DeleteCustomTargetTypeRequest(), + name="name_value", ) -def test_list_custom_target_types_rest_pager(transport: str = "rest"): +def test_delete_custom_target_type_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[ - cloud_deploy.CustomTargetType(), - cloud_deploy.CustomTargetType(), - cloud_deploy.CustomTargetType(), - ], - next_page_token="abc", - ), - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[], - next_page_token="def", - ), - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[ - cloud_deploy.CustomTargetType(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[ - cloud_deploy.CustomTargetType(), - cloud_deploy.CustomTargetType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - cloud_deploy.ListCustomTargetTypesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_custom_target_types(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.CustomTargetType) for i in results) - - pages = list(client.list_custom_target_types(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetCustomTargetTypeRequest, + cloud_deploy.ListReleasesRequest, dict, ], ) -def test_get_custom_target_type_rest(request_type): +def test_list_releases_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22851,42 +26674,36 @@ def 
test_get_custom_target_type_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.CustomTargetType( - name="name_value", - custom_target_type_id="custom_target_type_id_value", - uid="uid_value", - description="description_value", - etag="etag_value", + return_value = cloud_deploy.ListReleasesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.CustomTargetType.pb(return_value) + return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_custom_target_type(request) + response = client.list_releases(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.CustomTargetType) - assert response.name == "name_value" - assert response.custom_target_type_id == "custom_target_type_id_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" + assert isinstance(response, pagers.ListReleasesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_list_releases_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22900,40 +26717,35 @@ def test_get_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.list_releases in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_releases] = mock_rpc request = {} - client.get_custom_target_type(request) + client.list_releases(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_custom_target_type(request) + client.list_releases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_custom_target_type_rest_required_fields( - request_type=cloud_deploy.GetCustomTargetTypeRequest, +def test_list_releases_rest_required_fields( + request_type=cloud_deploy.ListReleasesRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22944,21 +26756,30 @@ def test_get_custom_target_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_target_type._get_unset_required_fields(jsonified_request) + ).list_releases._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_target_type._get_unset_required_fields(jsonified_request) + ).list_releases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22967,7 +26788,7 @@ def test_get_custom_target_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.CustomTargetType() + return_value = cloud_deploy.ListReleasesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22988,30 +26809,40 @@ def test_get_custom_target_type_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.CustomTargetType.pb(return_value) + return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_custom_target_type(request) + response = client.list_releases(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_custom_target_type_rest_unset_required_fields(): +def test_list_releases_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_custom_target_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.list_releases._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_custom_target_type_rest_interceptors(null_interceptor): +def test_list_releases_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23024,14 +26855,14 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_custom_target_type" + transports.CloudDeployRestInterceptor, "post_list_releases" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_list_releases" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetCustomTargetTypeRequest.pb( - cloud_deploy.GetCustomTargetTypeRequest() + pb_message = cloud_deploy.ListReleasesRequest.pb( + cloud_deploy.ListReleasesRequest() ) transcode.return_value = { "method": "post", @@ -23043,19 +26874,19 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.CustomTargetType.to_json( - cloud_deploy.CustomTargetType() + req.return_value._content = cloud_deploy.ListReleasesResponse.to_json( + cloud_deploy.ListReleasesResponse() ) - request = cloud_deploy.GetCustomTargetTypeRequest() + request = cloud_deploy.ListReleasesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.CustomTargetType() + post.return_value = cloud_deploy.ListReleasesResponse() - 
client.get_custom_target_type( + client.list_releases( request, metadata=[ ("key", "val"), @@ -23067,8 +26898,8 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetCustomTargetTypeRequest +def test_list_releases_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListReleasesRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23077,7 +26908,7 @@ def test_get_custom_target_type_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } request = request_type(**request_init) @@ -23090,10 +26921,10 @@ def test_get_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_custom_target_type(request) + client.list_releases(request) -def test_get_custom_target_type_rest_flattened(): +def test_list_releases_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23102,16 +26933,16 @@ def test_get_custom_target_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.CustomTargetType() + return_value = cloud_deploy.ListReleasesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -23119,25 +26950,25 @@ def test_get_custom_target_type_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.CustomTargetType.pb(return_value) + return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_custom_target_type(**mock_args) + client.list_releases(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" + "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" % client.transport._host, args[1], ) -def test_get_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_list_releases_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23146,157 +26977,134 @@ def test_get_custom_target_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_custom_target_type( - cloud_deploy.GetCustomTargetTypeRequest(), - name="name_value", + client.list_releases( + cloud_deploy.ListReleasesRequest(), + parent="parent_value", ) -def test_get_custom_target_type_rest_error(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_deploy.CreateCustomTargetTypeRequest, - dict, - ], -) -def test_create_custom_target_type_rest(request_type): +def test_list_releases_rest_pager(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["custom_target_type"] = { - "name": "name_value", - "custom_target_type_id": "custom_target_type_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "etag": "etag_value", - "custom_actions": { - "render_action": "render_action_value", - "deploy_action": "deploy_action_value", - "include_skaffold_modules": [ - { - "configs": ["configs_value1", "configs_value2"], - "git": { - "repo": "repo_value", - "path": "path_value", - "ref": "ref_value", - }, - "google_cloud_storage": { - "source": "source_value", - "path": "path_value", - }, - "google_cloud_build_repo": { - "repository": "repository_value", - "path": "path_value", - "ref": "ref_value", - }, - } - ], - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateCustomTargetTypeRequest.meta.fields[ - "custom_target_type" - ] + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + cloud_deploy.Release(), + cloud_deploy.Release(), + ], + next_page_token="abc", + ), + cloud_deploy.ListReleasesResponse( + releases=[], + next_page_token="def", + ), + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + cloud_deploy.Release(), + ], + ), + ) + # Two responses for two calls + response = response + response - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + # Wrap the values into proper Response objs + response = tuple(cloud_deploy.ListReleasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + sample_request = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + pager = client.list_releases(request=sample_request) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Release) for i in results) - subfields_not_in_runtime = [] + pages = list(client.list_releases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, 
"keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetReleaseRequest, + dict, + ], +) +def test_get_release_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["custom_target_type"][field])): - del request_init["custom_target_type"][field][i][subfield] - else: - del request_init["custom_target_type"][field][subfield] + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Release( + name="name_value", + uid="uid_value", + description="description_value", + abandoned=True, + skaffold_config_uri="skaffold_config_uri_value", + skaffold_config_path="skaffold_config_path_value", + render_state=cloud_deploy.Release.RenderState.SUCCEEDED, + etag="etag_value", + skaffold_version="skaffold_version_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_custom_target_type(request) + response = client.get_release(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.Release) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.abandoned is True + assert response.skaffold_config_uri == "skaffold_config_uri_value" + assert response.skaffold_config_path == "skaffold_config_path_value" + assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED + assert response.etag == "etag_value" + assert response.skaffold_version == "skaffold_version_value" -def test_create_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_get_release_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23310,45 +27118,33 @@ def test_create_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - 
assert ( - client._transport.create_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.get_release in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_release] = mock_rpc request = {} - client.create_custom_target_type(request) + client.get_release(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_custom_target_type(request) + client.get_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_custom_target_type_rest_required_fields( - request_type=cloud_deploy.CreateCustomTargetTypeRequest, -): +def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRequest): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" - request_init["custom_target_type_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23356,40 +27152,24 @@ def test_create_custom_target_type_rest_required_fields( ) # verify fields with default values are dropped - assert "customTargetTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_target_type._get_unset_required_fields(jsonified_request) + ).get_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # 
verify required fields with default values are now present - assert "customTargetTypeId" in jsonified_request - assert ( - jsonified_request["customTargetTypeId"] == request_init["custom_target_type_id"] - ) - jsonified_request["parent"] = "parent_value" - jsonified_request["customTargetTypeId"] = "custom_target_type_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_target_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "custom_target_type_id", - "request_id", - "validate_only", - ) - ) + ).get_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "customTargetTypeId" in jsonified_request - assert jsonified_request["customTargetTypeId"] == "custom_target_type_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23398,7 +27178,7 @@ def test_create_custom_target_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Release() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23410,58 +27190,39 @@ def test_create_custom_target_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_custom_target_type(request) + response = client.get_release(request) - expected_params = [ - ( - "customTargetTypeId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_custom_target_type_rest_unset_required_fields(): +def test_get_release_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_custom_target_type._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "customTargetTypeId", - "requestId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "customTargetTypeId", - "customTargetType", - ) - ) - ) + unset_fields = transport.get_release._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_custom_target_type_rest_interceptors(null_interceptor): +def test_get_release_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23474,17 +27235,13 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_custom_target_type" + transports.CloudDeployRestInterceptor, "post_get_release" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_get_release" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateCustomTargetTypeRequest.pb( - cloud_deploy.CreateCustomTargetTypeRequest() - ) + pb_message = cloud_deploy.GetReleaseRequest.pb(cloud_deploy.GetReleaseRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23495,19 +27252,17 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = cloud_deploy.Release.to_json(cloud_deploy.Release()) - request = cloud_deploy.CreateCustomTargetTypeRequest() + request = cloud_deploy.GetReleaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.Release() - client.create_custom_target_type( + client.get_release( request, metadata=[ ("key", "val"), @@ -23519,8 +27274,8 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateCustomTargetTypeRequest +def 
test_get_release_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetReleaseRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23528,7 +27283,9 @@ def test_create_custom_target_type_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23540,10 +27297,10 @@ def test_create_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_custom_target_type(request) + client.get_release(request) -def test_create_custom_target_type_rest_flattened(): +def test_get_release_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23552,40 +27309,42 @@ def test_create_custom_target_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Release() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - custom_target_type_id="custom_target_type_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_custom_target_type(**mock_args) + client.get_release(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}" % client.transport._host, args[1], ) -def test_create_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_get_release_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23594,15 +27353,13 @@ def test_create_custom_target_type_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_custom_target_type( - cloud_deploy.CreateCustomTargetTypeRequest(), - parent="parent_value", - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - custom_target_type_id="custom_target_type_id_value", + client.get_release( + cloud_deploy.GetReleaseRequest(), + name="name_value", ) -def test_create_custom_target_type_rest_error(): +def test_get_release_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23611,11 +27368,11 @@ def test_create_custom_target_type_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.UpdateCustomTargetTypeRequest, + cloud_deploy.CreateReleaseRequest, dict, ], ) -def test_update_custom_target_type_rest(request_type): +def test_create_release_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23623,52 +27380,231 @@ def test_update_custom_target_type_rest(request_type): # send a request that will satisfy transcoding request_init = { - "custom_target_type": { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" - } + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } - request_init["custom_target_type"] = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3", - "custom_target_type_id": "custom_target_type_id_value", + request_init["release"] = { + "name": "name_value", "uid": "uid_value", "description": "description_value", "annotations": {}, "labels": {}, + "abandoned": True, "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, + "render_start_time": {}, + "render_end_time": {}, + "skaffold_config_uri": "skaffold_config_uri_value", + "skaffold_config_path": "skaffold_config_path_value", + "build_artifacts": [{"image": "image_value", "tag": "tag_value"}], + "delivery_pipeline_snapshot": { + "name": "name_value", + "uid": "uid_value", + 
"description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": { + "standard": { + "verify": True, + "predeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + "postdeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + }, + "canary": { + "runtime_config": { + "kubernetes": { + "gateway_service_mesh": { + "http_route": "http_route_value", + "service": "service_value", + "deployment": "deployment_value", + "route_update_wait_time": { + "seconds": 751, + "nanos": 543, + }, + "stable_cutback_duration": {}, + "pod_selector_label": "pod_selector_label_value", + }, + "service_networking": { + "service": "service_value", + "deployment": "deployment_value", + "disable_pod_overprovisioning": True, + "pod_selector_label": "pod_selector_label_value", + }, + }, + "cloud_run": { + "automatic_traffic_control": True, + "canary_revision_tags": [ + "canary_revision_tags_value1", + "canary_revision_tags_value2", + ], + "prior_revision_tags": [ + "prior_revision_tags_value1", + "prior_revision_tags_value2", + ], + "stable_revision_tags": [ + "stable_revision_tags_value1", + "stable_revision_tags_value2", + ], + }, + }, + "canary_deployment": { + "percentages": [1170, 1171], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + }, + "custom_canary_deployment": { + "phase_configs": [ + { + "phase_id": "phase_id_value", + "percentage": 1054, + "profiles": [ + "profiles_value1", + "profiles_value2", + ], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + } + ] + }, + }, + }, + "deploy_parameters": [ + {"values": {}, "match_target_labels": {}} + ], + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": [ + "missing_targets_value1", + 
"missing_targets_value2", + ], + "update_time": {}, + }, + "targets_type_condition": { + "status": True, + "error_details": "error_details_value", + }, + }, + "etag": "etag_value", + "suspended": True, + }, + "target_snapshots": [ + { + "name": "name_value", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {}, + "update_time": {}, + "gke": { + "cluster": "cluster_value", + "internal_ip": True, + "proxy_url": "proxy_url_value", + }, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "multi_target": { + "target_ids": ["target_ids_value1", "target_ids_value2"] + }, + "custom_target": {"custom_target_type": "custom_target_type_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {}, + "verbose": True, + } + ], + "deploy_parameters": {}, + } + ], + "custom_target_type_snapshots": [ + { + "name": "name_value", + "custom_target_type_id": "custom_target_type_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {}, + "update_time": {}, + "etag": "etag_value", + "custom_actions": { + "render_action": "render_action_value", + "deploy_action": "deploy_action_value", + "include_skaffold_modules": [ + { + "configs": ["configs_value1", "configs_value2"], + "git": { + "repo": "repo_value", + "path": "path_value", + "ref": "ref_value", + }, + "google_cloud_storage": { + "source": 
"source_value", + "path": "path_value", + }, + "google_cloud_build_repo": { + "repository": "repository_value", + "path": "path_value", + "ref": "ref_value", + }, + } + ], + }, + } + ], + "render_state": 1, "etag": "etag_value", - "custom_actions": { - "render_action": "render_action_value", - "deploy_action": "deploy_action_value", - "include_skaffold_modules": [ - { - "configs": ["configs_value1", "configs_value2"], - "git": { - "repo": "repo_value", - "path": "path_value", - "ref": "ref_value", - }, - "google_cloud_storage": { - "source": "source_value", - "path": "path_value", - }, - "google_cloud_build_repo": { - "repository": "repository_value", - "path": "path_value", - "ref": "ref_value", - }, - } - ], + "skaffold_version": "skaffold_version_value", + "target_artifacts": {}, + "target_renders": {}, + "condition": { + "release_ready_condition": {"status": True}, + "skaffold_supported_condition": { + "status": True, + "skaffold_support_state": 1, + "maintenance_mode_time": {}, + "support_expiration_time": {}, + }, }, + "deploy_parameters": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.UpdateCustomTargetTypeRequest.meta.fields[ - "custom_target_type" - ] + test_field = cloud_deploy.CreateReleaseRequest.meta.fields["release"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -23696,7 +27632,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER + for field, value in request_init["release"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -23726,10 +27662,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["custom_target_type"][field])): - del request_init["custom_target_type"][field][i][subfield] + for i in range(0, len(request_init["release"][field])): + del request_init["release"][field][i][subfield] else: - del request_init["custom_target_type"][field][subfield] + del request_init["release"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -23744,13 +27680,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_custom_target_type(request) + response = client.create_release(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_create_release_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23764,22 +27700,17 @@ def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.create_release in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_release] = mock_rpc request = {} - client.update_custom_target_type(request) + client.create_release(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -23788,19 +27719,21 @@ def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_custom_target_type(request) + client.create_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_custom_target_type_rest_required_fields( - request_type=cloud_deploy.UpdateCustomTargetTypeRequest, +def test_create_release_rest_required_fields( + request_type=cloud_deploy.CreateReleaseRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} + request_init["parent"] = "" + request_init["release_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23808,29 +27741,39 @@ def test_update_custom_target_type_rest_required_fields( ) # verify fields with default values are dropped + assert "releaseId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_custom_target_type._get_unset_required_fields(jsonified_request) + ).create_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "releaseId" in jsonified_request + assert jsonified_request["releaseId"] == request_init["release_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["releaseId"] = "release_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_custom_target_type._get_unset_required_fields(jsonified_request) + ).create_release._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", + "override_deploy_policy", + "release_id", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "releaseId" in jsonified_request + assert jsonified_request["releaseId"] == "release_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23851,7 +27794,7 @@ def test_update_custom_target_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -23864,39 +27807,46 @@ def test_update_custom_target_type_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_custom_target_type(request) + response = client.create_release(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "releaseId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_custom_target_type_rest_unset_required_fields(): +def test_create_release_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_custom_target_type._get_unset_required_fields({}) + unset_fields = transport.create_release._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", + "overrideDeployPolicy", + "releaseId", "requestId", - "updateMask", "validateOnly", ) ) & set( ( - "updateMask", - "customTargetType", + "parent", + "releaseId", + "release", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) 
-def test_update_custom_target_type_rest_interceptors(null_interceptor): +def test_create_release_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23911,14 +27861,14 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_update_custom_target_type" + transports.CloudDeployRestInterceptor, "post_create_release" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_update_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_create_release" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.UpdateCustomTargetTypeRequest.pb( - cloud_deploy.UpdateCustomTargetTypeRequest() + pb_message = cloud_deploy.CreateReleaseRequest.pb( + cloud_deploy.CreateReleaseRequest() ) transcode.return_value = { "method": "post", @@ -23934,7 +27884,7 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.UpdateCustomTargetTypeRequest() + request = cloud_deploy.CreateReleaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -23942,7 +27892,7 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_custom_target_type( + client.create_release( request, metadata=[ ("key", "val"), @@ -23954,8 +27904,8 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.UpdateCustomTargetTypeRequest +def test_create_release_rest_bad_request( + transport: str = "rest", 
request_type=cloud_deploy.CreateReleaseRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23964,9 +27914,7 @@ def test_update_custom_target_type_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "custom_target_type": { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" - } + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } request = request_type(**request_init) @@ -23979,10 +27927,10 @@ def test_update_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_custom_target_type(request) + client.create_release(request) -def test_update_custom_target_type_rest_flattened(): +def test_create_release_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23995,15 +27943,14 @@ def test_update_custom_target_type_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "custom_target_type": { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" - } + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } # get truthy value for each flattened field mock_args = dict( - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + release=cloud_deploy.Release(name="name_value"), + release_id="release_id_value", ) mock_args.update(sample_request) @@ -24014,20 +27961,20 @@ def test_update_custom_target_type_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_custom_target_type(**mock_args) + client.create_release(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{custom_target_type.name=projects/*/locations/*/customTargetTypes/*}" + "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" % client.transport._host, args[1], ) -def test_update_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_create_release_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24036,14 +27983,15 @@ def test_update_custom_target_type_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_custom_target_type( - cloud_deploy.UpdateCustomTargetTypeRequest(), - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_release( + cloud_deploy.CreateReleaseRequest(), + parent="parent_value", + release=cloud_deploy.Release(name="name_value"), + release_id="release_id_value", ) -def test_update_custom_target_type_rest_error(): +def test_create_release_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24052,11 +28000,11 @@ def test_update_custom_target_type_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteCustomTargetTypeRequest, + cloud_deploy.AbandonReleaseRequest, dict, ], ) -def test_delete_custom_target_type_rest(request_type): +def test_abandon_release_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24064,29 +28012,31 @@ def test_delete_custom_target_type_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "name": 
"projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.AbandonReleaseResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_custom_target_type(request) + response = client.abandon_release(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.AbandonReleaseResponse) -def test_delete_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_abandon_release_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24100,39 +28050,30 @@ def test_delete_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.abandon_release in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.abandon_release] = mock_rpc request = {} - client.delete_custom_target_type(request) + client.abandon_release(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_custom_target_type(request) + client.abandon_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_custom_target_type_rest_required_fields( - request_type=cloud_deploy.DeleteCustomTargetTypeRequest, +def test_abandon_release_rest_required_fields( + request_type=cloud_deploy.AbandonReleaseRequest, ): transport_class = transports.CloudDeployRestTransport @@ -24148,7 +28089,7 @@ def test_delete_custom_target_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_custom_target_type._get_unset_required_fields(jsonified_request) + ).abandon_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -24157,16 +28098,7 @@ def test_delete_custom_target_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_custom_target_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "allow_missing", - "etag", - "request_id", - "validate_only", - ) - ) + ).abandon_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -24180,7 +28112,7 @@ def test_delete_custom_target_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.AbandonReleaseResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24192,46 +28124,40 @@ def test_delete_custom_target_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_custom_target_type(request) + response = client.abandon_release(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_custom_target_type_rest_unset_required_fields(): +def test_abandon_release_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_custom_target_type._get_unset_required_fields({}) - assert set(unset_fields) == ( - 
set( - ( - "allowMissing", - "etag", - "requestId", - "validateOnly", - ) - ) - & set(("name",)) - ) + unset_fields = transport.abandon_release._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_custom_target_type_rest_interceptors(null_interceptor): +def test_abandon_release_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24244,16 +28170,14 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_delete_custom_target_type" + transports.CloudDeployRestInterceptor, "post_abandon_release" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_delete_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_abandon_release" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.DeleteCustomTargetTypeRequest.pb( - cloud_deploy.DeleteCustomTargetTypeRequest() + pb_message = cloud_deploy.AbandonReleaseRequest.pb( + cloud_deploy.AbandonReleaseRequest() ) transcode.return_value = { "method": "post", @@ -24265,19 +28189,19 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.AbandonReleaseResponse.to_json( + cloud_deploy.AbandonReleaseResponse() ) - request = cloud_deploy.DeleteCustomTargetTypeRequest() + request = cloud_deploy.AbandonReleaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] 
pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.AbandonReleaseResponse() - client.delete_custom_target_type( + client.abandon_release( request, metadata=[ ("key", "val"), @@ -24289,8 +28213,8 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.DeleteCustomTargetTypeRequest +def test_abandon_release_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.AbandonReleaseRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24299,7 +28223,7 @@ def test_delete_custom_target_type_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" } request = request_type(**request_init) @@ -24312,10 +28236,10 @@ def test_delete_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_custom_target_type(request) + client.abandon_release(request) -def test_delete_custom_target_type_rest_flattened(): +def test_abandon_release_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24324,11 +28248,11 @@ def test_delete_custom_target_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.AbandonReleaseResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" } # get truthy value for each flattened field @@ -24340,24 +28264,26 @@ def test_delete_custom_target_type_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_custom_target_type(**mock_args) + client.abandon_release(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}:abandon" % client.transport._host, args[1], ) -def test_delete_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_abandon_release_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24366,13 +28292,13 @@ def test_delete_custom_target_type_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_custom_target_type( - cloud_deploy.DeleteCustomTargetTypeRequest(), + client.abandon_release( + cloud_deploy.AbandonReleaseRequest(), name="name_value", ) -def test_delete_custom_target_type_rest_error(): +def test_abandon_release_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24381,48 +28307,151 @@ def test_delete_custom_target_type_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListReleasesRequest, + cloud_deploy.CreateDeployPolicyRequest, dict, ], ) -def test_list_releases_rest(request_type): +def test_create_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["deploy_policy"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "suspended": True, + "selectors": [ + { + "delivery_pipeline": {"id": "id_value", "labels": {}}, + "target": {"id": "id_value", "labels": {}}, + } + ], + "rules": [ + { + "rollout_restriction": { + "id": "id_value", + "invokers": [1], + "actions": [1], + "time_windows": { + "time_zone": "time_zone_value", + "one_time_windows": [ + { + "start_date": {"year": 433, "month": 550, "day": 318}, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_date": {}, + "end_time": {}, + } + ], + "weekly_windows": [ + {"days_of_week": [1], "start_time": {}, "end_time": {}} + ], + }, + } + } + ], + "etag": "etag_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.CreateDeployPolicyRequest.meta.fields["deploy_policy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deploy_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deploy_policy"][field])): + del request_init["deploy_policy"][field][i][subfield] + else: + del request_init["deploy_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListReleasesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_releases(request) + response = client.create_deploy_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListReleasesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_releases_rest_use_cached_wrapped_rpc(): +def test_create_deploy_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24436,35 +28465,44 @@ def test_list_releases_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_releases in client._transport._wrapped_methods + assert ( + client._transport.create_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_releases] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_deploy_policy + ] = mock_rpc request = {} - client.list_releases(request) + client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_releases(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_releases_rest_required_fields( - request_type=cloud_deploy.ListReleasesRequest, +def test_create_deploy_policy_rest_required_fields( + request_type=cloud_deploy.CreateDeployPolicyRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} request_init["parent"] = "" + request_init["deploy_policy_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24472,26 +28510,29 @@ def test_list_releases_rest_required_fields( ) # verify fields with default values are dropped + assert "deployPolicyId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_releases._get_unset_required_fields(jsonified_request) + ).create_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "deployPolicyId" in jsonified_request + assert jsonified_request["deployPolicyId"] == request_init["deploy_policy_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["deployPolicyId"] = "deploy_policy_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_releases._get_unset_required_fields(jsonified_request) + ).create_deploy_policy._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "deploy_policy_id", + "request_id", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -24499,6 +28540,8 @@ def test_list_releases_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "deployPolicyId" in jsonified_request + assert jsonified_request["deployPolicyId"] == "deploy_policy_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24507,7 +28550,7 @@ def test_list_releases_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListReleasesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24519,49 +28562,58 @@ def test_list_releases_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_releases(request) + response = client.create_deploy_policy(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "deployPolicyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = 
req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_releases_rest_unset_required_fields(): +def test_create_deploy_policy_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_releases._get_unset_required_fields({}) + unset_fields = transport.create_deploy_policy._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "deployPolicyId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "deployPolicyId", + "deployPolicy", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_releases_rest_interceptors(null_interceptor): +def test_create_deploy_policy_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24574,14 +28626,16 @@ def test_list_releases_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_releases" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_deploy_policy" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_releases" + transports.CloudDeployRestInterceptor, "pre_create_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListReleasesRequest.pb( - cloud_deploy.ListReleasesRequest() + pb_message = cloud_deploy.CreateDeployPolicyRequest.pb( + cloud_deploy.CreateDeployPolicyRequest() ) transcode.return_value = { "method": "post", @@ -24593,19 +28647,19 @@ def test_list_releases_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request 
= PreparedRequest() - req.return_value._content = cloud_deploy.ListReleasesResponse.to_json( - cloud_deploy.ListReleasesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListReleasesRequest() + request = cloud_deploy.CreateDeployPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListReleasesResponse() + post.return_value = operations_pb2.Operation() - client.list_releases( + client.create_deploy_policy( request, metadata=[ ("key", "val"), @@ -24617,8 +28671,8 @@ def test_list_releases_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_releases_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListReleasesRequest +def test_create_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateDeployPolicyRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24626,9 +28680,7 @@ def test_list_releases_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -24640,10 +28692,10 @@ def test_list_releases_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_releases(request) + client.create_deploy_policy(request) -def test_list_releases_rest_flattened(): +def test_create_deploy_policy_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24652,42 +28704,40 @@ def test_list_releases_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListReleasesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_releases(**mock_args) + client.create_deploy_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" + "%s/v1/{parent=projects/*/locations/*}/deployPolicies" % client.transport._host, args[1], ) -def test_list_releases_rest_flattened_error(transport: str = "rest"): +def test_create_deploy_policy_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24696,134 +28746,172 @@ def test_list_releases_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_releases( - cloud_deploy.ListReleasesRequest(), + client.create_deploy_policy( + cloud_deploy.CreateDeployPolicyRequest(), parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) -def test_list_releases_rest_pager(transport: str = "rest"): +def test_create_deploy_policy_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListReleasesResponse( - releases=[ - cloud_deploy.Release(), - cloud_deploy.Release(), - cloud_deploy.Release(), - ], - next_page_token="abc", - ), - cloud_deploy.ListReleasesResponse( - releases=[], - next_page_token="def", - ), - cloud_deploy.ListReleasesResponse( - releases=[ - cloud_deploy.Release(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListReleasesResponse( - releases=[ - cloud_deploy.Release(), - cloud_deploy.Release(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloud_deploy.ListReleasesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - - pager = client.list_releases(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.Release) for i in results) - - pages = list(client.list_releases(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetReleaseRequest, + cloud_deploy.UpdateDeployPolicyRequest, dict, ], ) -def test_get_release_rest(request_type): +def test_update_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" - } + # send a request that 
will satisfy transcoding + request_init = { + "deploy_policy": { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } + } + request_init["deploy_policy"] = { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "suspended": True, + "selectors": [ + { + "delivery_pipeline": {"id": "id_value", "labels": {}}, + "target": {"id": "id_value", "labels": {}}, + } + ], + "rules": [ + { + "rollout_restriction": { + "id": "id_value", + "invokers": [1], + "actions": [1], + "time_windows": { + "time_zone": "time_zone_value", + "one_time_windows": [ + { + "start_date": {"year": 433, "month": 550, "day": 318}, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_date": {}, + "end_time": {}, + } + ], + "weekly_windows": [ + {"days_of_week": [1], "start_time": {}, "end_time": {}} + ], + }, + } + } + ], + "etag": "etag_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateDeployPolicyRequest.meta.fields["deploy_policy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deploy_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deploy_policy"][field])): + del request_init["deploy_policy"][field][i][subfield] + else: + del 
request_init["deploy_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Release( - name="name_value", - uid="uid_value", - description="description_value", - abandoned=True, - skaffold_config_uri="skaffold_config_uri_value", - skaffold_config_path="skaffold_config_path_value", - render_state=cloud_deploy.Release.RenderState.SUCCEEDED, - etag="etag_value", - skaffold_version="skaffold_version_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_release(request) + response = client.update_deploy_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Release) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.abandoned is True - assert response.skaffold_config_uri == "skaffold_config_uri_value" - assert response.skaffold_config_path == "skaffold_config_path_value" - assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED - assert response.etag == "etag_value" - assert response.skaffold_version == "skaffold_version_value" + assert response.operation.name == "operations/spam" -def test_get_release_rest_use_cached_wrapped_rpc(): +def test_update_deploy_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24837,33 +28925,42 @@ def test_get_release_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_release in client._transport._wrapped_methods + assert ( + client._transport.update_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_release] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_deploy_policy + ] = mock_rpc request = {} - client.get_release(request) + client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_release(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRequest): +def test_update_deploy_policy_rest_required_fields( + request_type=cloud_deploy.UpdateDeployPolicyRequest, +): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24874,21 +28971,26 @@ def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_release._get_unset_required_fields(jsonified_request) + ).update_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_release._get_unset_required_fields(jsonified_request) + ).update_deploy_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24897,7 +28999,7 @@ def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Release() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24909,39 +29011,52 @@ def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_release(request) + response = client.update_deploy_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_release_rest_unset_required_fields(): +def test_update_deploy_policy_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.get_release._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_deploy_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "deployPolicy", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_release_rest_interceptors(null_interceptor): +def test_update_deploy_policy_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24954,13 +29069,17 @@ def test_get_release_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_release" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_deploy_policy" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_release" + transports.CloudDeployRestInterceptor, "pre_update_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetReleaseRequest.pb(cloud_deploy.GetReleaseRequest()) + pb_message = cloud_deploy.UpdateDeployPolicyRequest.pb( + cloud_deploy.UpdateDeployPolicyRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -24971,17 +29090,19 @@ def test_get_release_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.Release.to_json(cloud_deploy.Release()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = cloud_deploy.GetReleaseRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.Release() + post.return_value = operations_pb2.Operation() - client.get_release( + client.update_deploy_policy( request, metadata=[ ("key", "val"), @@ -24993,8 +29114,8 @@ def test_get_release_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_release_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetReleaseRequest +def test_update_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateDeployPolicyRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25003,7 +29124,9 @@ def test_get_release_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + "deploy_policy": { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } } request = request_type(**request_init) @@ -25016,10 +29139,10 @@ def test_get_release_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_release(request) + client.update_deploy_policy(request) -def test_get_release_rest_flattened(): +def test_update_deploy_policy_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25028,42 +29151,43 @@ def test_get_release_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.Release() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + "deploy_policy": { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_release(**mock_args) + client.update_deploy_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}" + "%s/v1/{deploy_policy.name=projects/*/locations/*/deployPolicies/*}" % client.transport._host, args[1], ) -def test_get_release_rest_flattened_error(transport: str = "rest"): +def test_update_deploy_policy_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25072,13 +29196,14 @@ def test_get_release_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_release( - cloud_deploy.GetReleaseRequest(), - name="name_value", + client.update_deploy_policy( + cloud_deploy.UpdateDeployPolicyRequest(), + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_release_rest_error(): +def test_update_deploy_policy_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25087,325 +29212,370 @@ def test_get_release_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateReleaseRequest, + cloud_deploy.DeleteDeployPolicyRequest, dict, ], ) -def test_create_release_rest(request_type): +def test_delete_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - request_init["release"] = { - "name": "name_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "abandoned": True, - "create_time": {"seconds": 751, "nanos": 543}, - "render_start_time": {}, - "render_end_time": {}, - "skaffold_config_uri": "skaffold_config_uri_value", - "skaffold_config_path": "skaffold_config_path_value", - "build_artifacts": [{"image": "image_value", "tag": "tag_value"}], - "delivery_pipeline_snapshot": { - "name": "name_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {}, - "update_time": {}, - "serial_pipeline": { - "stages": [ - { - "target_id": "target_id_value", - "profiles": ["profiles_value1", "profiles_value2"], - "strategy": { - "standard": { - "verify": True, - "predeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - "postdeploy": { - "actions": ["actions_value1", "actions_value2"] - 
}, - }, - "canary": { - "runtime_config": { - "kubernetes": { - "gateway_service_mesh": { - "http_route": "http_route_value", - "service": "service_value", - "deployment": "deployment_value", - "route_update_wait_time": { - "seconds": 751, - "nanos": 543, - }, - "stable_cutback_duration": {}, - "pod_selector_label": "pod_selector_label_value", - }, - "service_networking": { - "service": "service_value", - "deployment": "deployment_value", - "disable_pod_overprovisioning": True, - "pod_selector_label": "pod_selector_label_value", - }, - }, - "cloud_run": { - "automatic_traffic_control": True, - "canary_revision_tags": [ - "canary_revision_tags_value1", - "canary_revision_tags_value2", - ], - "prior_revision_tags": [ - "prior_revision_tags_value1", - "prior_revision_tags_value2", - ], - "stable_revision_tags": [ - "stable_revision_tags_value1", - "stable_revision_tags_value2", - ], - }, - }, - "canary_deployment": { - "percentages": [1170, 1171], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - }, - "custom_canary_deployment": { - "phase_configs": [ - { - "phase_id": "phase_id_value", - "percentage": 1054, - "profiles": [ - "profiles_value1", - "profiles_value2", - ], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - } - ] - }, - }, - }, - "deploy_parameters": [ - {"values": {}, "match_target_labels": {}} - ], - } - ] - }, - "condition": { - "pipeline_ready_condition": {"status": True, "update_time": {}}, - "targets_present_condition": { - "status": True, - "missing_targets": [ - "missing_targets_value1", - "missing_targets_value2", - ], - "update_time": {}, - }, - "targets_type_condition": { - "status": True, - "error_details": "error_details_value", - }, - }, - "etag": "etag_value", - "suspended": True, - }, - "target_snapshots": [ - { - "name": "name_value", - "target_id": "target_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "require_approval": True, - "create_time": {}, - 
"update_time": {}, - "gke": { - "cluster": "cluster_value", - "internal_ip": True, - "proxy_url": "proxy_url_value", - }, - "anthos_cluster": {"membership": "membership_value"}, - "run": {"location": "location_value"}, - "multi_target": { - "target_ids": ["target_ids_value1", "target_ids_value2"] - }, - "custom_target": {"custom_target_type": "custom_target_type_value"}, - "etag": "etag_value", - "execution_configs": [ - { - "usages": [1], - "default_pool": { - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "private_pool": { - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - "execution_timeout": {}, - "verbose": True, - } - ], - "deploy_parameters": {}, - } - ], - "custom_target_type_snapshots": [ - { - "name": "name_value", - "custom_target_type_id": "custom_target_type_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {}, - "update_time": {}, - "etag": "etag_value", - "custom_actions": { - "render_action": "render_action_value", - "deploy_action": "deploy_action_value", - "include_skaffold_modules": [ - { - "configs": ["configs_value1", "configs_value2"], - "git": { - "repo": "repo_value", - "path": "path_value", - "ref": "ref_value", - }, - "google_cloud_storage": { - "source": "source_value", - "path": "path_value", - }, - "google_cloud_build_repo": { - "repository": "repository_value", - "path": "path_value", - "ref": "ref_value", - }, - } - ], - }, + request_init = {"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deploy_policy(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_deploy_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_deploy_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_deploy_policy + ] = mock_rpc + + request = {} + client.delete_deploy_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_deploy_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_deploy_policy_rest_required_fields( + request_type=cloud_deploy.DeleteDeployPolicyRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deploy_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deploy_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } - ], - "render_state": 1, - "etag": "etag_value", - "skaffold_version": "skaffold_version_value", - "target_artifacts": {}, - "target_renders": {}, - "condition": { - "release_ready_condition": {"status": True}, - "skaffold_supported_condition": { - "status": True, - "skaffold_support_state": 1, - "maintenance_mode_time": {}, - "support_expiration_time": {}, - }, - }, - "deploy_parameters": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + transcode.return_value = transcode_result - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateReleaseRequest.meta.fields["release"] + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_deploy_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_deploy_policy_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_deploy_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deploy_policy_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_deploy_policy" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_delete_deploy_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.DeleteDeployPolicyRequest.pb( + cloud_deploy.DeleteDeployPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + request = cloud_deploy.DeleteDeployPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - subfields_not_in_runtime = [] + client.delete_deploy_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["release"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + pre.assert_called_once() + post.assert_called_once() - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["release"][field])): - del request_init["release"][field][i][subfield] - else: - del request_init["release"][field][subfield] +def test_delete_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteDeployPolicyRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = 
{"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deploy_policy(request) + + +def test_delete_deploy_policy_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation(name="operations/spam") + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deploy_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_deploy_policy_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deploy_policy( + cloud_deploy.DeleteDeployPolicyRequest(), + name="name_value", + ) + + +def test_delete_deploy_policy_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListDeployPoliciesRequest, + dict, + ], +) +def test_list_deploy_policies_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.ListDeployPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeployPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_release(request) + response = client.list_deploy_policies(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListDeployPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_create_release_rest_use_cached_wrapped_rpc(): +def test_list_deploy_policies_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25419,40 +29589,39 @@ def test_create_release_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_release in client._transport._wrapped_methods + assert ( + client._transport.list_deploy_policies in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_release] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_deploy_policies + ] = mock_rpc request = {} - client.create_release(request) + client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_release(request) + client.list_deploy_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_release_rest_required_fields( - request_type=cloud_deploy.CreateReleaseRequest, +def test_list_deploy_policies_rest_required_fields( + request_type=cloud_deploy.ListDeployPoliciesRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} request_init["parent"] = "" - request_init["release_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25460,29 +29629,26 @@ def test_create_release_rest_required_fields( ) # verify fields with default values are dropped - assert "releaseId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_release._get_unset_required_fields(jsonified_request) + ).list_deploy_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "releaseId" in jsonified_request - assert jsonified_request["releaseId"] == request_init["release_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["releaseId"] = "release_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_release._get_unset_required_fields(jsonified_request) + 
).list_deploy_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "release_id", - "request_id", - "validate_only", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) @@ -25490,8 +29656,6 @@ def test_create_release_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "releaseId" in jsonified_request - assert jsonified_request["releaseId"] == "release_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25500,7 +29664,7 @@ def test_create_release_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.ListDeployPoliciesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25512,58 +29676,49 @@ def test_create_release_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeployPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_release(request) + response = client.list_deploy_policies(request) - expected_params = [ - ( - "releaseId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_release_rest_unset_required_fields(): +def test_list_deploy_policies_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_release._get_unset_required_fields({}) + unset_fields = transport.list_deploy_policies._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "releaseId", - "requestId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "releaseId", - "release", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_release_rest_interceptors(null_interceptor): +def test_list_deploy_policies_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ 
-25576,16 +29731,14 @@ def test_create_release_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_release" + transports.CloudDeployRestInterceptor, "post_list_deploy_policies" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_release" + transports.CloudDeployRestInterceptor, "pre_list_deploy_policies" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateReleaseRequest.pb( - cloud_deploy.CreateReleaseRequest() + pb_message = cloud_deploy.ListDeployPoliciesRequest.pb( + cloud_deploy.ListDeployPoliciesRequest() ) transcode.return_value = { "method": "post", @@ -25597,19 +29750,19 @@ def test_create_release_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.ListDeployPoliciesResponse.to_json( + cloud_deploy.ListDeployPoliciesResponse() ) - request = cloud_deploy.CreateReleaseRequest() + request = cloud_deploy.ListDeployPoliciesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.ListDeployPoliciesResponse() - client.create_release( + client.list_deploy_policies( request, metadata=[ ("key", "val"), @@ -25621,8 +29774,8 @@ def test_create_release_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_release_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateReleaseRequest +def test_list_deploy_policies_rest_bad_request( + transport: str = "rest", 
request_type=cloud_deploy.ListDeployPoliciesRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25630,9 +29783,7 @@ def test_create_release_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25644,10 +29795,10 @@ def test_create_release_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_release(request) + client.list_deploy_policies(request) -def test_create_release_rest_flattened(): +def test_list_deploy_policies_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25656,42 +29807,40 @@ def test_create_release_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.ListDeployPoliciesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - release=cloud_deploy.Release(name="name_value"), - release_id="release_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeployPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_release(**mock_args) + client.list_deploy_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" + "%s/v1/{parent=projects/*/locations/*}/deployPolicies" % client.transport._host, args[1], ) -def test_create_release_rest_flattened_error(transport: str = "rest"): +def test_list_deploy_policies_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25700,60 +29849,124 @@ def test_create_release_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_release( - cloud_deploy.CreateReleaseRequest(), + client.list_deploy_policies( + cloud_deploy.ListDeployPoliciesRequest(), parent="parent_value", - release=cloud_deploy.Release(name="name_value"), - release_id="release_id_value", ) -def test_create_release_rest_error(): +def test_list_deploy_policies_rest_pager(transport: str = "rest"): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + ], + next_page_token="abc", + ), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], + next_page_token="def", + ), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_deploy.ListDeployPoliciesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = 
client.list_deploy_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.DeployPolicy) for i in results) + + pages = list(client.list_deploy_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - cloud_deploy.AbandonReleaseRequest, + cloud_deploy.GetDeployPolicyRequest, dict, ], ) -def test_abandon_release_rest(request_type): +def test_get_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.AbandonReleaseResponse() + return_value = cloud_deploy.DeployPolicy( + name="name_value", + uid="uid_value", + description="description_value", + suspended=True, + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + return_value = cloud_deploy.DeployPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.abandon_release(request) + response = client.get_deploy_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.AbandonReleaseResponse) + assert isinstance(response, cloud_deploy.DeployPolicy) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.suspended is True + assert response.etag == "etag_value" -def test_abandon_release_rest_use_cached_wrapped_rpc(): +def test_get_deploy_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25767,30 +29980,32 @@ def test_abandon_release_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.abandon_release in client._transport._wrapped_methods + assert client._transport.get_deploy_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.abandon_release] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_deploy_policy + ] = mock_rpc request = {} - client.abandon_release(request) + client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.abandon_release(request) + client.get_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_abandon_release_rest_required_fields( - request_type=cloud_deploy.AbandonReleaseRequest, +def test_get_deploy_policy_rest_required_fields( + request_type=cloud_deploy.GetDeployPolicyRequest, ): transport_class = transports.CloudDeployRestTransport @@ -25806,7 +30021,7 @@ def test_abandon_release_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).abandon_release._get_unset_required_fields(jsonified_request) + ).get_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -25815,7 +30030,7 @@ def test_abandon_release_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).abandon_release._get_unset_required_fields(jsonified_request) + ).get_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -25829,7 +30044,7 @@ def test_abandon_release_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.AbandonReleaseResponse() + return_value = cloud_deploy.DeployPolicy() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25841,40 +30056,39 @@ def test_abandon_release_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + return_value = cloud_deploy.DeployPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.abandon_release(request) + response = client.get_deploy_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_abandon_release_rest_unset_required_fields(): +def test_get_deploy_policy_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.abandon_release._get_unset_required_fields({}) + unset_fields = transport.get_deploy_policy._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_abandon_release_rest_interceptors(null_interceptor): +def test_get_deploy_policy_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25887,14 +30101,14 @@ def test_abandon_release_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, 
"post_abandon_release" + transports.CloudDeployRestInterceptor, "post_get_deploy_policy" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_abandon_release" + transports.CloudDeployRestInterceptor, "pre_get_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.AbandonReleaseRequest.pb( - cloud_deploy.AbandonReleaseRequest() + pb_message = cloud_deploy.GetDeployPolicyRequest.pb( + cloud_deploy.GetDeployPolicyRequest() ) transcode.return_value = { "method": "post", @@ -25906,19 +30120,19 @@ def test_abandon_release_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.AbandonReleaseResponse.to_json( - cloud_deploy.AbandonReleaseResponse() + req.return_value._content = cloud_deploy.DeployPolicy.to_json( + cloud_deploy.DeployPolicy() ) - request = cloud_deploy.AbandonReleaseRequest() + request = cloud_deploy.GetDeployPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.AbandonReleaseResponse() + post.return_value = cloud_deploy.DeployPolicy() - client.abandon_release( + client.get_deploy_policy( request, metadata=[ ("key", "val"), @@ -25930,8 +30144,8 @@ def test_abandon_release_rest_interceptors(null_interceptor): post.assert_called_once() -def test_abandon_release_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.AbandonReleaseRequest +def test_get_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetDeployPolicyRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25939,9 +30153,7 @@ def test_abandon_release_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": 
"projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25953,10 +30165,10 @@ def test_abandon_release_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.abandon_release(request) + client.get_deploy_policy(request) -def test_abandon_release_rest_flattened(): +def test_get_deploy_policy_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25965,11 +30177,11 @@ def test_abandon_release_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.AbandonReleaseResponse() + return_value = cloud_deploy.DeployPolicy() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" } # get truthy value for each flattened field @@ -25982,25 +30194,25 @@ def test_abandon_release_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + return_value = cloud_deploy.DeployPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.abandon_release(**mock_args) + client.get_deploy_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}:abandon" + "%s/v1/{name=projects/*/locations/*/deployPolicies/*}" % client.transport._host, args[1], ) -def test_abandon_release_rest_flattened_error(transport: str = "rest"): +def test_get_deploy_policy_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -26009,13 +30221,13 @@ def test_abandon_release_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.abandon_release( - cloud_deploy.AbandonReleaseRequest(), + client.get_deploy_policy( + cloud_deploy.GetDeployPolicyRequest(), name="name_value", ) -def test_abandon_release_rest_error(): +def test_get_deploy_policy_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -27941,6 +32153,7 @@ def test_create_rollout_rest_required_fields( # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( + "override_deploy_policy", "request_id", "rollout_id", "starting_phase_id", @@ -28009,6 +32222,7 @@ def test_create_rollout_rest_unset_required_fields(): assert set(unset_fields) == ( set( ( + "overrideDeployPolicy", "requestId", "rolloutId", "startingPhaseId", @@ -33264,6 +37478,11 @@ def test_cloud_deploy_base_transport(): "get_release", "create_release", "abandon_release", + "create_deploy_policy", + "update_deploy_policy", + "delete_deploy_policy", + "list_deploy_policies", + "get_deploy_policy", "approve_rollout", "advance_rollout", "cancel_rollout", @@ -33630,6 +37849,21 @@ def test_cloud_deploy_client_transport_session_collision(transport_name): session1 = client1.transport.abandon_release._session session2 = client2.transport.abandon_release._session assert session1 != session2 + session1 = client1.transport.create_deploy_policy._session + session2 = client2.transport.create_deploy_policy._session + assert session1 != session2 + session1 = client1.transport.update_deploy_policy._session + session2 = client2.transport.update_deploy_policy._session + assert session1 != session2 + session1 = client1.transport.delete_deploy_policy._session + session2 = client2.transport.delete_deploy_policy._session + assert session1 != session2 + session1 = client1.transport.list_deploy_policies._session + session2 = client2.transport.list_deploy_policies._session + assert session1 != session2 + session1 = client1.transport.get_deploy_policy._session + session2 = client2.transport.get_deploy_policy._session + assert session1 != session2 session1 = client1.transport.approve_rollout._session session2 = client2.transport.approve_rollout._session assert session1 != session2 @@ -34037,10 +38271,38 @@ def test_parse_delivery_pipeline_path(): assert expected == actual -def test_job_path(): +def test_deploy_policy_path(): project = "winkle" location = "nautilus" - job = "scallop" + deploy_policy = "scallop" + expected = ( + 
"projects/{project}/locations/{location}/deployPolicies/{deploy_policy}".format( + project=project, + location=location, + deploy_policy=deploy_policy, + ) + ) + actual = CloudDeployClient.deploy_policy_path(project, location, deploy_policy) + assert expected == actual + + +def test_parse_deploy_policy_path(): + expected = { + "project": "abalone", + "location": "squid", + "deploy_policy": "clam", + } + path = CloudDeployClient.deploy_policy_path(**expected) + + # Check that the path construction is reversible. + actual = CloudDeployClient.parse_deploy_policy_path(path) + assert expected == actual + + +def test_job_path(): + project = "whelk" + location = "octopus" + job = "oyster" expected = "projects/{project}/locations/{location}/jobs/{job}".format( project=project, location=location, @@ -34052,9 +38314,9 @@ def test_job_path(): def test_parse_job_path(): expected = { - "project": "abalone", - "location": "squid", - "job": "clam", + "project": "nudibranch", + "location": "cuttlefish", + "job": "mussel", } path = CloudDeployClient.job_path(**expected) @@ -34064,12 +38326,12 @@ def test_parse_job_path(): def test_job_run_path(): - project = "whelk" - location = "octopus" - delivery_pipeline = "oyster" - release = "nudibranch" - rollout = "cuttlefish" - job_run = "mussel" + project = "winkle" + location = "nautilus" + delivery_pipeline = "scallop" + release = "abalone" + rollout = "squid" + job_run = "clam" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}/jobRuns/{job_run}".format( project=project, location=location, @@ -34086,12 +38348,12 @@ def test_job_run_path(): def test_parse_job_run_path(): expected = { - "project": "winkle", - "location": "nautilus", - "delivery_pipeline": "scallop", - "release": "abalone", - "rollout": "squid", - "job_run": "clam", + "project": "whelk", + "location": "octopus", + "delivery_pipeline": "oyster", + "release": "nudibranch", + "rollout": 
"cuttlefish", + "job_run": "mussel", } path = CloudDeployClient.job_run_path(**expected) @@ -34101,9 +38363,9 @@ def test_parse_job_run_path(): def test_membership_path(): - project = "whelk" - location = "octopus" - membership = "oyster" + project = "winkle" + location = "nautilus" + membership = "scallop" expected = ( "projects/{project}/locations/{location}/memberships/{membership}".format( project=project, @@ -34117,9 +38379,9 @@ def test_membership_path(): def test_parse_membership_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "membership": "mussel", + "project": "abalone", + "location": "squid", + "membership": "clam", } path = CloudDeployClient.membership_path(**expected) @@ -34129,10 +38391,10 @@ def test_parse_membership_path(): def test_release_path(): - project = "winkle" - location = "nautilus" - delivery_pipeline = "scallop" - release = "abalone" + project = "whelk" + location = "octopus" + delivery_pipeline = "oyster" + release = "nudibranch" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}".format( project=project, location=location, @@ -34147,10 +38409,10 @@ def test_release_path(): def test_parse_release_path(): expected = { - "project": "squid", - "location": "clam", - "delivery_pipeline": "whelk", - "release": "octopus", + "project": "cuttlefish", + "location": "mussel", + "delivery_pipeline": "winkle", + "release": "nautilus", } path = CloudDeployClient.release_path(**expected) @@ -34160,10 +38422,10 @@ def test_parse_release_path(): def test_repository_path(): - project = "oyster" - location = "nudibranch" - connection = "cuttlefish" - repository = "mussel" + project = "scallop" + location = "abalone" + connection = "squid" + repository = "clam" expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format( project=project, location=location, @@ -34178,10 +38440,10 @@ def test_repository_path(): def 
test_parse_repository_path(): expected = { - "project": "winkle", - "location": "nautilus", - "connection": "scallop", - "repository": "abalone", + "project": "whelk", + "location": "octopus", + "connection": "oyster", + "repository": "nudibranch", } path = CloudDeployClient.repository_path(**expected) @@ -34191,11 +38453,11 @@ def test_parse_repository_path(): def test_rollout_path(): - project = "squid" - location = "clam" - delivery_pipeline = "whelk" - release = "octopus" - rollout = "oyster" + project = "cuttlefish" + location = "mussel" + delivery_pipeline = "winkle" + release = "nautilus" + rollout = "scallop" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}".format( project=project, location=location, @@ -34211,11 +38473,11 @@ def test_rollout_path(): def test_parse_rollout_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "delivery_pipeline": "mussel", - "release": "winkle", - "rollout": "nautilus", + "project": "abalone", + "location": "squid", + "delivery_pipeline": "clam", + "release": "whelk", + "rollout": "octopus", } path = CloudDeployClient.rollout_path(**expected) @@ -34225,9 +38487,9 @@ def test_parse_rollout_path(): def test_service_path(): - project = "scallop" - location = "abalone" - service = "squid" + project = "oyster" + location = "nudibranch" + service = "cuttlefish" expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -34239,9 +38501,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "clam", - "location": "whelk", - "service": "octopus", + "project": "mussel", + "location": "winkle", + "service": "nautilus", } path = CloudDeployClient.service_path(**expected) @@ -34251,9 +38513,9 @@ def test_parse_service_path(): def test_target_path(): - project = "oyster" - location = "nudibranch" - target = "cuttlefish" + project = "scallop" + 
location = "abalone" + target = "squid" expected = "projects/{project}/locations/{location}/targets/{target}".format( project=project, location=location, @@ -34265,9 +38527,9 @@ def test_target_path(): def test_parse_target_path(): expected = { - "project": "mussel", - "location": "winkle", - "target": "nautilus", + "project": "clam", + "location": "whelk", + "target": "octopus", } path = CloudDeployClient.target_path(**expected) @@ -34277,9 +38539,9 @@ def test_parse_target_path(): def test_worker_pool_path(): - project = "scallop" - location = "abalone" - worker_pool = "squid" + project = "oyster" + location = "nudibranch" + worker_pool = "cuttlefish" expected = ( "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, @@ -34293,9 +38555,9 @@ def test_worker_pool_path(): def test_parse_worker_pool_path(): expected = { - "project": "clam", - "location": "whelk", - "worker_pool": "octopus", + "project": "mussel", + "location": "winkle", + "worker_pool": "nautilus", } path = CloudDeployClient.worker_pool_path(**expected) @@ -34305,7 +38567,7 @@ def test_parse_worker_pool_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -34315,7 +38577,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "abalone", } path = CloudDeployClient.common_billing_account_path(**expected) @@ -34325,7 +38587,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -34335,7 +38597,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "clam", } path = CloudDeployClient.common_folder_path(**expected) @@ -34345,7 
+38607,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -34355,7 +38617,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "octopus", } path = CloudDeployClient.common_organization_path(**expected) @@ -34365,7 +38627,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -34375,7 +38637,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "nudibranch", } path = CloudDeployClient.common_project_path(**expected) @@ -34385,8 +38647,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -34397,8 +38659,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "winkle", + "location": "nautilus", } path = CloudDeployClient.common_location_path(**expected) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py index 3b0a9d9a8d43..558c8aab67c5 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/async_client.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/async_client.py index 0207eada8222..c5aefdb6f411 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/async_client.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,9 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DeveloperConnectClient).get_transport_class, type(DeveloperConnectClient) - ) + get_transport_class = DeveloperConnectClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py index 79f9d66110c8..fec62e124d0b 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py @@ -719,7 +719,7 @@ def __init__( Type[DeveloperConnectTransport], Callable[..., DeveloperConnectTransport], ] = ( - type(self).get_transport_class(transport) + DeveloperConnectClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DeveloperConnectTransport], transport) ) diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json b/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json index c6db9b18fbf0..6d8efa20b69c 100644 --- a/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json +++ b/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-developerconnect", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py b/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py index 8a5240d9f1d3..9b1557a02d43 100644 --- a/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py +++ b/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py @@ -1336,22 +1336,23 @@ async def test_list_connections_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_connections - ] = mock_object + ] = mock_rpc request = {} await client.list_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1909,22 +1910,23 @@ async def test_get_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_connection - ] = mock_object + ] = mock_rpc request = {} await client.get_connection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2235,8 +2237,9 @@ def test_create_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_connection(request) @@ -2292,26 +2295,28 @@ async def test_create_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_connection - ] = mock_object + ] = mock_rpc request = {} await client.create_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2660,8 +2665,9 @@ def test_update_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_connection(request) @@ -2717,26 +2723,28 @@ async def test_update_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_connection - ] = mock_object + ] = mock_rpc request = {} await client.update_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3081,8 +3089,9 @@ def test_delete_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_connection(request) @@ -3138,26 +3147,28 @@ async def test_delete_connection_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_connection - ] = mock_object + ] = mock_rpc request = {} await client.delete_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3471,8 +3482,9 @@ def test_create_git_repository_link_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_git_repository_link(request) @@ -3528,26 +3540,28 @@ async def test_create_git_repository_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_git_repository_link - ] = mock_object + ] = mock_rpc request = {} await client.create_git_repository_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_git_repository_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3881,8 +3895,9 @@ def test_delete_git_repository_link_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_git_repository_link(request) @@ -3938,26 +3953,28 @@ async def test_delete_git_repository_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_git_repository_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_git_repository_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_git_repository_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4336,22 +4353,23 @@ async def test_list_git_repository_links_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_git_repository_links - ] = mock_object + ] = mock_rpc request = {} await client.list_git_repository_links(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_git_repository_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4944,22 +4962,23 @@ async def test_get_git_repository_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_git_repository_link - ] = mock_object + ] = mock_rpc request = {} await client.get_git_repository_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_git_repository_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5343,22 +5362,23 @@ async def test_fetch_read_write_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_read_write_token - ] = mock_object + ] = mock_rpc request = {} await client.fetch_read_write_token(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_read_write_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5725,22 +5745,23 @@ async def test_fetch_read_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_read_token - ] = mock_object + ] = mock_rpc request = {} await client.fetch_read_token(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_read_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6107,22 +6128,23 @@ async def test_fetch_linkable_git_repositories_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_linkable_git_repositories - ] = mock_object + ] = mock_rpc request = {} await client.fetch_linkable_git_repositories(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_linkable_git_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6698,22 +6720,23 @@ async def test_fetch_git_hub_installations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_git_hub_installations - ] = mock_object + ] = mock_rpc request = {} await client.fetch_git_hub_installations(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_git_hub_installations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7075,22 +7098,23 @@ async def test_fetch_git_refs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fetch_git_refs - ] = mock_object + ] = mock_rpc request = {} await client.fetch_git_refs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fetch_git_refs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py index ed47e29bfcec..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.35.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py index ed47e29bfcec..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.35.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py index 646e81cef14c..d418c72e27a8 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -217,9 +216,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AgentsClient).get_transport_class, type(AgentsClient) - ) + get_transport_class = AgentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py index 26950a72cab5..ee7755cbc5e4 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py @@ -809,7 +809,7 @@ def __init__( transport_init: Union[ Type[AgentsTransport], Callable[..., AgentsTransport] ] = ( - type(self).get_transport_class(transport) + AgentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AgentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py index 5d6b6cb43f6a..1845856dbedc 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -187,9 +186,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ChangelogsClient).get_transport_class, type(ChangelogsClient) - ) + get_transport_class = ChangelogsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py index e7a87a9dc128..9131899f0763 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py @@ -663,7 +663,7 @@ def __init__( transport_init: Union[ Type[ChangelogsTransport], Callable[..., ChangelogsTransport] ] = ( - type(self).get_transport_class(transport) + ChangelogsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ChangelogsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py index 28b3f4565c02..3d8b32468186 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -197,9 +196,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DeploymentsClient).get_transport_class, type(DeploymentsClient) - ) + get_transport_class = DeploymentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py index de3bbf4c104f..377a00aa1f16 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py @@ -743,7 +743,7 @@ def __init__( transport_init: Union[ Type[DeploymentsTransport], Callable[..., DeploymentsTransport] ] = ( - type(self).get_transport_class(transport) + DeploymentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DeploymentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py index 978b78be2307..ad3d87bc8c15 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EntityTypesClient).get_transport_class, type(EntityTypesClient) - ) + get_transport_class = EntityTypesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py index f2f260f15254..ee52f34597b1 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py @@ -666,7 +666,7 @@ def __init__( transport_init: Union[ Type[EntityTypesTransport], Callable[..., EntityTypesTransport] ] = ( - type(self).get_transport_class(transport) + EntityTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EntityTypesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py index 20627436e83c..dbeb27d1f3ea 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -212,9 +211,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(EnvironmentsClient).get_transport_class, type(EnvironmentsClient) - ) + get_transport_class = EnvironmentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py index 9e2f16b22c88..627b1550dd69 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py @@ -818,7 +818,7 @@ def __init__( transport_init: Union[ Type[EnvironmentsTransport], Callable[..., EnvironmentsTransport] ] = ( - type(self).get_transport_class(transport) + EnvironmentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EnvironmentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py index 55f9ff9e2aba..2c7b0531132c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ExperimentsClient).get_transport_class, type(ExperimentsClient) - ) + get_transport_class = ExperimentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py index 2bcb0a2834be..ab123d3d2577 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py @@ -694,7 +694,7 @@ def __init__( transport_init: Union[ Type[ExperimentsTransport], Callable[..., ExperimentsTransport] ] = ( - type(self).get_transport_class(transport) + ExperimentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ExperimentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py index e1d493f9c433..d577674add12 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -203,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(FlowsClient).get_transport_class, type(FlowsClient) - ) + get_transport_class = FlowsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py index 0039ad75bb96..a9d4179addc9 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py @@ -793,7 +793,7 @@ def __init__( transport_init: Union[ Type[FlowsTransport], Callable[..., FlowsTransport] ] = ( - type(self).get_transport_class(transport) + FlowsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., FlowsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/async_client.py index fe8a36174bfb..fa5c7d375b52 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -188,9 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(GeneratorsClient).get_transport_class, type(GeneratorsClient) - ) + get_transport_class = GeneratorsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py index c736a9c1c3a3..6628598ed330 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py @@ -664,7 +664,7 @@ def __init__( transport_init: Union[ Type[GeneratorsTransport], Callable[..., GeneratorsTransport] ] = ( - type(self).get_transport_class(transport) + GeneratorsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., GeneratorsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py index 8a42b95d4097..651f59caa0db 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -190,9 +189,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(IntentsClient).get_transport_class, type(IntentsClient) - ) + get_transport_class = IntentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py index 3cd48e97b07d..ed919379403b 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py @@ -690,7 +690,7 @@ def __init__( transport_init: Union[ Type[IntentsTransport], Callable[..., IntentsTransport] ] = ( - type(self).get_transport_class(transport) + IntentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., IntentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py index 1f4b99ef98f2..55f6df6cae8c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -195,9 +194,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PagesClient).get_transport_class, type(PagesClient) - ) + get_transport_class = PagesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py index 15651b9025fc..c937c1aaf18b 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py @@ -787,7 +787,7 @@ def __init__( transport_init: Union[ Type[PagesTransport], Callable[..., PagesTransport] ] = ( - type(self).get_transport_class(transport) + PagesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PagesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py index cf804aa80751..6a07ef8eaee0 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -216,10 +215,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SecuritySettingsServiceClient).get_transport_class, - type(SecuritySettingsServiceClient), - ) + get_transport_class = SecuritySettingsServiceClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py index cff938935e48..b8798ee4c5f5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py @@ -720,7 +720,7 @@ def __init__( Type[SecuritySettingsServiceTransport], Callable[..., SecuritySettingsServiceTransport], ] = ( - type(self).get_transport_class(transport) + SecuritySettingsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SecuritySettingsServiceTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py index 8182cbdc393b..629553f8b32c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -201,10 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SessionEntityTypesClient).get_transport_class, - type(SessionEntityTypesClient), - ) + get_transport_class = SessionEntityTypesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py index 91c34591e546..9eb7e66a9a34 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py @@ -679,7 +679,7 @@ def __init__( Type[SessionEntityTypesTransport], Callable[..., SessionEntityTypesTransport], ] = ( - type(self).get_transport_class(transport) + SessionEntityTypesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SessionEntityTypesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/async_client.py index ac9083dd159f..d7e73e53d89f 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( AsyncIterable, @@ -212,9 +211,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SessionsClient).get_transport_class, type(SessionsClient) - ) + get_transport_class = SessionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py index d0fcab1d2a96..bd0e647b2776 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py @@ -887,7 +887,7 @@ def __init__( transport_init: Union[ Type[SessionsTransport], Callable[..., SessionsTransport] ] = ( - type(self).get_transport_class(transport) + SessionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SessionsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py index 3ed4bbfe0d5a..c17d5d93010a 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -216,9 +215,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TestCasesClient).get_transport_class, type(TestCasesClient) - ) + get_transport_class = TestCasesClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py index 1ba2e785fa73..890ba1cb1258 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py @@ -888,7 +888,7 @@ def __init__( transport_init: Union[ Type[TestCasesTransport], Callable[..., TestCasesTransport] ] = ( - type(self).get_transport_class(transport) + TestCasesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TestCasesTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py index 74c87b263245..32a9f5867ae5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -211,10 +210,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(TransitionRouteGroupsClient).get_transport_class, - type(TransitionRouteGroupsClient), - ) + get_transport_class = TransitionRouteGroupsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py index 595b5a8976f0..7e035c82dabc 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py @@ -779,7 +779,7 @@ def __init__( Type[TransitionRouteGroupsTransport], Callable[..., TransitionRouteGroupsTransport], ] = ( - type(self).get_transport_class(transport) + TransitionRouteGroupsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., TransitionRouteGroupsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py index 42ec82ce0340..4e55612528a9 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(VersionsClient).get_transport_class, type(VersionsClient) - ) + get_transport_class = VersionsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py index bdf2c9bbe88e..4ebdc5b74698 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py @@ -672,7 +672,7 @@ def __init__( transport_init: Union[ Type[VersionsTransport], Callable[..., VersionsTransport] ] = ( - type(self).get_transport_class(transport) + VersionsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., VersionsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py index 9755dd5fd338..0098e1a7841e 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(WebhooksClient).get_transport_class, type(WebhooksClient) - ) + get_transport_class = WebhooksClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py index 850bd6db63e2..6f05f8fbf4a6 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py @@ -689,7 +689,7 @@ def __init__( transport_init: Union[ Type[WebhooksTransport], Callable[..., WebhooksTransport] ] = ( - type(self).get_transport_class(transport) + WebhooksClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., WebhooksTransport], transport) ) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py index ed47e29bfcec..558c8aab67c5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.35.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool.py index 2adbf8c97e31..65a3df4ad4d9 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/tool.py @@ -660,17 +660,12 @@ class CACert(proto.Message): certificates. N.B. Make sure the HTTPS server certificates are signed with "subject alt name". For instance a certificate can be self-signed - using the following command: - - :: - - openssl x509 - -req -days 200 -in example.com.csr \ - -signkey example.com.key \ - -out example.com.crt \ - -extfile <(printf - "\nsubjectAltName='DNS:www.example.com'") - + using the following command, openssl x509 + -req -days 200 -in example.com.csr \ + -signkey example.com.key \ + -out example.com.crt \ + -extfile <(printf + "\nsubjectAltName='DNS:www.example.com'") """ display_name: str = proto.Field( diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json index 0f9ba15f5cf6..1ea6a76db3ff 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow-cx", - "version": "1.35.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json 
b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json index 7f3672bb92e4..0fdb4114a2f4 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow-cx", - "version": "1.35.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py index f3a358ac3535..c1fd0dd5c6de 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py @@ -1241,22 +1241,23 @@ async def test_list_agents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_agents - ] = mock_object + ] = mock_rpc request = {} await client.list_agents(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_agents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1833,22 +1834,23 @@ async def test_get_agent_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_agent - ] = mock_object + ] = mock_rpc request = {} await client.get_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2253,22 +2255,23 @@ async def test_create_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_agent - ] = mock_object + ] = mock_rpc request = {} await client.create_agent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2679,22 +2682,23 @@ async def test_update_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_agent - ] = mock_object + ] = mock_rpc request = {} await client.update_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3066,22 +3070,23 @@ async def test_delete_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_agent - ] = mock_object + ] = mock_rpc request = {} await client.delete_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3369,8 +3374,9 @@ def test_export_agent_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_agent(request) @@ -3424,26 +3430,28 @@ async def test_export_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_agent - ] = mock_object + ] = mock_rpc request = {} await client.export_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3653,8 +3661,9 @@ def test_restore_agent_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_agent(request) @@ -3708,26 +3717,28 @@ async def test_restore_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_agent - ] = mock_object + ] = mock_rpc request = {} await client.restore_agent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3993,22 +4004,23 @@ async def test_validate_agent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_agent - ] = mock_object + ] = mock_rpc request = {} await client.validate_agent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.validate_agent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4290,22 +4302,23 @@ async def test_get_agent_validation_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_agent_validation_result - ] = mock_object + ] = mock_rpc request = {} await client.get_agent_validation_result(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_agent_validation_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4682,22 +4695,23 @@ async def test_get_generative_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_generative_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_generative_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_generative_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5080,22 +5094,23 @@ async def test_update_generative_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_generative_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_generative_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_generative_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py index 703d4b606ddf..b8bd1d2f1c54 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py @@ -1250,22 +1250,23 @@ async def test_list_changelogs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_changelogs - ] = mock_object + ] = mock_rpc request = {} await client.list_changelogs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_changelogs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1826,22 +1827,23 @@ async def test_get_changelog_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_changelog - ] = mock_object + ] = mock_rpc request = {} await client.get_changelog(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_changelog(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py index aef4e1dbdb91..e2dd4905bc86 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py @@ -1254,22 +1254,23 @@ async def test_list_deployments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_deployments - ] = mock_object + ] = mock_rpc request = {} await client.list_deployments(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1818,22 +1819,23 @@ async def test_get_deployment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_deployment - ] = mock_object + ] = mock_rpc request = {} await client.get_deployment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py index dd06d87835a8..a818583d5c57 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py @@ -1281,22 +1281,23 @@ async def test_get_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.get_entity_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1690,22 +1691,23 @@ async def test_create_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.create_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2118,22 +2120,23 @@ async def test_update_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.update_entity_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2521,22 +2524,23 @@ async def test_delete_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2900,22 +2904,23 @@ async def test_list_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.list_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3434,8 +3439,9 @@ def test_export_entity_types_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_entity_types(request) @@ -3491,26 +3497,28 @@ async def test_export_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.export_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3738,8 +3746,9 @@ def test_import_entity_types_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_entity_types(request) @@ -3795,26 +3804,28 @@ async def test_import_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.import_entity_types(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py index d7aedf0d1b11..d0ca52497e2f 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py @@ -1290,22 +1290,23 @@ async def test_list_environments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_environments - ] = mock_object + ] = mock_rpc request = {} await 
client.list_environments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_environments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1872,22 +1873,23 @@ async def test_get_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_environment - ] = mock_object + ] = mock_rpc request = {} await client.get_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2194,8 +2196,9 @@ def test_create_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_environment(request) @@ -2251,26 +2254,28 @@ async def test_create_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_environment - ] = mock_object + ] = mock_rpc request = {} await client.create_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2587,8 +2592,9 @@ def test_update_environment_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_environment(request) @@ -2644,26 +2650,28 @@ async def test_update_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_environment - ] = mock_object + ] = mock_rpc request = {} await client.update_environment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3035,22 +3043,23 @@ async def test_delete_environment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_environment - ] = mock_object + ] = mock_rpc request = {} await client.delete_environment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_environment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3415,22 +3424,23 @@ async def test_lookup_environment_history_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup_environment_history - ] = mock_object + ] = mock_rpc request = {} await client.lookup_environment_history(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup_environment_history(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3948,8 +3958,9 @@ def test_run_continuous_test_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_continuous_test(request) @@ -4005,26 +4016,28 @@ async def test_run_continuous_test_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_continuous_test - ] = mock_object + ] = mock_rpc request = {} await client.run_continuous_test(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_continuous_test(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4309,22 +4322,23 @@ async def test_list_continuous_test_results_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_continuous_test_results - ] = mock_object + ] = mock_rpc request = {} await client.list_continuous_test_results(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_continuous_test_results(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4834,8 +4848,9 @@ def test_deploy_flow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.deploy_flow(request) @@ -4889,26 +4904,28 @@ async def test_deploy_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.deploy_flow - ] = mock_object + ] = mock_rpc request = {} await client.deploy_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.deploy_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py index bf999cd76ad2..2f046b0f1d34 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py @@ -1257,22 +1257,23 @@ async def test_list_experiments_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_experiments - ] = mock_object + ] = mock_rpc request = {} await client.list_experiments(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_experiments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1827,22 +1828,23 @@ async def test_get_experiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_experiment - ] = mock_object + ] = mock_rpc request = {} await client.get_experiment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_experiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2221,22 +2223,23 @@ async def test_create_experiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_experiment - ] = mock_object + ] = mock_rpc request = {} await client.create_experiment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_experiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2632,22 +2635,23 @@ async def test_update_experiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_experiment - ] = mock_object + ] = mock_rpc request = {} await client.update_experiment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_experiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3028,22 +3032,23 @@ async def test_delete_experiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_experiment - ] = mock_object + ] = mock_rpc request = {} await client.delete_experiment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_experiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3407,22 +3412,23 @@ async def test_start_experiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.start_experiment - ] = mock_object + ] = mock_rpc request = {} await client.start_experiment(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.start_experiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3791,22 +3797,23 @@ async def test_stop_experiment_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.stop_experiment - ] = mock_object + ] = mock_rpc request = {} await client.stop_experiment(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.stop_experiment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py index 27d07c7468fa..67a434252491 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py @@ -1252,22 +1252,23 @@ async def test_create_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_flow - ] = mock_object + ] = mock_rpc request = {} await client.create_flow(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1623,22 +1624,23 @@ async def test_delete_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_flow - ] = mock_object + ] = mock_rpc request = {} await client.delete_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1980,22 +1982,23 @@ async def test_list_flows_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_flows - ] = mock_object + ] = mock_rpc request = {} await client.list_flows(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_flows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2550,22 +2553,23 @@ async def test_get_flow_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_flow - ] = mock_object + ] = mock_rpc request = {} await client.get_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2930,22 +2934,23 @@ async def test_update_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_flow - ] = mock_object + ] = mock_rpc request = {} await client.update_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3252,8 +3257,9 @@ def test_train_flow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.train_flow(request) @@ -3305,26 +3311,28 @@ async def test_train_flow_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.train_flow - ] = mock_object + ] = mock_rpc request = {} await client.train_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.train_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3672,22 +3680,23 @@ async def test_validate_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_flow - ] = mock_object + ] = mock_rpc request = {} await client.validate_flow(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.validate_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3969,22 +3978,23 @@ async def test_get_flow_validation_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_flow_validation_result - ] = mock_object + ] = mock_rpc request = {} await client.get_flow_validation_result(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_flow_validation_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4289,8 +4299,9 @@ def test_import_flow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_flow(request) @@ -4344,26 +4355,28 @@ async def test_import_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_flow - ] = mock_object + ] = mock_rpc request = {} await client.import_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4573,8 +4586,9 @@ def test_export_flow_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_flow(request) @@ -4628,26 +4642,28 @@ async def test_export_flow_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_flow - ] = mock_object + ] = mock_rpc request = {} await client.export_flow(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_flow(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py index 2dadaf10ade6..3509dcdf879a 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py @@ -1251,22 +1251,23 @@ async def test_list_generators_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_generators - ] = mock_object + ] = mock_rpc request = {} await client.list_generators(request) # Establish that 
the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_generators(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1814,22 +1815,23 @@ async def test_get_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_generator - ] = mock_object + ] = mock_rpc request = {} await client.get_generator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2183,22 +2185,23 @@ async def test_create_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_generator - ] = mock_object + ] = mock_rpc request = {} await client.create_generator(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2564,22 +2567,23 @@ async def test_update_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_generator - ] = mock_object + ] = mock_rpc request = {} await client.update_generator(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2935,22 +2939,23 @@ async def test_delete_generator_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_generator - ] = mock_object + ] = mock_rpc request = {} await client.delete_generator(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_generator(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py index 7aafde996419..1f38085b4554 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py @@ -1235,22 +1235,23 @@ async def test_list_intents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_intents - ] = mock_object + ] = mock_rpc request = {} await client.list_intents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_intents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1805,22 +1806,23 @@ async def test_get_intent_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_intent - ] = mock_object + ] = mock_rpc request = {} await client.get_intent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2187,22 +2189,23 @@ async def test_create_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_intent - ] = mock_object + ] = mock_rpc request = {} await client.create_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2577,22 +2580,23 @@ async def test_update_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_intent - ] = mock_object + ] = mock_rpc request = {} await client.update_intent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2948,22 +2952,23 @@ async def test_delete_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_intent - ] = mock_object + ] = mock_rpc request = {} await client.delete_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3249,8 +3254,9 @@ def test_import_intents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_intents(request) @@ -3304,26 +3310,28 @@ async def test_import_intents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_intents - ] = mock_object + ] = mock_rpc request = {} await client.import_intents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_intents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3533,8 +3541,9 @@ def test_export_intents_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_intents(request) @@ -3588,26 +3597,28 @@ async def test_export_intents_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_intents - ] = mock_object + ] = mock_rpc request = {} await client.export_intents(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_intents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py index 527a5c488360..7a13dfa2ef01 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py @@ -1227,22 +1227,23 @@ async def test_list_pages_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_pages - ] = mock_object + ] = mock_rpc request = {} await client.list_pages(request) # Establish 
that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_pages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1794,22 +1795,23 @@ async def test_get_page_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_page - ] = mock_object + ] = mock_rpc request = {} await client.get_page(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_page(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2171,22 +2173,23 @@ async def test_create_page_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_page - ] = mock_object + ] = mock_rpc request = {} await client.create_page(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_page(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2556,22 +2559,23 @@ async def test_update_page_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_page - ] = mock_object + ] = mock_rpc request = {} await client.update_page(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_page(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2925,22 +2929,23 @@ async def test_delete_page_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_page - ] = mock_object + ] = mock_rpc request = {} await client.delete_page(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_page(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py index 773d17c206a5..019fcfd61dd3 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py @@ -1406,22 +1406,23 @@ async def test_create_security_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_security_settings - ] = mock_object + ] = mock_rpc request = {} await client.create_security_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_security_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1865,22 +1866,23 @@ async def test_get_security_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_security_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_security_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_security_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2302,22 +2304,23 @@ async def test_update_security_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_security_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_security_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_security_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2732,22 +2735,23 @@ async def test_list_security_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_security_settings - ] = mock_object + ] = mock_rpc request = {} await client.list_security_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_security_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3315,22 +3319,23 @@ async def test_delete_security_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_security_settings - ] = mock_object + ] = mock_rpc request = {} await client.delete_security_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_security_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py index 34c921c7a716..6424ead20475 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py @@ -1348,22 +1348,23 @@ async def test_list_session_entity_types_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_session_entity_types - ] = mock_object + ] = mock_rpc request = {} await client.list_session_entity_types(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_session_entity_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1950,22 +1951,23 @@ async def test_get_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.get_session_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2349,22 +2351,23 @@ async def test_create_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.create_session_entity_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2762,22 +2765,23 @@ async def test_update_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.update_session_entity_type(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3166,22 +3170,23 @@ async def test_delete_session_entity_type_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_session_entity_type - ] = mock_object + ] = mock_rpc request = {} await client.delete_session_entity_type(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_session_entity_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_sessions.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_sessions.py index 7df3ded84cbe..6f546a7eb231 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_sessions.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_sessions.py @@ -1249,22 +1249,23 @@ async def test_detect_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.detect_intent - ] = mock_object + ] = mock_rpc request = {} await client.detect_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.detect_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1547,22 +1548,23 @@ async def test_server_streaming_detect_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.server_streaming_detect_intent - ] = mock_object + ] = mock_rpc request = {} await client.server_streaming_detect_intent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.server_streaming_detect_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1770,22 +1772,23 @@ async def test_streaming_detect_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.streaming_detect_intent - ] = mock_object + ] = mock_rpc request = [{}] await client.streaming_detect_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.streaming_detect_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1989,22 +1992,23 @@ async def test_match_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.match_intent - ] = mock_object + ] = mock_rpc request = {} await client.match_intent(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.match_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2267,22 +2271,23 @@ async def test_fulfill_intent_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.fulfill_intent - ] = mock_object + ] = mock_rpc request = {} await client.fulfill_intent(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.fulfill_intent(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2569,22 +2574,23 @@ async def test_submit_answer_feedback_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.submit_answer_feedback - ] = mock_object + ] = mock_rpc request = {} await client.submit_answer_feedback(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.submit_answer_feedback(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py index 152e4633278a..998b46cc45d3 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py @@ -1272,22 +1272,23 @@ async def test_list_test_cases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_test_cases - ] = mock_object + ] = mock_rpc request = {} await client.list_test_cases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_test_cases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1836,22 +1837,23 @@ async def test_batch_delete_test_cases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_delete_test_cases - ] = mock_object + ] = mock_rpc request = {} await client.batch_delete_test_cases(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_delete_test_cases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2210,22 +2212,23 @@ async def test_get_test_case_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_test_case - ] = mock_object + ] = mock_rpc request = {} await client.get_test_case(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_test_case(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2587,22 +2590,23 @@ async def test_create_test_case_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_test_case - ] = mock_object + ] = mock_rpc request = {} await client.create_test_case(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_test_case(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2974,22 +2978,23 @@ async def test_update_test_case_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_test_case - ] = mock_object + ] = mock_rpc request = {} await client.update_test_case(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_test_case(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3300,8 +3305,9 @@ def test_run_test_case_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.run_test_case(request) @@ -3355,26 +3361,28 @@ async def test_run_test_case_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_test_case - ] = mock_object + ] = mock_rpc request = {} await client.run_test_case(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.run_test_case(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3594,8 +3602,9 @@ def test_batch_run_test_cases_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.batch_run_test_cases(request) @@ -3651,26 +3660,28 @@ async def test_batch_run_test_cases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_run_test_cases - ] = mock_object + ] = mock_rpc request = {} await client.batch_run_test_cases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.batch_run_test_cases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3952,22 +3963,23 @@ async def test_calculate_coverage_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.calculate_coverage - ] = mock_object + ] = mock_rpc request = {} await client.calculate_coverage(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.calculate_coverage(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4194,8 +4206,9 @@ def test_import_test_cases_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_test_cases(request) @@ -4251,26 +4264,28 @@ async def test_import_test_cases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_test_cases - ] = mock_object + ] = mock_rpc request = {} await client.import_test_cases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_test_cases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4496,8 +4511,9 @@ def test_export_test_cases_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_test_cases(request) @@ -4553,26 +4569,28 @@ async def test_export_test_cases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_test_cases - ] = mock_object + ] = mock_rpc request = {} await client.export_test_cases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_test_cases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4859,22 +4877,23 @@ async def test_list_test_case_results_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_test_case_results - ] = mock_object + ] = mock_rpc request = {} await client.list_test_case_results(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_test_case_results(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5453,22 +5472,23 @@ async def test_get_test_case_result_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_test_case_result - ] = mock_object + ] = mock_rpc request = {} await client.get_test_case_result(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_test_case_result(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py index 5359a4941bec..d73c6d87dc90 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py @@ -1386,22 +1386,23 @@ async def test_list_transition_route_groups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_transition_route_groups - ] = mock_object + ] = mock_rpc request = {} await client.list_transition_route_groups(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_transition_route_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1988,22 +1989,23 @@ async def test_get_transition_route_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_transition_route_group - ] = mock_object + ] = mock_rpc request = {} await client.get_transition_route_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_transition_route_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2387,22 +2389,23 @@ async def test_create_transition_route_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_transition_route_group - ] = mock_object + ] = mock_rpc request = {} await client.create_transition_route_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_transition_route_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2802,22 +2805,23 @@ async def test_update_transition_route_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_transition_route_group - ] = mock_object + ] = mock_rpc request = {} await client.update_transition_route_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_transition_route_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3203,22 +3207,23 @@ async def test_delete_transition_route_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_transition_route_group - ] = mock_object + ] = mock_rpc request = {} await client.delete_transition_route_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_transition_route_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py index d07edcdaa702..9e1048175ba9 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py @@ -1247,22 +1247,23 @@ async def test_list_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_versions - ] = mock_object + ] = mock_rpc request = {} await client.list_versions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1814,22 +1815,23 @@ async def test_get_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_version - ] = mock_object + ] = mock_rpc request = {} await client.get_version(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2124,8 +2126,9 @@ def test_create_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_version(request) @@ -2179,26 +2182,28 @@ async def test_create_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_version - ] = mock_object + ] = mock_rpc request = {} await client.create_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2559,22 +2564,23 @@ async def test_update_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_version - ] = mock_object + ] = mock_rpc request = {} await client.update_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2932,22 +2938,23 @@ async def test_delete_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_version - ] = mock_object + ] = mock_rpc request = {} await client.delete_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3231,8 +3238,9 @@ def test_load_version_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.load_version(request) @@ -3286,26 +3294,28 @@ async def test_load_version_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.load_version - ] = mock_object + ] = mock_rpc request = {} await client.load_version(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.load_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3660,22 +3670,23 @@ async def test_compare_versions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.compare_versions - ] = mock_object + ] = mock_rpc request = {} await client.compare_versions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.compare_versions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py index ec37201b7396..df057720c7c9 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py @@ -1235,22 +1235,23 @@ async def test_list_webhooks_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_webhooks - ] = mock_object + ] = mock_rpc request = {} await client.list_webhooks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_webhooks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1799,22 +1800,23 @@ async def test_get_webhook_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_webhook - ] = mock_object + ] = mock_rpc request = {} await client.get_webhook(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_webhook(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2169,22 +2171,23 @@ async def test_create_webhook_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_webhook - ] = mock_object + ] = mock_rpc request = {} await client.create_webhook(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_webhook(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2549,22 +2552,23 @@ async def test_update_webhook_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_webhook - ] = mock_object + ] = mock_rpc request = {} await client.update_webhook(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_webhook(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2920,22 +2924,23 @@ async def test_delete_webhook_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_webhook - ] = mock_object + ] = mock_rpc request = {} await client.delete_webhook(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_webhook(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-dialogflow/CHANGELOG.md b/packages/google-cloud-dialogflow/CHANGELOG.md index 5ae0112dd3a7..fa182a35ae50 100644 --- a/packages/google-cloud-dialogflow/CHANGELOG.md +++ b/packages/google-cloud-dialogflow/CHANGELOG.md @@ -4,6 +4,56 @@ [1]: https://pypi.org/project/dialogflow/#history +## [2.33.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.32.0...google-cloud-dialogflow-v2.33.0) (2024-10-08) + + +### Features + +* add ALAW encoding value to Audio encoding enum ([c169348](https://github.com/googleapis/google-cloud-python/commit/c1693486f314261e3799547ee6f5e53dd7e687fc)) +* created new boolean fields in conversation dataset for zone isolation and zone separation compliance status ([c169348](https://github.com/googleapis/google-cloud-python/commit/c1693486f314261e3799547ee6f5e53dd7e687fc)) + +## 
[2.32.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.31.0...google-cloud-dialogflow-v2.32.0) (2024-09-23) + + +### Features + +* created new boolean fields in conversation model for zone isolation and zone separation compliance status ([1f8b564](https://github.com/googleapis/google-cloud-python/commit/1f8b5640b0ac5397318ede4ebcfa120120ebccc8)) + +## [2.31.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.30.2...google-cloud-dialogflow-v2.31.0) (2024-08-08) + + +### Features + +* Add GenerateStatelessSuggestion related endpoints and types ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* Add Generator related services and types ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* Add Proactive Generative Knowledge Assist endpoints and types ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) + + +### Bug Fixes + +* An existing method_signature `parent` is fixed for method `BatchCreateMessages` in service `Conversations` ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* Changed field behavior for an existing field `parent` in message `.google.cloud.dialogflow.v2beta1.SearchKnowledgeRequest` ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* Changed field behavior for an existing field `session_id` in message `.google.cloud.dialogflow.v2beta1.SearchKnowledgeRequest` ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) + + +### Documentation + +* A comment for field `assist_query_params` in message `.google.cloud.dialogflow.v2beta1.SuggestConversationSummaryRequest` is changed 
([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for field `audio` in message `.google.cloud.dialogflow.v2beta1.AudioInput` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for field `context_size` in message `.google.cloud.dialogflow.v2beta1.SuggestConversationSummaryRequest` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for field `conversation_stage` in message `.google.cloud.dialogflow.v2beta1.Conversation` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for field `conversation` in message `.google.cloud.dialogflow.v2beta1.SearchKnowledgeRequest` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for field `filter` in message `.google.cloud.dialogflow.v2beta1.ListConversationsRequest` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for field `latest_message` in message `.google.cloud.dialogflow.v2beta1.GenerateStatelessSummaryRequest` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for field `latest_message` in message `.google.cloud.dialogflow.v2beta1.SearchKnowledgeRequest` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for field `latest_message` in message `.google.cloud.dialogflow.v2beta1.SuggestConversationSummaryRequest` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for field `live_person_config` in 
message `.google.cloud.dialogflow.v2beta1.HumanAgentHandoffConfig` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for field `max_context_size` in message `.google.cloud.dialogflow.v2beta1.GenerateStatelessSummaryRequest` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for field `name` in message `.google.cloud.dialogflow.v2beta1.Conversation` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for field `parent` in message `.google.cloud.dialogflow.v2beta1.SearchKnowledgeRequest` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for field `session_id` in message `.google.cloud.dialogflow.v2beta1.SearchKnowledgeRequest` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) +* A comment for message `HumanAgentHandoffConfig` is changed ([de0df45](https://github.com/googleapis/google-cloud-python/commit/de0df45e938ba89f2c533aa08f88242ffc9000ab)) + ## [2.30.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.30.1...google-cloud-dialogflow-v2.30.2) (2024-07-30) diff --git a/packages/google-cloud-dialogflow/docs/dialogflow_v2/encryption_spec_service.rst b/packages/google-cloud-dialogflow/docs/dialogflow_v2/encryption_spec_service.rst new file mode 100644 index 000000000000..974580f38a80 --- /dev/null +++ b/packages/google-cloud-dialogflow/docs/dialogflow_v2/encryption_spec_service.rst @@ -0,0 +1,6 @@ +EncryptionSpecService +--------------------------------------- + +.. 
automodule:: google.cloud.dialogflow_v2.services.encryption_spec_service + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow/docs/dialogflow_v2/generators.rst b/packages/google-cloud-dialogflow/docs/dialogflow_v2/generators.rst new file mode 100644 index 000000000000..0689d13fcf20 --- /dev/null +++ b/packages/google-cloud-dialogflow/docs/dialogflow_v2/generators.rst @@ -0,0 +1,10 @@ +Generators +---------------------------- + +.. automodule:: google.cloud.dialogflow_v2.services.generators + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflow_v2.services.generators.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow/docs/dialogflow_v2/services_.rst b/packages/google-cloud-dialogflow/docs/dialogflow_v2/services_.rst index c6039fcafb7b..5bdf1deb9d22 100644 --- a/packages/google-cloud-dialogflow/docs/dialogflow_v2/services_.rst +++ b/packages/google-cloud-dialogflow/docs/dialogflow_v2/services_.rst @@ -11,9 +11,11 @@ Services for Google Cloud Dialogflow v2 API conversation_profiles conversations documents + encryption_spec_service entity_types environments fulfillments + generators intents knowledge_bases participants diff --git a/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/encryption_spec_service.rst b/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/encryption_spec_service.rst new file mode 100644 index 000000000000..3632085a7314 --- /dev/null +++ b/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/encryption_spec_service.rst @@ -0,0 +1,6 @@ +EncryptionSpecService +--------------------------------------- + +.. 
automodule:: google.cloud.dialogflow_v2beta1.services.encryption_spec_service + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/generators.rst b/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/generators.rst new file mode 100644 index 000000000000..36cde2b4f60d --- /dev/null +++ b/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/generators.rst @@ -0,0 +1,10 @@ +Generators +---------------------------- + +.. automodule:: google.cloud.dialogflow_v2beta1.services.generators + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflow_v2beta1.services.generators.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/services_.rst b/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/services_.rst index dc7790aa3046..71e06561781e 100644 --- a/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/services_.rst +++ b/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/services_.rst @@ -9,9 +9,11 @@ Services for Google Cloud Dialogflow v2beta1 API conversation_profiles conversations documents + encryption_spec_service entity_types environments fulfillments + generators intents knowledge_bases participants diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow/__init__.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow/__init__.py index 1e3ae8b0e581..0a62e6917c08 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow/__init__.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow/__init__.py @@ -56,6 +56,12 @@ DocumentsAsyncClient, ) from google.cloud.dialogflow_v2.services.documents.client import DocumentsClient +from google.cloud.dialogflow_v2.services.encryption_spec_service.async_client import ( + EncryptionSpecServiceAsyncClient, +) +from google.cloud.dialogflow_v2.services.encryption_spec_service.client import ( + EncryptionSpecServiceClient, +) from 
google.cloud.dialogflow_v2.services.entity_types.async_client import ( EntityTypesAsyncClient, ) @@ -68,6 +74,10 @@ FulfillmentsAsyncClient, ) from google.cloud.dialogflow_v2.services.fulfillments.client import FulfillmentsClient +from google.cloud.dialogflow_v2.services.generators.async_client import ( + GeneratorsAsyncClient, +) +from google.cloud.dialogflow_v2.services.generators.client import GeneratorsClient from google.cloud.dialogflow_v2.services.intents.async_client import IntentsAsyncClient from google.cloud.dialogflow_v2.services.intents.client import IntentsClient from google.cloud.dialogflow_v2.services.knowledge_bases.async_client import ( @@ -147,6 +157,8 @@ Conversation, ConversationPhoneNumber, CreateConversationRequest, + GenerateStatelessSuggestionRequest, + GenerateStatelessSuggestionResponse, GenerateStatelessSummaryRequest, GenerateStatelessSummaryResponse, GetConversationRequest, @@ -236,6 +248,13 @@ ReloadDocumentRequest, UpdateDocumentRequest, ) +from google.cloud.dialogflow_v2.types.encryption_spec import ( + EncryptionSpec, + GetEncryptionSpecRequest, + InitializeEncryptionSpecMetadata, + InitializeEncryptionSpecRequest, + InitializeEncryptionSpecResponse, +) from google.cloud.dialogflow_v2.types.entity_type import ( BatchCreateEntitiesRequest, BatchDeleteEntitiesRequest, @@ -270,6 +289,25 @@ UpdateFulfillmentRequest, ) from google.cloud.dialogflow_v2.types.gcs import GcsDestination, GcsSources +from google.cloud.dialogflow_v2.types.generator import ( + ConversationContext, + CreateGeneratorRequest, + DeleteGeneratorRequest, + FewShotExample, + Generator, + GeneratorSuggestion, + GetGeneratorRequest, + InferenceParameter, + ListGeneratorsRequest, + ListGeneratorsResponse, + MessageEntry, + SummarizationContext, + SummarizationSection, + SummarizationSectionList, + SummarySuggestion, + TriggerEvent, + UpdateGeneratorRequest, +) from google.cloud.dialogflow_v2.types.human_agent_assistant_event import ( HumanAgentAssistantEvent, ) @@ -310,6 
+348,7 @@ GetParticipantRequest, InputTextConfig, IntentSuggestion, + KnowledgeAssistAnswer, ListParticipantsRequest, ListParticipantsResponse, Message, @@ -325,6 +364,8 @@ SuggestFaqAnswersResponse, SuggestionInput, SuggestionResult, + SuggestKnowledgeAssistRequest, + SuggestKnowledgeAssistResponse, SuggestSmartRepliesRequest, SuggestSmartRepliesResponse, UpdateParticipantRequest, @@ -390,12 +431,16 @@ "ConversationsAsyncClient", "DocumentsClient", "DocumentsAsyncClient", + "EncryptionSpecServiceClient", + "EncryptionSpecServiceAsyncClient", "EntityTypesClient", "EntityTypesAsyncClient", "EnvironmentsClient", "EnvironmentsAsyncClient", "FulfillmentsClient", "FulfillmentsAsyncClient", + "GeneratorsClient", + "GeneratorsAsyncClient", "IntentsClient", "IntentsAsyncClient", "KnowledgeBasesClient", @@ -452,6 +497,8 @@ "Conversation", "ConversationPhoneNumber", "CreateConversationRequest", + "GenerateStatelessSuggestionRequest", + "GenerateStatelessSuggestionResponse", "GenerateStatelessSummaryRequest", "GenerateStatelessSummaryResponse", "GetConversationRequest", @@ -532,6 +579,11 @@ "ListDocumentsResponse", "ReloadDocumentRequest", "UpdateDocumentRequest", + "EncryptionSpec", + "GetEncryptionSpecRequest", + "InitializeEncryptionSpecMetadata", + "InitializeEncryptionSpecRequest", + "InitializeEncryptionSpecResponse", "BatchCreateEntitiesRequest", "BatchDeleteEntitiesRequest", "BatchDeleteEntityTypesRequest", @@ -561,6 +613,23 @@ "UpdateFulfillmentRequest", "GcsDestination", "GcsSources", + "ConversationContext", + "CreateGeneratorRequest", + "DeleteGeneratorRequest", + "FewShotExample", + "Generator", + "GeneratorSuggestion", + "GetGeneratorRequest", + "InferenceParameter", + "ListGeneratorsRequest", + "ListGeneratorsResponse", + "MessageEntry", + "SummarizationContext", + "SummarizationSection", + "SummarizationSectionList", + "SummarySuggestion", + "UpdateGeneratorRequest", + "TriggerEvent", "HumanAgentAssistantEvent", "BatchDeleteIntentsRequest", 
"BatchUpdateIntentsRequest", @@ -594,6 +663,7 @@ "GetParticipantRequest", "InputTextConfig", "IntentSuggestion", + "KnowledgeAssistAnswer", "ListParticipantsRequest", "ListParticipantsResponse", "Message", @@ -609,6 +679,8 @@ "SuggestFaqAnswersResponse", "SuggestionInput", "SuggestionResult", + "SuggestKnowledgeAssistRequest", + "SuggestKnowledgeAssistResponse", "SuggestSmartRepliesRequest", "SuggestSmartRepliesResponse", "UpdateParticipantRequest", diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py index 4b29f36adc7f..e6b5564d559f 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.30.2" # {x-release-please-version} +__version__ = "2.33.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/__init__.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/__init__.py index 473b17f59766..d1ec29ce66a1 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/__init__.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/__init__.py @@ -35,9 +35,14 @@ ) from .services.conversations import ConversationsAsyncClient, ConversationsClient from .services.documents import DocumentsAsyncClient, DocumentsClient +from .services.encryption_spec_service import ( + EncryptionSpecServiceAsyncClient, + EncryptionSpecServiceClient, +) from .services.entity_types import EntityTypesAsyncClient, EntityTypesClient from .services.environments import EnvironmentsAsyncClient, EnvironmentsClient from .services.fulfillments import FulfillmentsAsyncClient, FulfillmentsClient +from .services.generators import GeneratorsAsyncClient, 
GeneratorsClient from .services.intents import IntentsAsyncClient, IntentsClient from .services.knowledge_bases import KnowledgeBasesAsyncClient, KnowledgeBasesClient from .services.participants import ParticipantsAsyncClient, ParticipantsClient @@ -100,6 +105,8 @@ Conversation, ConversationPhoneNumber, CreateConversationRequest, + GenerateStatelessSuggestionRequest, + GenerateStatelessSuggestionResponse, GenerateStatelessSummaryRequest, GenerateStatelessSummaryResponse, GetConversationRequest, @@ -189,6 +196,13 @@ ReloadDocumentRequest, UpdateDocumentRequest, ) +from .types.encryption_spec import ( + EncryptionSpec, + GetEncryptionSpecRequest, + InitializeEncryptionSpecMetadata, + InitializeEncryptionSpecRequest, + InitializeEncryptionSpecResponse, +) from .types.entity_type import ( BatchCreateEntitiesRequest, BatchDeleteEntitiesRequest, @@ -223,6 +237,25 @@ UpdateFulfillmentRequest, ) from .types.gcs import GcsDestination, GcsSources +from .types.generator import ( + ConversationContext, + CreateGeneratorRequest, + DeleteGeneratorRequest, + FewShotExample, + Generator, + GeneratorSuggestion, + GetGeneratorRequest, + InferenceParameter, + ListGeneratorsRequest, + ListGeneratorsResponse, + MessageEntry, + SummarizationContext, + SummarizationSection, + SummarizationSectionList, + SummarySuggestion, + TriggerEvent, + UpdateGeneratorRequest, +) from .types.human_agent_assistant_event import HumanAgentAssistantEvent from .types.intent import ( BatchDeleteIntentsRequest, @@ -261,6 +294,7 @@ GetParticipantRequest, InputTextConfig, IntentSuggestion, + KnowledgeAssistAnswer, ListParticipantsRequest, ListParticipantsResponse, Message, @@ -276,6 +310,8 @@ SuggestFaqAnswersResponse, SuggestionInput, SuggestionResult, + SuggestKnowledgeAssistRequest, + SuggestKnowledgeAssistResponse, SuggestSmartRepliesRequest, SuggestSmartRepliesResponse, UpdateParticipantRequest, @@ -326,9 +362,11 @@ "ConversationProfilesAsyncClient", "ConversationsAsyncClient", "DocumentsAsyncClient", + 
"EncryptionSpecServiceAsyncClient", "EntityTypesAsyncClient", "EnvironmentsAsyncClient", "FulfillmentsAsyncClient", + "GeneratorsAsyncClient", "IntentsAsyncClient", "KnowledgeBasesAsyncClient", "ParticipantsAsyncClient", @@ -367,6 +405,7 @@ "Context", "ContextsClient", "Conversation", + "ConversationContext", "ConversationDataset", "ConversationDatasetsClient", "ConversationEvent", @@ -390,6 +429,7 @@ "CreateDocumentRequest", "CreateEntityTypeRequest", "CreateEnvironmentRequest", + "CreateGeneratorRequest", "CreateIntentRequest", "CreateKnowledgeBaseRequest", "CreateParticipantRequest", @@ -406,6 +446,7 @@ "DeleteDocumentRequest", "DeleteEntityTypeRequest", "DeleteEnvironmentRequest", + "DeleteGeneratorRequest", "DeleteIntentRequest", "DeleteKnowledgeBaseRequest", "DeleteSessionEntityTypeRequest", @@ -418,6 +459,8 @@ "Document", "DocumentsClient", "DtmfParameters", + "EncryptionSpec", + "EncryptionSpecServiceClient", "EntityType", "EntityTypeBatch", "EntityTypesClient", @@ -431,12 +474,18 @@ "ExportDocumentRequest", "ExportOperationMetadata", "FaqAnswer", + "FewShotExample", "Fulfillment", "FulfillmentsClient", "GcsDestination", "GcsSources", + "GenerateStatelessSuggestionRequest", + "GenerateStatelessSuggestionResponse", "GenerateStatelessSummaryRequest", "GenerateStatelessSummaryResponse", + "Generator", + "GeneratorSuggestion", + "GeneratorsClient", "GetAgentRequest", "GetContextRequest", "GetConversationDatasetRequest", @@ -445,10 +494,12 @@ "GetConversationProfileRequest", "GetConversationRequest", "GetDocumentRequest", + "GetEncryptionSpecRequest", "GetEntityTypeRequest", "GetEnvironmentHistoryRequest", "GetEnvironmentRequest", "GetFulfillmentRequest", + "GetGeneratorRequest", "GetIntentRequest", "GetKnowledgeBaseRequest", "GetParticipantRequest", @@ -465,6 +516,10 @@ "ImportDocumentTemplate", "ImportDocumentsRequest", "ImportDocumentsResponse", + "InferenceParameter", + "InitializeEncryptionSpecMetadata", + "InitializeEncryptionSpecRequest", + 
"InitializeEncryptionSpecResponse", "InputAudioConfig", "InputConfig", "InputDataset", @@ -474,6 +529,7 @@ "IntentSuggestion", "IntentView", "IntentsClient", + "KnowledgeAssistAnswer", "KnowledgeBase", "KnowledgeBasesClient", "KnowledgeOperationMetadata", @@ -497,6 +553,8 @@ "ListEntityTypesResponse", "ListEnvironmentsRequest", "ListEnvironmentsResponse", + "ListGeneratorsRequest", + "ListGeneratorsResponse", "ListIntentsRequest", "ListIntentsResponse", "ListKnowledgeBasesRequest", @@ -512,6 +570,7 @@ "LoggingConfig", "Message", "MessageAnnotation", + "MessageEntry", "NotificationConfig", "OriginalDetectIntentRequest", "OutputAudio", @@ -557,17 +616,24 @@ "SuggestConversationSummaryResponse", "SuggestFaqAnswersRequest", "SuggestFaqAnswersResponse", + "SuggestKnowledgeAssistRequest", + "SuggestKnowledgeAssistResponse", "SuggestSmartRepliesRequest", "SuggestSmartRepliesResponse", "SuggestionFeature", "SuggestionInput", "SuggestionResult", + "SummarizationContext", + "SummarizationSection", + "SummarizationSectionList", + "SummarySuggestion", "SynthesizeSpeechConfig", "TelephonyDtmf", "TelephonyDtmfEvents", "TextInput", "TextToSpeechSettings", "TrainAgentRequest", + "TriggerEvent", "UndeployConversationModelOperationMetadata", "UndeployConversationModelRequest", "UpdateAnswerRecordRequest", @@ -577,6 +643,7 @@ "UpdateEntityTypeRequest", "UpdateEnvironmentRequest", "UpdateFulfillmentRequest", + "UpdateGeneratorRequest", "UpdateIntentRequest", "UpdateKnowledgeBaseRequest", "UpdateParticipantRequest", diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_metadata.json b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_metadata.json index 7faa9bfb44d2..6f0d74c3adad 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_metadata.json +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_metadata.json @@ -704,6 +704,11 @@ "create_conversation" ] }, + "GenerateStatelessSuggestion": { + 
"methods": [ + "generate_stateless_suggestion" + ] + }, "GenerateStatelessSummary": { "methods": [ "generate_stateless_summary" @@ -749,6 +754,11 @@ "create_conversation" ] }, + "GenerateStatelessSuggestion": { + "methods": [ + "generate_stateless_suggestion" + ] + }, "GenerateStatelessSummary": { "methods": [ "generate_stateless_summary" @@ -794,6 +804,11 @@ "create_conversation" ] }, + "GenerateStatelessSuggestion": { + "methods": [ + "generate_stateless_suggestion" + ] + }, "GenerateStatelessSummary": { "methods": [ "generate_stateless_summary" @@ -967,6 +982,55 @@ } } }, + "EncryptionSpecService": { + "clients": { + "grpc": { + "libraryClient": "EncryptionSpecServiceClient", + "rpcs": { + "GetEncryptionSpec": { + "methods": [ + "get_encryption_spec" + ] + }, + "InitializeEncryptionSpec": { + "methods": [ + "initialize_encryption_spec" + ] + } + } + }, + "grpc-async": { + "libraryClient": "EncryptionSpecServiceAsyncClient", + "rpcs": { + "GetEncryptionSpec": { + "methods": [ + "get_encryption_spec" + ] + }, + "InitializeEncryptionSpec": { + "methods": [ + "initialize_encryption_spec" + ] + } + } + }, + "rest": { + "libraryClient": "EncryptionSpecServiceClient", + "rpcs": { + "GetEncryptionSpec": { + "methods": [ + "get_encryption_spec" + ] + }, + "InitializeEncryptionSpec": { + "methods": [ + "initialize_encryption_spec" + ] + } + } + } + } + }, "EntityTypes": { "clients": { "grpc": { @@ -1294,6 +1358,100 @@ } } }, + "Generators": { + "clients": { + "grpc": { + "libraryClient": "GeneratorsClient", + "rpcs": { + "CreateGenerator": { + "methods": [ + "create_generator" + ] + }, + "DeleteGenerator": { + "methods": [ + "delete_generator" + ] + }, + "GetGenerator": { + "methods": [ + "get_generator" + ] + }, + "ListGenerators": { + "methods": [ + "list_generators" + ] + }, + "UpdateGenerator": { + "methods": [ + "update_generator" + ] + } + } + }, + "grpc-async": { + "libraryClient": "GeneratorsAsyncClient", + "rpcs": { + "CreateGenerator": { + "methods": [ + 
"create_generator" + ] + }, + "DeleteGenerator": { + "methods": [ + "delete_generator" + ] + }, + "GetGenerator": { + "methods": [ + "get_generator" + ] + }, + "ListGenerators": { + "methods": [ + "list_generators" + ] + }, + "UpdateGenerator": { + "methods": [ + "update_generator" + ] + } + } + }, + "rest": { + "libraryClient": "GeneratorsClient", + "rpcs": { + "CreateGenerator": { + "methods": [ + "create_generator" + ] + }, + "DeleteGenerator": { + "methods": [ + "delete_generator" + ] + }, + "GetGenerator": { + "methods": [ + "get_generator" + ] + }, + "ListGenerators": { + "methods": [ + "list_generators" + ] + }, + "UpdateGenerator": { + "methods": [ + "update_generator" + ] + } + } + } + } + }, "Intents": { "clients": { "grpc": { @@ -1552,6 +1710,11 @@ "suggest_faq_answers" ] }, + "SuggestKnowledgeAssist": { + "methods": [ + "suggest_knowledge_assist" + ] + }, "SuggestSmartReplies": { "methods": [ "suggest_smart_replies" @@ -1602,6 +1765,11 @@ "suggest_faq_answers" ] }, + "SuggestKnowledgeAssist": { + "methods": [ + "suggest_knowledge_assist" + ] + }, "SuggestSmartReplies": { "methods": [ "suggest_smart_replies" @@ -1652,6 +1820,11 @@ "suggest_faq_answers" ] }, + "SuggestKnowledgeAssist": { + "methods": [ + "suggest_knowledge_assist" + ] + }, "SuggestSmartReplies": { "methods": [ "suggest_smart_replies" diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py index 4b29f36adc7f..e6b5564d559f 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.30.2" # {x-release-please-version} +__version__ = "2.33.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/async_client.py index dbb350dffc92..33c3bea5ab9c 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -186,9 +185,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AgentsClient).get_transport_class, type(AgentsClient) - ) + get_transport_class = AgentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/client.py index 1ec96a5a0213..4ac36a2701e7 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/client.py @@ -657,7 +657,7 @@ def __init__( transport_init: Union[ Type[AgentsTransport], Callable[..., AgentsTransport] ] = ( - type(self).get_transport_class(transport) + AgentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AgentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/async_client.py index 5f040c3a9a51..9ae205479eea 100644 --- 
a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -200,9 +199,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(AnswerRecordsClient).get_transport_class, type(AnswerRecordsClient) - ) + get_transport_class = AnswerRecordsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py index 2178ad86a980..0e044e1f9841 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py @@ -701,7 +701,7 @@ def __init__( transport_init: Union[ Type[AnswerRecordsTransport], Callable[..., AnswerRecordsTransport] ] = ( - type(self).get_transport_class(transport) + AnswerRecordsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AnswerRecordsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/async_client.py index 3808aeb30bae..50cd3a4370f4 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -185,9 +184,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ContextsClient).get_transport_class, type(ContextsClient) - ) + get_transport_class = ContextsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/client.py index 6c2fcd892984..f10af0fbdab1 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/client.py @@ -661,7 +661,7 @@ def __init__( transport_init: Union[ Type[ContextsTransport], Callable[..., ContextsTransport] ] = ( - type(self).get_transport_class(transport) + ContextsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ContextsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/async_client.py index cf2dba2a38b2..b90d22fbe8f7 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -205,10 +204,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversationDatasetsClient).get_transport_class, - type(ConversationDatasetsClient), - ) + get_transport_class = ConversationDatasetsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/client.py index edabef8eb91d..1d0dd0fd7630 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/client.py @@ -679,7 +679,7 @@ def __init__( Type[ConversationDatasetsTransport], Callable[..., ConversationDatasetsTransport], ] = ( - type(self).get_transport_class(transport) + ConversationDatasetsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConversationDatasetsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/async_client.py index 61a981270114..0cdefdaa996b 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -215,10 +214,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversationModelsClient).get_transport_class, - type(ConversationModelsClient), - ) + get_transport_class = ConversationModelsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/client.py index a98c1de5efac..c0db5a9decd2 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/client.py @@ -741,7 +741,7 @@ def __init__( Type[ConversationModelsTransport], Callable[..., ConversationModelsTransport], ] = ( - type(self).get_transport_class(transport) + ConversationModelsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConversationModelsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/async_client.py index 185ae45bf0a2..ce4a623eb982 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -99,6 +98,8 @@ class ConversationProfilesAsyncClient: ) document_path = staticmethod(ConversationProfilesClient.document_path) parse_document_path = staticmethod(ConversationProfilesClient.parse_document_path) + generator_path = staticmethod(ConversationProfilesClient.generator_path) + parse_generator_path = staticmethod(ConversationProfilesClient.parse_generator_path) knowledge_base_path = staticmethod(ConversationProfilesClient.knowledge_base_path) parse_knowledge_base_path = staticmethod( ConversationProfilesClient.parse_knowledge_base_path @@ -225,10 +226,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversationProfilesClient).get_transport_class, - type(ConversationProfilesClient), - ) + get_transport_class = ConversationProfilesClient.get_transport_class def __init__( self, @@ -1127,7 +1125,7 @@ async def sample_clear_suggestion_feature_config(): request = dialogflow_v2.ClearSuggestionFeatureConfigRequest( conversation_profile="conversation_profile_value", participant_role="END_USER", - suggestion_feature_type="KNOWLEDGE_SEARCH", + suggestion_feature_type="KNOWLEDGE_ASSIST", ) # Make the request diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/client.py index 22d8690c7775..3479c509235c 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/client.py @@ -298,6 +298,28 @@ def parse_document_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def generator_path( + project: str, + location: str, + generator: str, + ) -> str: + """Returns 
a fully-qualified generator string.""" + return "projects/{project}/locations/{location}/generators/{generator}".format( + project=project, + location=location, + generator=generator, + ) + + @staticmethod + def parse_generator_path(path: str) -> Dict[str, str]: + """Parses a generator path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/generators/(?P<generator>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def knowledge_base_path( project: str, @@ -777,7 +799,7 @@ def __init__( Type[ConversationProfilesTransport], Callable[..., ConversationProfilesTransport], ] = ( - type(self).get_transport_class(transport) + ConversationProfilesClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConversationProfilesTransport], transport) ) @@ -1613,7 +1635,7 @@ def sample_clear_suggestion_feature_config(): request = dialogflow_v2.ClearSuggestionFeatureConfigRequest( conversation_profile="conversation_profile_value", participant_role="END_USER", - suggestion_feature_type="KNOWLEDGE_SEARCH", + suggestion_feature_type="KNOWLEDGE_ASSIST", ) # Make the request diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/async_client.py index 61318d1aae1e..7c2664bd6cb2 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -50,7 +49,7 @@ from google.cloud.dialogflow_v2.services.conversations import pagers from google.cloud.dialogflow_v2.types import conversation from google.cloud.dialogflow_v2.types import conversation as gcd_conversation -from google.cloud.dialogflow_v2.types import participant +from google.cloud.dialogflow_v2.types import generator, participant from .client import ConversationsClient from .transports.base import DEFAULT_CLIENT_INFO, ConversationsTransport @@ -97,6 +96,8 @@ class ConversationsAsyncClient: ) document_path = staticmethod(ConversationsClient.document_path) parse_document_path = staticmethod(ConversationsClient.parse_document_path) + generator_path = staticmethod(ConversationsClient.generator_path) + parse_generator_path = staticmethod(ConversationsClient.parse_generator_path) knowledge_base_path = staticmethod(ConversationsClient.knowledge_base_path) parse_knowledge_base_path = staticmethod( ConversationsClient.parse_knowledge_base_path @@ -225,9 +226,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConversationsClient).get_transport_class, type(ConversationsClient) - ) + get_transport_class = ConversationsClient.get_transport_class def __init__( self, @@ -1130,6 +1129,93 @@ async def sample_generate_stateless_summary(): # Done; return the response. return response + async def generate_stateless_suggestion( + self, + request: Optional[ + Union[conversation.GenerateStatelessSuggestionRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversation.GenerateStatelessSuggestionResponse: + r"""Generates and returns a suggestion for a conversation + that does not have a resource created for it. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflow_v2 + + async def sample_generate_stateless_suggestion(): + # Create a client + client = dialogflow_v2.ConversationsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GenerateStatelessSuggestionRequest( + parent="parent_value", + ) + + # Make the request + response = await client.generate_stateless_suggestion(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflow_v2.types.GenerateStatelessSuggestionRequest, dict]]): + The request object. The request message for + [Conversations.GenerateStatelessSuggestion][google.cloud.dialogflow.v2.Conversations.GenerateStatelessSuggestion]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflow_v2.types.GenerateStatelessSuggestionResponse: + The response message for + [Conversations.GenerateStatelessSuggestion][google.cloud.dialogflow.v2.Conversations.GenerateStatelessSuggestion]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, conversation.GenerateStatelessSuggestionRequest): + request = conversation.GenerateStatelessSuggestionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_stateless_suggestion + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def search_knowledge( self, request: Optional[Union[conversation.SearchKnowledgeRequest, dict]] = None, @@ -1162,8 +1248,10 @@ async def sample_search_knowledge(): query.language_code = "language_code_value" request = dialogflow_v2.SearchKnowledgeRequest( + parent="parent_value", query=query, conversation_profile="conversation_profile_value", + session_id="session_id_value", ) # Make the request diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py index c2f91d36fe97..2563af797f05 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py @@ -55,7 +55,7 @@ from google.cloud.dialogflow_v2.services.conversations import pagers from google.cloud.dialogflow_v2.types import conversation from google.cloud.dialogflow_v2.types import conversation as gcd_conversation -from google.cloud.dialogflow_v2.types import participant +from google.cloud.dialogflow_v2.types import generator, 
participant from .transports.base import DEFAULT_CLIENT_INFO, ConversationsTransport from .transports.grpc import ConversationsGrpcTransport @@ -328,6 +328,28 @@ def parse_document_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def generator_path( + project: str, + location: str, + generator: str, + ) -> str: + """Returns a fully-qualified generator string.""" + return "projects/{project}/locations/{location}/generators/{generator}".format( + project=project, + location=location, + generator=generator, + ) + + @staticmethod + def parse_generator_path(path: str) -> Dict[str, str]: + """Parses a generator path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/generators/(?P<generator>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def knowledge_base_path( project: str, @@ -826,7 +848,7 @@ def __init__( transport_init: Union[ Type[ConversationsTransport], Callable[..., ConversationsTransport] ] = ( - type(self).get_transport_class(transport) + ConversationsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConversationsTransport], transport) ) @@ -1662,6 +1684,93 @@ def sample_generate_stateless_summary(): # Done; return the response. return response + def generate_stateless_suggestion( + self, + request: Optional[ + Union[conversation.GenerateStatelessSuggestionRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversation.GenerateStatelessSuggestionResponse: + r"""Generates and returns a suggestion for a conversation + that does not have a resource created for it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflow_v2 + + def sample_generate_stateless_suggestion(): + # Create a client + client = dialogflow_v2.ConversationsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GenerateStatelessSuggestionRequest( + parent="parent_value", + ) + + # Make the request + response = client.generate_stateless_suggestion(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflow_v2.types.GenerateStatelessSuggestionRequest, dict]): + The request object. The request message for + [Conversations.GenerateStatelessSuggestion][google.cloud.dialogflow.v2.Conversations.GenerateStatelessSuggestion]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflow_v2.types.GenerateStatelessSuggestionResponse: + The response message for + [Conversations.GenerateStatelessSuggestion][google.cloud.dialogflow.v2.Conversations.GenerateStatelessSuggestion]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, conversation.GenerateStatelessSuggestionRequest): + request = conversation.GenerateStatelessSuggestionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.generate_stateless_suggestion + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def search_knowledge( self, request: Optional[Union[conversation.SearchKnowledgeRequest, dict]] = None, @@ -1694,8 +1803,10 @@ def sample_search_knowledge(): query.language_code = "language_code_value" request = dialogflow_v2.SearchKnowledgeRequest( + parent="parent_value", query=query, conversation_profile="conversation_profile_value", + session_id="session_id_value", ) # Make the request diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/base.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/base.py index f4bc7d624084..7e72ab1289bc 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/base.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/base.py @@ -169,6 +169,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.generate_stateless_suggestion: gapic_v1.method.wrap_method( + self.generate_stateless_suggestion, + default_timeout=None, + client_info=client_info, + ), self.search_knowledge: gapic_v1.method.wrap_method( self.search_knowledge, default_timeout=None, @@ -260,6 +265,18 @@ def generate_stateless_summary( ]: raise NotImplementedError() + @property + def generate_stateless_suggestion( + self, + ) -> Callable[ + [conversation.GenerateStatelessSuggestionRequest], + Union[ + 
conversation.GenerateStatelessSuggestionResponse, + Awaitable[conversation.GenerateStatelessSuggestionResponse], + ], + ]: + raise NotImplementedError() + @property def search_knowledge( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/grpc.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/grpc.py index e825cad80b57..ec8fc5f6cf00 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/grpc.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/grpc.py @@ -464,6 +464,38 @@ def generate_stateless_summary( ) return self._stubs["generate_stateless_summary"] + @property + def generate_stateless_suggestion( + self, + ) -> Callable[ + [conversation.GenerateStatelessSuggestionRequest], + conversation.GenerateStatelessSuggestionResponse, + ]: + r"""Return a callable for the generate stateless suggestion method over gRPC. + + Generates and returns a suggestion for a conversation + that does not have a resource created for it. + + Returns: + Callable[[~.GenerateStatelessSuggestionRequest], + ~.GenerateStatelessSuggestionResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "generate_stateless_suggestion" not in self._stubs: + self._stubs[ + "generate_stateless_suggestion" + ] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.v2.Conversations/GenerateStatelessSuggestion", + request_serializer=conversation.GenerateStatelessSuggestionRequest.serialize, + response_deserializer=conversation.GenerateStatelessSuggestionResponse.deserialize, + ) + return self._stubs["generate_stateless_suggestion"] + @property def search_knowledge( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/grpc_asyncio.py index df21ff623c77..113d5ae382cf 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/grpc_asyncio.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/grpc_asyncio.py @@ -472,6 +472,38 @@ def generate_stateless_summary( ) return self._stubs["generate_stateless_summary"] + @property + def generate_stateless_suggestion( + self, + ) -> Callable[ + [conversation.GenerateStatelessSuggestionRequest], + Awaitable[conversation.GenerateStatelessSuggestionResponse], + ]: + r"""Return a callable for the generate stateless suggestion method over gRPC. + + Generates and returns a suggestion for a conversation + that does not have a resource created for it. + + Returns: + Callable[[~.GenerateStatelessSuggestionRequest], + Awaitable[~.GenerateStatelessSuggestionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "generate_stateless_suggestion" not in self._stubs: + self._stubs[ + "generate_stateless_suggestion" + ] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.v2.Conversations/GenerateStatelessSuggestion", + request_serializer=conversation.GenerateStatelessSuggestionRequest.serialize, + response_deserializer=conversation.GenerateStatelessSuggestionResponse.deserialize, + ) + return self._stubs["generate_stateless_suggestion"] + @property def search_knowledge( self, @@ -540,6 +572,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.generate_stateless_suggestion: gapic_v1.method_async.wrap_method( + self.generate_stateless_suggestion, + default_timeout=None, + client_info=client_info, + ), self.search_knowledge: gapic_v1.method_async.wrap_method( self.search_knowledge, default_timeout=None, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/rest.py index dba639763c3c..47d59094b339 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/rest.py @@ -83,6 +83,14 @@ def post_create_conversation(self, response): logging.log(f"Received response: {response}") return response + def pre_generate_stateless_suggestion(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_stateless_suggestion(self, response): + logging.log(f"Received response: {response}") + return response + def pre_generate_stateless_summary(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -183,6 +191,31 @@ def post_create_conversation( """ return response + def pre_generate_stateless_suggestion( + self, + request: 
conversation.GenerateStatelessSuggestionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + conversation.GenerateStatelessSuggestionRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for generate_stateless_suggestion + + Override in a subclass to manipulate the request or metadata + before they are sent to the Conversations server. + """ + return request, metadata + + def post_generate_stateless_suggestion( + self, response: conversation.GenerateStatelessSuggestionResponse + ) -> conversation.GenerateStatelessSuggestionResponse: + """Post-rpc interceptor for generate_stateless_suggestion + + Override in a subclass to manipulate the response + after it is returned by the Conversations server but before + it is returned to user code. + """ + return response + def pre_generate_stateless_summary( self, request: conversation.GenerateStatelessSummaryRequest, @@ -748,6 +781,104 @@ def __call__( resp = self._interceptor.post_create_conversation(resp) return resp + class _GenerateStatelessSuggestion(ConversationsRestStub): + def __hash__(self): + return hash("GenerateStatelessSuggestion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: conversation.GenerateStatelessSuggestionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversation.GenerateStatelessSuggestionResponse: + r"""Call the generate stateless + suggestion method over HTTP. + + Args: + request (~.conversation.GenerateStatelessSuggestionRequest): + The request object. The request message for + [Conversations.GenerateStatelessSuggestion][google.cloud.dialogflow.v2.Conversations.GenerateStatelessSuggestion]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.conversation.GenerateStatelessSuggestionResponse: + The response message for + [Conversations.GenerateStatelessSuggestion][google.cloud.dialogflow.v2.Conversations.GenerateStatelessSuggestion]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/statelessSuggestion:generate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_generate_stateless_suggestion( + request, metadata + ) + pb_request = conversation.GenerateStatelessSuggestionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = conversation.GenerateStatelessSuggestionResponse() + pb_resp = conversation.GenerateStatelessSuggestionResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_stateless_suggestion(resp) + return resp + class _GenerateStatelessSummary(ConversationsRestStub): def __hash__(self): return hash("GenerateStatelessSummary") @@ -1371,6 +1502,17 @@ def create_conversation( # In C++ this would require a dynamic_cast return self._CreateConversation(self._session, self._host, self._interceptor) # type: ignore + @property + def generate_stateless_suggestion( + self, + ) -> Callable[ + [conversation.GenerateStatelessSuggestionRequest], + conversation.GenerateStatelessSuggestionResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateStatelessSuggestion(self._session, self._host, self._interceptor) # type: ignore + @property def generate_stateless_summary( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/async_client.py index 91b6998e1ff9..88b4715db904 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Callable, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DocumentsClient).get_transport_class, type(DocumentsClient) - ) + get_transport_class = DocumentsClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/client.py index 259398041362..7a181338d959 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/client.py @@ -665,7 +665,7 @@ def __init__( transport_init: Union[ Type[DocumentsTransport], Callable[..., DocumentsTransport] ] = ( - type(self).get_transport_class(transport) + DocumentsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DocumentsTransport], transport) ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/__init__.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/__init__.py new file mode 100644 index 000000000000..b2c855861d8c --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import EncryptionSpecServiceAsyncClient +from .client import EncryptionSpecServiceClient + +__all__ = ( + "EncryptionSpecServiceClient", + "EncryptionSpecServiceAsyncClient", +) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/async_client.py new file mode 100644 index 000000000000..2d002ee347bb --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/async_client.py @@ -0,0 +1,817 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflow_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.dialogflow_v2.types import encryption_spec as gcd_encryption_spec +from google.cloud.dialogflow_v2.types import encryption_spec + +from .client import EncryptionSpecServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, EncryptionSpecServiceTransport +from .transports.grpc_asyncio import EncryptionSpecServiceGrpcAsyncIOTransport + + +class EncryptionSpecServiceAsyncClient: + """Manages encryption spec settings for Dialogflow and Agent + Assist. + """ + + _client: EncryptionSpecServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = EncryptionSpecServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = EncryptionSpecServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = EncryptionSpecServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = EncryptionSpecServiceClient._DEFAULT_UNIVERSE + + encryption_spec_path = staticmethod( + EncryptionSpecServiceClient.encryption_spec_path + ) + parse_encryption_spec_path = staticmethod( + EncryptionSpecServiceClient.parse_encryption_spec_path + ) + common_billing_account_path = staticmethod( + EncryptionSpecServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + EncryptionSpecServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(EncryptionSpecServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + EncryptionSpecServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + EncryptionSpecServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + EncryptionSpecServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(EncryptionSpecServiceClient.common_project_path) + parse_common_project_path = staticmethod( + EncryptionSpecServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + EncryptionSpecServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + EncryptionSpecServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EncryptionSpecServiceAsyncClient: The constructed client. 
+ """ + return EncryptionSpecServiceClient.from_service_account_info.__func__(EncryptionSpecServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EncryptionSpecServiceAsyncClient: The constructed client. + """ + return EncryptionSpecServiceClient.from_service_account_file.__func__(EncryptionSpecServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return EncryptionSpecServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> EncryptionSpecServiceTransport: + """Returns the transport used by the client instance. + + Returns: + EncryptionSpecServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = EncryptionSpecServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + EncryptionSpecServiceTransport, + Callable[..., EncryptionSpecServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the encryption spec service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,EncryptionSpecServiceTransport,Callable[..., EncryptionSpecServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the EncryptionSpecServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = EncryptionSpecServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_encryption_spec( + self, + request: Optional[Union[encryption_spec.GetEncryptionSpecRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> encryption_spec.EncryptionSpec: + r"""Gets location-level encryption key specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflow_v2 + + async def sample_get_encryption_spec(): + # Create a client + client = dialogflow_v2.EncryptionSpecServiceAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetEncryptionSpecRequest( + name="name_value", + ) + + # Make the request + response = await client.get_encryption_spec(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflow_v2.types.GetEncryptionSpecRequest, dict]]): + The request object. The request to get location-level + encryption specification. + name (:class:`str`): + Required. The name of the encryption + spec resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflow_v2.types.EncryptionSpec: + A customer-managed encryption key + specification that can be applied to all + created resources (e.g. Conversation). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, encryption_spec.GetEncryptionSpecRequest): + request = encryption_spec.GetEncryptionSpecRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_encryption_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def initialize_encryption_spec( + self, + request: Optional[ + Union[gcd_encryption_spec.InitializeEncryptionSpecRequest, dict] + ] = None, + *, + encryption_spec: Optional[gcd_encryption_spec.EncryptionSpec] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Initializes a location-level encryption key + specification. An error will be thrown if the location + has resources already created before the initialization. + Once the encryption specification is initialized at a + location, it is immutable and all newly created + resources under the location will be encrypted with the + existing specification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflow_v2 + + async def sample_initialize_encryption_spec(): + # Create a client + client = dialogflow_v2.EncryptionSpecServiceAsyncClient() + + # Initialize request argument(s) + encryption_spec = dialogflow_v2.EncryptionSpec() + encryption_spec.kms_key = "kms_key_value" + + request = dialogflow_v2.InitializeEncryptionSpecRequest( + encryption_spec=encryption_spec, + ) + + # Make the request + operation = client.initialize_encryption_spec(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflow_v2.types.InitializeEncryptionSpecRequest, dict]]): + The request object. 
The request to initialize a + location-level encryption specification. + encryption_spec (:class:`google.cloud.dialogflow_v2.types.EncryptionSpec`): + Required. The encryption spec used for CMEK encryption. + It is required that the kms key is in the same region as + the endpoint. The same key will be used for all + provisioned resources, if encryption is available. If + the kms_key_name is left empty, no encryption will be + enforced. + + This corresponds to the ``encryption_spec`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dialogflow_v2.types.InitializeEncryptionSpecResponse` + The response to initialize a location-level encryption + specification. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([encryption_spec]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_encryption_spec.InitializeEncryptionSpecRequest): + request = gcd_encryption_spec.InitializeEncryptionSpecRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if encryption_spec is not None: + request.encryption_spec = encryption_spec + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.initialize_encryption_spec + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("encryption_spec.name", request.encryption_spec.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcd_encryption_spec.InitializeEncryptionSpecResponse, + metadata_type=gcd_encryption_spec.InitializeEncryptionSpecMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "EncryptionSpecServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("EncryptionSpecServiceAsyncClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/client.py similarity index 65% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/client.py rename to packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/client.py index e395cd391a54..7c7b92bbedff 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/client.py @@ -41,33 +41,29 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.ads.admanager_v1 import gapic_version as package_version +from google.cloud.dialogflow_v2 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except 
AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import money_pb2 # type: ignore - -from google.ads.admanager_v1.services.line_item_service import pagers -from google.ads.admanager_v1.types import ( - computed_status_enum, - creative_placeholder, - environment_type_enum, - goal, - line_item_enums, - line_item_service, -) -from .transports.base import DEFAULT_CLIENT_INFO, LineItemServiceTransport -from .transports.rest import LineItemServiceRestTransport +from google.cloud.dialogflow_v2.types import encryption_spec as gcd_encryption_spec +from google.cloud.dialogflow_v2.types import encryption_spec + +from .transports.base import DEFAULT_CLIENT_INFO, EncryptionSpecServiceTransport +from .transports.grpc import EncryptionSpecServiceGrpcTransport +from .transports.grpc_asyncio import EncryptionSpecServiceGrpcAsyncIOTransport +from .transports.rest import EncryptionSpecServiceRestTransport -class LineItemServiceClientMeta(type): - """Metaclass for the LineItemService client. +class EncryptionSpecServiceClientMeta(type): + """Metaclass for the EncryptionSpecService client. This provides class-level methods for building and retrieving support objects (e.g. 
transport) without polluting the client instance @@ -76,13 +72,15 @@ class LineItemServiceClientMeta(type): _transport_registry = ( OrderedDict() - ) # type: Dict[str, Type[LineItemServiceTransport]] - _transport_registry["rest"] = LineItemServiceRestTransport + ) # type: Dict[str, Type[EncryptionSpecServiceTransport]] + _transport_registry["grpc"] = EncryptionSpecServiceGrpcTransport + _transport_registry["grpc_asyncio"] = EncryptionSpecServiceGrpcAsyncIOTransport + _transport_registry["rest"] = EncryptionSpecServiceRestTransport def get_transport_class( cls, label: Optional[str] = None, - ) -> Type[LineItemServiceTransport]: + ) -> Type[EncryptionSpecServiceTransport]: """Returns an appropriate transport class. Args: @@ -101,8 +99,10 @@ def get_transport_class( return next(iter(cls._transport_registry.values())) -class LineItemServiceClient(metaclass=LineItemServiceClientMeta): - """Provides methods for handling LineItem objects.""" +class EncryptionSpecServiceClient(metaclass=EncryptionSpecServiceClientMeta): + """Manages encryption spec settings for Dialogflow and Agent + Assist. + """ @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -135,12 +135,12 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "admanager.googleapis.com" + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) - _DEFAULT_ENDPOINT_TEMPLATE = "admanager.{UNIVERSE_DOMAIN}" + _DEFAULT_ENDPOINT_TEMPLATE = "dialogflow.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @classmethod @@ -154,7 +154,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - LineItemServiceClient: The constructed client. 
+ EncryptionSpecServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials @@ -172,7 +172,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - LineItemServiceClient: The constructed client. + EncryptionSpecServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -181,71 +181,35 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @property - def transport(self) -> LineItemServiceTransport: + def transport(self) -> EncryptionSpecServiceTransport: """Returns the transport used by the client instance. Returns: - LineItemServiceTransport: The transport used by the client + EncryptionSpecServiceTransport: The transport used by the client instance. """ return self._transport @staticmethod - def label_path( - network_code: str, - label: str, - ) -> str: - """Returns a fully-qualified label string.""" - return "networks/{network_code}/labels/{label}".format( - network_code=network_code, - label=label, - ) - - @staticmethod - def parse_label_path(path: str) -> Dict[str, str]: - """Parses a label path into its component segments.""" - m = re.match(r"^networks/(?P.+?)/labels/(?P